diff --git a/.github/workflows/build-and-deploy-to-main.yml b/.github/workflows/build-and-deploy-to-main.yml index 3bf82164..14c3c3e1 100644 --- a/.github/workflows/build-and-deploy-to-main.yml +++ b/.github/workflows/build-and-deploy-to-main.yml @@ -3,6 +3,9 @@ name: (main) Build and push api image to Azure Container Registry on: + push: + branches: + - main pull_request: types: [closed] branches: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 44975a75..b071c8cc 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -21,9 +21,9 @@ jobs: - uses: Gr1N/setup-poetry@v8 - name: Install dependencies - run: poetry install + run: poetry install --no-root - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: .venv key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} diff --git a/.github/workflows/staging-build-and-deploy.yml b/.github/workflows/staging-build-and-deploy.yml index 5acd3026..cacd1338 100644 --- a/.github/workflows/staging-build-and-deploy.yml +++ b/.github/workflows/staging-build-and-deploy.yml @@ -1,48 +1,48 @@ -on: - push: - branches: - - staging - pull_request: - types: [opened, synchronize, reopened, closed] - branches: - - staging +on: + # push: + # branches: + # - staging + # pull_request: + # types: [opened, synchronize, reopened, closed] + # branches: + # - staging workflow_dispatch: - + name: Staging_Container_Workflow jobs: - build-and-deploy: - runs-on: ubuntu-latest - steps: - # checkout the repo - - name: 'Checkout GitHub Action' - uses: actions/checkout@v4 - with: - ref: staging - - - name: 'Login via Azure CLI' - uses: azure/login@v1 - with: - creds: ${{ secrets.AZURE_CREDENTIALS }} - - - name: 'Build and push image' - uses: azure/docker-login@v1 - with: - login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} - username: ${{ secrets.REGISTRY_USERNAME }} - password: ${{ secrets.REGISTRY_PASSWORD }} - - run: | - docker build . 
-t ${{ secrets.REGISTRY_LOGIN_SERVER }}/fairhub-flask-api:staging${{ github.sha }} - docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/fairhub-flask-api:staging${{ github.sha }} + build-and-deploy: + runs-on: ubuntu-latest + steps: + # checkout the repo + - name: "Checkout GitHub Action" + uses: actions/checkout@v4 + with: + ref: staging + + - name: "Login via Azure CLI" + uses: azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: "Build and push image" + uses: azure/docker-login@v1 + with: + login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} + username: ${{ secrets.REGISTRY_USERNAME }} + password: ${{ secrets.REGISTRY_PASSWORD }} + - run: | + docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/fairhub-flask-api:staging${{ github.sha }} + docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/fairhub-flask-api:staging${{ github.sha }} - - name: 'Deploy to Azure Container Instances' - uses: 'azure/aci-deploy@v1' - with: - resource-group: ${{ secrets.RESOURCE_GROUP }} - dns-name-label: ${{ secrets.RESOURCE_GROUP }}${{ github.run_number }} - image: ${{ secrets.REGISTRY_LOGIN_SERVER }}/sampleapp:${{ github.sha }} - registry-login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} - registry-username: ${{ secrets.REGISTRY_USERNAME }} - registry-password: ${{ secrets.REGISTRY_PASSWORD }} - name: fairhub-flask-api-staging - location: 'west us' + - name: "Deploy to Azure Container Instances" + uses: "azure/aci-deploy@v1" + with: + resource-group: ${{ secrets.RESOURCE_GROUP }} + dns-name-label: ${{ secrets.RESOURCE_GROUP }}${{ github.run_number }} + image: ${{ secrets.REGISTRY_LOGIN_SERVER }}/sampleapp:${{ github.sha }} + registry-login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} + registry-username: ${{ secrets.REGISTRY_USERNAME }} + registry-password: ${{ secrets.REGISTRY_PASSWORD }} + name: fairhub-flask-api-staging + location: "west us" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8ecc9bb4..7f449e4f 100644 --- 
a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -29,9 +29,9 @@ jobs: - uses: Gr1N/setup-poetry@v8 - name: Install dependencies - run: poetry install + run: poetry install --no-root - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: .venv key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} diff --git a/Dockerfile b/Dockerfile index b25c9554..1488a7ff 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,34 +1,47 @@ -FROM python:3.10-alpine +# Use official slim Python image +FROM python:3.10-slim +# Expose the port your app will run on EXPOSE 5000 +# Set working directory WORKDIR /app +# Environment ENV POETRY_VERSION=1.3.2 - -RUN apk update -RUN apk add --no-cache gcc libffi-dev musl-dev postgresql-dev - +# Note: Force pip to use pre-built wheels +ENV PIP_ONLY_BINARY=:all: + +# Install system dependencies for building Python packages +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + build-essential \ + libffi-dev \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* + +# Install Poetry RUN pip install "poetry==$POETRY_VERSION" -COPY poetry.lock pyproject.toml ./ - +# Install Python dependencies +COPY pyproject.toml poetry.lock ./ RUN poetry config virtualenvs.create false -RUN poetry install +RUN poetry install --no-root +# Copy source code and config COPY apis ./apis COPY model ./model COPY core ./core COPY modules ./modules -COPY app.py . -COPY config.py . -COPY caching.py . - +COPY app.py config.py caching.py ./ +# Copy database/migration setup COPY alembic ./alembic COPY alembic.ini . - +# Copy runtime entrypoint COPY entrypoint.sh . 
+COPY alembic alembic.ini ./ +COPY entrypoint.sh ./ +# Setup Entrypoint RUN chmod +x entrypoint.sh - -ENTRYPOINT ["./entrypoint.sh"] \ No newline at end of file +ENTRYPOINT ["./entrypoint.sh"] diff --git a/alembic/versions/0ff53a655198_update_email_verification.py b/alembic/versions/0ff53a655198_update_email_verification.py new file mode 100644 index 00000000..dc958c29 --- /dev/null +++ b/alembic/versions/0ff53a655198_update_email_verification.py @@ -0,0 +1,36 @@ +"""update_email_verification + +Revision ID: 0ff53a655198 +Revises: 3ffefbd9c03b +Create Date: 2025-02-18 13:50:48.808176 + +""" +from typing import Sequence, Union +import sqlalchemy as sa +import datetime +from alembic import op + + +# revision identifiers, used by Alembic. +revision: str = '0ff53a655198' +down_revision: Union[str, None] = '5c1257547eb8' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +created_at = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) + + +def upgrade() -> None: + op.alter_column("email_verification", "token", type_=sa.String) + op.alter_column("email_verification", "user_id", type_=sa.CHAR(36)) + + op.drop_column("email_verification", "created_at") + + op.add_column( + "email_verification", sa.Column("created_at", sa.BIGINT(), nullable=True) + ) + + op.execute(f"UPDATE \"email_verification\" SET created_at ='{created_at}'") + + op.alter_column("email_verification", "created_at", nullable=False) diff --git a/alembic/versions/3ffefbd9c03b_email_verified_type.py b/alembic/versions/3ffefbd9c03b_email_verified_type.py new file mode 100644 index 00000000..b3792d95 --- /dev/null +++ b/alembic/versions/3ffefbd9c03b_email_verified_type.py @@ -0,0 +1,33 @@ +"""email verified type + +Revision ID: 3ffefbd9c03b +Revises: 9698369d7a8c +Create Date: 2024-07-01 12:28:02.596192 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '3ffefbd9c03b' +down_revision: Union[str, None] = '9698369d7a8c' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + with op.batch_alter_table("user") as batch_op: + batch_op.alter_column( + "email_verified", + type_=sa.Boolean(), + postgresql_using="email_verified::boolean", + ) + + + + + + diff --git a/alembic/versions/5c1257547eb8_update_study_acronym.py b/alembic/versions/5c1257547eb8_update_study_acronym.py new file mode 100644 index 00000000..9a87ec03 --- /dev/null +++ b/alembic/versions/5c1257547eb8_update_study_acronym.py @@ -0,0 +1,28 @@ +"""update_study_acronym + +Revision ID: 5c1257547eb8 +Revises: 3ffefbd9c03b +Create Date: 2025-02-19 16:25:24.597207 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '5c1257547eb8' +down_revision: Union[str, None] = '3ffefbd9c03b' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + with op.batch_alter_table("study") as batch_op: + batch_op.alter_column( + "acronym", + new_column_name="short_description", + type_=sa.String(300), + nullable=False + ) diff --git a/alembic/versions/9698369d7a8c_create_session_table.py b/alembic/versions/9698369d7a8c_create_session_table.py new file mode 100644 index 00000000..b092a8b4 --- /dev/null +++ b/alembic/versions/9698369d7a8c_create_session_table.py @@ -0,0 +1,27 @@ +"""create session table + +Revision ID: 9698369d7a8c +Revises: +Create Date: 2024-06-13 09:59:17.605666 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision: str = '9698369d7a8c' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'session', + sa.Column('id', sa.CHAR(36), primary_key=True), + sa.Column('user_id', sa.CHAR(36), sa.ForeignKey("user.id"), nullable=False), + sa.Column('expires_at', sa.BigInteger, nullable=False) + ) + diff --git a/alembic/versions/df71df391cdb_add_token_to_user.py b/alembic/versions/df71df391cdb_add_token_to_user.py new file mode 100644 index 00000000..b0a530e1 --- /dev/null +++ b/alembic/versions/df71df391cdb_add_token_to_user.py @@ -0,0 +1,26 @@ +"""add token to user + +Revision ID: df71df391cdb +Revises: 3ffefbd9c03b +Create Date: 2024-07-03 10:15:49.657807 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import uuid + + +# revision identifiers, used by Alembic. +revision: str = 'df71df391cdb' +down_revision: Union[str, None] = '0ff53a655198' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + +password_reset_token = str(uuid.uuid4()) + +def upgrade() -> None: + op.add_column( + "user", sa.Column("password_reset_token", sa.String, nullable=True) + ) diff --git a/alembic/versions_backup/6173282aef08_remove_invite_info.py b/alembic/versions_backup/6173282aef08_remove_invite_info.py new file mode 100644 index 00000000..92c029a6 --- /dev/null +++ b/alembic/versions_backup/6173282aef08_remove_invite_info.py @@ -0,0 +1,24 @@ +"""remove_invite_info + +Revision ID: 6173282aef08 +Revises: f150341d2741 +Create Date: 2023-12-19 00:32:08.157538 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "6173282aef08" +down_revision: Union[str, None] = "f150341d2741" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.drop_column("invite", "info") + with op.batch_alter_table("notification") as batch_op: + batch_op.alter_column("target", nullable=True) diff --git a/tests/functional/__init__.py b/alembic/versions_backup/72ac2b020c7c_delete_dataset_readme_table.py similarity index 100% rename from tests/functional/__init__.py rename to alembic/versions_backup/72ac2b020c7c_delete_dataset_readme_table.py diff --git a/alembic/versions_backup/95d6e53e2578_edit_invite_table_pk.py b/alembic/versions_backup/95d6e53e2578_edit_invite_table_pk.py new file mode 100644 index 00000000..e19aa4d3 --- /dev/null +++ b/alembic/versions_backup/95d6e53e2578_edit_invite_table_pk.py @@ -0,0 +1,38 @@ +"""edit invite table PK + +Revision ID: 95d6e53e2578 +Revises: db1b62d02def +Create Date: 2023-11-28 14:58:43.869472 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + + +# revision identifiers, used by Alembic. 
+revision: str = "95d6e53e2578" +down_revision: Union[str, None] = "db1b62d02def" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade(): + connection = op.get_bind() + inspector = sa.inspect(connection) + + # Check if the table exists before dropping it + # if not inspector.has_table("invite"): + # if 'id' not in inspector.get_columns("invite"): + op.add_column("invite", sa.Column("id", sa.CHAR(36), nullable=True)) + op.execute( + "UPDATE invite SET id = uuid_in(overlay(overlay(md5(random()::text || ':' " + "|| random()::text) placing '4' from 13) placing to_hex(floor(random()*(11-8+1) + 8)::int)" + "::text from 17)::cstring);" + ) + op.execute("ALTER TABLE invite DROP CONSTRAINT invited_study_contributor_pkey") + + op.create_primary_key("id", "invite", ["id"]) + with op.batch_alter_table("invite") as batch_op: + batch_op.alter_column("study_id", nullable=True) diff --git a/alembic/versions_backup/b20e07d8924f_delete_token_fron_user_table.py b/alembic/versions_backup/b20e07d8924f_delete_token_fron_user_table.py new file mode 100644 index 00000000..5e07b699 --- /dev/null +++ b/alembic/versions_backup/b20e07d8924f_delete_token_fron_user_table.py @@ -0,0 +1,27 @@ +"""delete_token_fron_user_table + +Revision ID: b20e07d8924f +Revises: f189827ee101 +Create Date: 2023-12-13 13:31:38.810816 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "b20e07d8924f" +down_revision: Union[str, None] = "f189827ee101" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.drop_column("user", "token") + op.drop_column("user", "token_generated") + + +def downgrade() -> None: + pass diff --git a/alembic/versions_backup/db1b62d02def_edit_invite_table.py b/alembic/versions_backup/db1b62d02def_edit_invite_table.py new file mode 100644 index 00000000..2f9be132 --- /dev/null +++ b/alembic/versions_backup/db1b62d02def_edit_invite_table.py @@ -0,0 +1,25 @@ +"""edit invite table + +Revision ID: db1b62d02def +Revises: 72ac2b020c7c +Create Date: 2023-11-28 13:56:41.821141 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + + +revision: str = "db1b62d02def" +down_revision: Union[str, None] = "72ac2b020c7c" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.rename_table("invited_study_contributor", "invite") + op.add_column("invite", sa.Column("info", sa.String(), nullable=True)) + op.create_unique_constraint( + "study_per_user", "invite", ["study_id", "email_address"] + ) diff --git a/alembic/versions_backup/eee9610b2cdc_role_nullable_and_fk_user_id.py b/alembic/versions_backup/eee9610b2cdc_role_nullable_and_fk_user_id.py new file mode 100644 index 00000000..310bf7af --- /dev/null +++ b/alembic/versions_backup/eee9610b2cdc_role_nullable_and_fk_user_id.py @@ -0,0 +1,56 @@ +"""role_nullable_and_fk_user_id + +Revision ID: eee9610b2cdc +Revises: 95d6e53e2578 +Create Date: 2023-12-01 00:09:44.745776 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +import datetime +import uuid +from alembic import op + + +# revision identifiers, used by Alembic. 
+revision: str = "eee9610b2cdc" +down_revision: Union[str, None] = "95d6e53e2578" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + +created_at = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) + +id = str(uuid.uuid4()) +hashed = str(uuid.uuid4()) + + +def upgrade() -> None: + op.add_column("invite", sa.Column("user_id", sa.CHAR(36))) + op.execute( + f'INSERT INTO "user" ("id", "email_address", "username", "hash", "created_at", "email_verified") VALUES ' + f"('{id}', 'eee9610b2cdc@fairhub.io', 'eee9610b2cdc', '{hashed}', '{created_at}', False)" + ) + + user_obj = f"SELECT * FROM user WHERE id = '{id}'" + if len(user_obj) < 1: + return "error", 403 + op.execute(f"UPDATE invite SET user_id ='{id}'") + + with op.batch_alter_table("invite") as batch_op: + batch_op.alter_column("permission", nullable=True) + batch_op.alter_column("user_id", nullable=False) + op.create_foreign_key( + "fk_user_id", + "invite", + "user", + ["user_id"], + ["id"], + ) + + with op.batch_alter_table("notification") as batch_op: + batch_op.alter_column("title", nullable=False) + batch_op.alter_column("message", nullable=False) + batch_op.alter_column("type", nullable=False) + batch_op.alter_column("target", nullable=False) + batch_op.alter_column("read", nullable=False) diff --git a/alembic/versions_backup/f150341d2741_modify_email_verification.py b/alembic/versions_backup/f150341d2741_modify_email_verification.py new file mode 100644 index 00000000..ce272600 --- /dev/null +++ b/alembic/versions_backup/f150341d2741_modify_email_verification.py @@ -0,0 +1,35 @@ +"""modify_email_verification + +Revision ID: f150341d2741 +Revises: b20e07d8924f +Create Date: 2023-12-13 20:43:24.637259 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +import datetime +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "f150341d2741" +down_revision: Union[str, None] = "b20e07d8924f" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + +created_at = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) + + +def upgrade() -> None: + op.alter_column("email_verification", "token", type_=sa.String) + op.alter_column("email_verification", "user_id", type_=sa.CHAR(36)) + + op.drop_column("email_verification", "created_at") + + op.add_column( + "email_verification", sa.Column("created_at", sa.BIGINT(), nullable=True) + ) + + op.execute(f"UPDATE \"email_verification\" SET created_at ='{created_at}'") + + op.alter_column("email_verification", "created_at", nullable=False) diff --git a/alembic/versions_backup/f189827ee101_user_table_email_verfication.py b/alembic/versions_backup/f189827ee101_user_table_email_verfication.py new file mode 100644 index 00000000..28c1634e --- /dev/null +++ b/alembic/versions_backup/f189827ee101_user_table_email_verfication.py @@ -0,0 +1,36 @@ +"""user_table_email_verfication + +Revision ID: f189827ee101 +Revises: fed13d793eff +Create Date: 2023-12-11 14:54:31.303523 + +""" +from typing import Sequence, Union +import sqlalchemy as sa +import datetime +import random +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "f189827ee101" +down_revision: Union[str, None] = "fed13d793eff" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None +token_generated = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) +token = random.randint(10 ** (7 - 1), (10**7) - 1) + + +def upgrade(): + op.add_column("user", sa.Column("token_generated", sa.BIGINT, nullable=True)) + op.add_column("user", sa.Column("token", sa.String, nullable=True)) + op.execute(f"UPDATE \"user\" SET token_generated ='{token_generated}'") + op.execute(f"UPDATE \"user\" SET token ='{token}'") + op.execute("UPDATE invite SET info ='info'") + op.execute(f'UPDATE "user" SET email_verified = FALSE') + + with op.batch_alter_table("user") as batch_op: + batch_op.alter_column("token", nullable=False) + batch_op.alter_column("token_generated", nullable=False) + batch_op.alter_column("email_verified", nullable=False) + with op.batch_alter_table("invite") as batch_op: + batch_op.alter_column("info", nullable=False) diff --git a/alembic/versions_backup/fed13d793eff_email_verified_type.py b/alembic/versions_backup/fed13d793eff_email_verified_type.py new file mode 100644 index 00000000..fbc32a58 --- /dev/null +++ b/alembic/versions_backup/fed13d793eff_email_verified_type.py @@ -0,0 +1,27 @@ +"""email_verified_type + +Revision ID: fed13d793eff +Revises: eee9610b2cdc +Create Date: 2023-12-05 16:03:51.166254 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "fed13d793eff" +down_revision: Union[str, None] = "eee9610b2cdc" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + with op.batch_alter_table("user") as batch_op: + batch_op.alter_column( + "email_verified", + type_=sa.Boolean(), + postgresql_using="email_verified::boolean", + ) diff --git a/apis/__init__.py b/apis/__init__.py index c58a7559..883cb1d8 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -9,42 +9,29 @@ from .contributor import api as contributors_api from .dashboard import api as dashboard from .dataset import api as dataset_api -from .dataset_metadata.dataset_access import api as access +from .dataset_metadata.dataset_access_rights import api as access_rights from .dataset_metadata.dataset_alternate_identifier import api as alternate_identifier -from .dataset_metadata.dataset_consent import api as consent -from .dataset_metadata.dataset_contributor import api as dataset_contributor -from .dataset_metadata.dataset_date import api as date -from .dataset_metadata.dataset_de_ident_level import api as de_ident_level -from .dataset_metadata.dataset_description import api as description -from .dataset_metadata.dataset_funder import api as funder +from .dataset_metadata.dataset_data_management import api as dataset_data_management +from .dataset_metadata.dataset_team import api as dataset_team from .dataset_metadata.dataset_healthsheet import api as healthsheet -from .dataset_metadata.dataset_managing_organization import api as managing_organization from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_related_identifier import api as related_identifier -from .dataset_metadata.dataset_rights import api as rights -from .dataset_metadata.dataset_subject import api as subject -from .dataset_metadata.dataset_title import api as title +from .dataset_metadata.dataset_general_information import api as 
general_information from .file import api as file_api from .participant import api as participants_api from .redcap import api as redcap from .study import api as study_api from .study_metadata.study_arm import api as arm from .study_metadata.study_central_contact import api as central_contact -from .study_metadata.study_collaborators import api as collaborators -from .study_metadata.study_conditions import api as conditions from .study_metadata.study_description import api as study_description from .study_metadata.study_design import api as design from .study_metadata.study_eligibility import api as eligibility -from .study_metadata.study_identification import api as identification from .study_metadata.study_intervention import api as intervention - -# from .study_metadata.study_other import api as other -from .study_metadata.study_keywords import api as keywords from .study_metadata.study_location import api as location from .study_metadata.study_overall_official import api as overall_official from .study_metadata.study_oversight import api as oversight -from .study_metadata.study_sponsors import api as sponsors from .study_metadata.study_status import api as status +from .study_metadata.study_team import api as sponsors from .user import api as user from .utils import api as utils @@ -55,26 +42,19 @@ ) __all__ = [ - "managing_organization", "dataset_metadata_namespace", "study_metadata_namespace", "authentication", "contributors_api", "dataset_api", - "access", + "access_rights", "alternate_identifier", - "consent", + "dataset_data_management", "healthsheet", - "date", - "de_ident_level", - "description", - "funder", "dataset_other", "related_identifier", "api", - "rights", - "subject", - "title", + "general_information", "participants_api", "study_api", "arm", @@ -83,21 +63,17 @@ "eligibility", "intervention", "location", - # "other", - "keywords", - "conditions", "oversight", "overall_official", "sponsors", - "collaborators", "status", "user", - 
"identification", "study_description", - "dataset_contributor", + "dataset_team", "redcap", "dashboard", "utils", + # "invite_general_users", ] @@ -114,7 +90,6 @@ class HelloEverynyan(Resource): @api.response(400, "Validation Error") def get(self): """Returns a simple 'Server Active' message""" - return "Server active!" diff --git a/apis/authentication.py b/apis/authentication.py index b81f86d3..1e05eb41 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -6,18 +6,25 @@ import importlib import os import re +import time import uuid from datetime import timezone from typing import Any, Union import jwt from email_validator import EmailNotValidError, validate_email -from flask import g, make_response, request +from flask import g, make_response, request, Response from flask_restx import Namespace, Resource, fields from jsonschema import FormatChecker, ValidationError, validate import model +# from modules.invitation import reset_password, forgot_password + +# from modules.invitation import ( +# send_email_verification, +# ) + api = Namespace("Authentication", description="Authentication paths", path="/") signup_model = api.model( @@ -66,6 +73,7 @@ def post(self): "bpatel@fairhub.io", "sanjay@fairhub.io", "aydan@fairhub.io", + "cordier@ohsu.edu", ] if data["email_address"] not in bypassed_emails: @@ -147,19 +155,103 @@ def validate_password(instance): ).one_or_none() if user: return "This email address is already in use", 409 + new_user = model.User.from_data(data) + verification = model.EmailVerification(new_user) + new_user.email_verified = False + + # '''enable once email verification is on''' + # if os.environ.get("FLASK_ENV") == "testing": + # verification.token = 1234567 + + model.db.session.add(new_user) + model.db.session.add(verification) + + new_user.email_verified = True + + # '''When /confirm endpoint will be enabled, this logic will be moved there + # since users can not be a study contributor without email verification + # set to true, and 
this can happen only there''' invitations = model.StudyInvitedContributor.query.filter_by( email_address=data["email_address"] ).all() - - new_user = model.User.from_data(data) for invite in invitations: invite.study.add_user_to_study(new_user, invite.permission) model.db.session.delete(invite) - model.db.session.add(new_user) model.db.session.commit() + # """When the email verification functionality fully enabled these + # lines will be commented out and email will not be verified without email verification.""" + # if os.environ.get("FLASK_ENV") != "testing": + # if new_user.email_address in bypassed_emails: + # new_user.email_verified = True + + # if g.gb.is_on("email-verification"): + # if os.environ.get("FLASK_ENV") != "testing": + # if new_user.email_address not in bypassed_emails: + # send_email_verification(new_user.email_address, verification.token) + return f"Hi, {new_user.email_address}, you have successfully signed up", 201 +# @api.route("/auth/email-verification/confirm") +# class EmailVerification(Resource): +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(contributors_model) +# def post(self): +# data: Union[Any, dict] = request.json +# if "token" not in data or "email" not in data: +# return "email or token are required", 422 +# user = model.User.query.filter_by(email_address=data["email"]).one_or_none() +# if not user: +# return "user not found", 404 +# if user.email_verified: +# return "user already verified", 422 +# if os.environ.get("FLASK_ENV") != "testing": +# if not user.verify_token(data["token"]): +# return "Token invalid or expired", 422 +# user.email_verified = True +# +# model.db.session.commit() +# return "Email verified", 201 +# + +# @api.route("/auth/email-verification/resend") +# class GenerateVerification(Resource): +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(contributors_model) +# def post(self): +# data: Union[Any, dict] = 
request.json +# user = model.User.query.filter_by(email_address=data["email"]).one_or_none() +# if not user: +# return "user not found", 404 +# if user.email_verified: +# return "user already verified", 422 +# +# # user.email_verified = True +# # token = user.generate_token() +# +# # if g.gb.is_on("email-verification"): +# # if os.environ.get("FLASK_ENV") != "testing": +# # send_email_verification(user.email_address, token) +# +# model.db.session.commit() +# return "Your email is verified", 201 + + +@api.route("/auth/email-verification/check") +class GenerateVerificationCheck(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(contributors_model) + def post(self): + data: Union[Any, dict] = request.json + user = model.User.query.filter_by(email_address=data["email"]).one_or_none() + if not user: + return {"message": "User not found"}, 404 + return {"isVerified": user.email_verified}, 200 + + @api.route("/auth/login") class Login(Resource): """Login class is used to login users to the system""" @@ -172,7 +264,6 @@ def post(self): """logs in user and handles few authentication errors. 
Also, it sets token for logged user along with expiration date""" data: Union[Any, dict] = request.json - email_address = data["email_address"] def validate_is_valid_email(instance): @@ -206,10 +297,19 @@ def validate_is_valid_email(instance): validate(instance=data, schema=schema, format_checker=format_checker) except ValidationError as e: return e.message, 400 - user = model.User.query.filter_by(email_address=email_address).one_or_none() if not user: return "Invalid credentials", 401 + if os.environ.get("FLASK_ENV") != "testing": + bypassed_emails = [ + "test@fairhub.io", + "bpatel@fairhub.io", + "sanjay@fairhub.io", + "aydan@fairhub.io", + "cordier@ohsu.edu", + ] + if email_address in bypassed_emails: + user.email_verified = True validate_pass = user.check_password(data["password"]) @@ -232,24 +332,55 @@ def validate_is_valid_email(instance): # If not testing, directly use the 'config' module config = config_module + expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( + minutes=180 + ) + jti = str(uuid.uuid4()) encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=180), # noqa: W503 - "jti": str(uuid.uuid4()), + "exp": expired_in, + "jti": jti, }, # noqa: W503 config.FAIRHUB_SECRET, algorithm="HS256", ) - resp = make_response(user.to_dict()) + if not user.email_verified: + return resp resp.set_cookie( "token", encoded_jwt_code, secure=True, httponly=True, samesite="None" ) - resp.status_code = 200 - + # if g.gb.is_on("email-verification"): + # if os.environ.get("FLASK_ENV") != "testing": + # if not check_trusted_device(): + # title = "you logged in" + # device_ip = request.remote_addr + # notification_type = "info" + # target = "" + # read = False + # send_notification = model.Notification.from_data( + # user, + # { + # "title": title, + # "message": device_ip, + # "type": notification_type, + # "target": target, + # "read": read, + # }, + # ) + # 
model.db.session.add(send_notification) + # model.db.session.commit() + # signin_notification(user, device_ip) + # add_user_to_device_list(resp, user) + # resp.status_code = 200 + + g.token = jti + added_session = model.Session.from_data(jti, expired_in.timestamp(), user) + + model.db.session.add(added_session) + model.db.session.commit() return resp @@ -257,7 +388,7 @@ def authentication(): """it authenticates users to a study, sets access and refresh token. In addition, it handles error handling of expired token and non existed users""" g.user = None - + g.token = None if "token" not in request.cookies: return token: str = ( @@ -285,7 +416,20 @@ def authentication(): token_blacklist = model.TokenBlacklist.query.get(decoded["jti"]) if token_blacklist: return + # decode user user = model.User.query.get(decoded["user"]) + # decode session + session = model.Session.query.get(decoded["jti"]) + if not session: + g.user = None + g.token = None + return + + if session.expires_at < time.time(): + g.user = None + g.token = None + return + g.token = decoded["jti"] g.user = user @@ -306,7 +450,10 @@ def authorization(): if bool(re.search(route_pattern, request.path)): return if g.user: - return + if os.environ.get("FLASK_ENV") == "testing": + return + if g.user.email_verified: + return raise UnauthenticatedException("Access denied", 403) @@ -317,6 +464,9 @@ def is_granted(permission: str, study=None): ).first() if not contributor: return False + if os.environ.get("FLASK_ENV") != "testing": + if not g.user.email_verified: + return False role = { "owner": [ "owner", @@ -396,6 +546,7 @@ class Logout(Resource): @api.response(400, "Validation Error") def post(self): """simply logges out user from the system""" + resp = make_response() resp.set_cookie( "token", @@ -406,6 +557,14 @@ def post(self): expires=datetime.datetime.now(timezone.utc), ) resp.status_code = 204 + + if g.user and g.token: + remove_session = model.Session.query.filter( + model.Session.id == g.token + ).first() 
+ if remove_session: + model.db.session.delete(remove_session) + model.db.session.commit() return resp @@ -472,20 +631,219 @@ def confirm_new_password(instance): data: Union[Any, dict] = request.json user = model.User.query.get(g.user.id) + user.set_password(data["new_password"]) + model.db.session.commit() + session_logout() return "Password updated successfully", 200 -# @api.route("/auth/current-users") -# class CurrentUsers(Resource): -# """function is used to see all logged users in -# the system. For now, it is used for testing purposes""" +def session_logout(): + if g.user and g.token: + remove_sessions = model.Session.query.filter( + model.Session.user_id == g.user.id + ).all() -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def get(self): -# """returns all logged users in the system""" -# if not g.user: -# return None -# return g.user.to_dict() + for session in remove_sessions: + model.db.session.delete(session) + model.db.session.commit() + # return "Sessions are removed successfully", 200 + + +@api.route("/auth/forgot-password") +class ForgotPassword(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self): + """function is used to reset password in case users forget""" + + if os.environ.get("FLASK_ENV") == "testing": + config_module_name = "pytest_config" + else: + config_module_name = "config" + + config_module = importlib.import_module(config_module_name) + + if os.environ.get("FLASK_ENV") == "testing": + # If testing, use the 'TestConfig' class for accessing 'secret' + config = config_module.TestConfig + else: + # If not testing, directly use the 'config' module + config = config_module + + def validate_is_valid_email(instance): + email_address = instance + try: + validate_email(email_address) + return True + except EmailNotValidError as e: + raise ValidationError("Invalid email address format") from e + + # Schema validation + schema = { + "type": "object", + "required": 
["email_address"], + "additionalProperties": False, + "properties": { + "email_address": {"type": "string", "format": "valid_email"} + }, + } + + format_checker = FormatChecker() + format_checker.checks("valid_email")(validate_is_valid_email) + + try: + validate( + instance=request.json, schema=schema, format_checker=format_checker + ) + except ValidationError as e: + return e.message, 400 + + data: Union[Any, dict] = request.json + email_address: str = data["email_address"] + + user = model.User.query.filter( + model.User.email_address == email_address + ).first() + + if not user: + raise ValidationError("User associated with this email does not exist") + + expired_in = get_now() + datetime.timedelta(minutes=5) + jti = str(uuid.uuid4()) + reset_token = jwt.encode( + { + "user": user.id, + "exp": expired_in, + "jti": jti, + "email": email_address, + }, # noqa: W503 + config.FAIRHUB_SECRET, + algorithm="HS256", + ) + # email_address = email_address if user else "" + # first_name = user.user_details.first_name if user else "" + # last_name = user.user_details.last_name if user else "" + + # if g.gb.is_on("email-verification"): + # if os.environ.get("FLASK_ENV") != "testing": + # forgot_password(email_address, first_name, last_name, reset_token) + user.update_password_reset(reset_token) + model.db.session.commit() + + response = make_response("email is sent successfully", 200) + if os.environ.get("FLASK_ENV") == "testing": + response.headers.add("X-Token", reset_token) + return response + + +@api.route("/auth/reset-password") +class ResetPassword(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self): + """function is used to reset password in case users forget""" + if os.environ.get("FLASK_ENV") == "testing": + config_module_name = "pytest_config" + else: + config_module_name = "config" + + config_module = importlib.import_module(config_module_name) + + if os.environ.get("FLASK_ENV") == "testing": + # If testing, use the 
'TestConfig' class for accessing 'secret' + config = config_module.TestConfig + else: + # If not testing, directly use the 'config' module + config = config_module + + data: Union[Any, dict] = request.json + + try: + decoded = jwt.decode( + data["token"], config.FAIRHUB_SECRET, algorithms=["HS256"] + ) + except (jwt.ExpiredSignatureError, jwt.DecodeError, jwt.InvalidSignatureError): + return Response(status=401) + user = model.User.query.filter( + model.User.email_address == decoded["email"] + ).first() + if not user: + raise ValidationError("Email doesnt exist") + + if data["token"] != user.password_reset_token: + return "Invalid token", 400 + + validate_pass = user.check_password(data["new_password"]) + if validate_pass: + return "old and new password can not be same. Please select a new one", 422 + + def confirm_new_password(instance): + new_password = data["new_password"] + confirm_password = instance + + if new_password != confirm_password: + raise ValidationError("New password and confirm password do not match") + + return True + + schema = { + "type": "object", + "required": ["new_password", "confirm_password", "token"], + "additionalProperties": False, + "properties": { + "new_password": {"type": "string", "minLength": 1}, + "token": {"type": "string", "minLength": 1}, + "confirm_password": { + "type": "string", + "minLength": 1, + "format": "password confirmation", + }, + }, + } + format_checker = FormatChecker() + + # format_checker.checks("current password")(validate_current_password) + format_checker.checks("password confirmation")(confirm_new_password) + + try: + validate( + instance=request.json, schema=schema, format_checker=format_checker + ) + except ValidationError as e: + return e.message, 400 + + user.set_password(data["new_password"]) + model.db.session.commit() + + user.update_password_reset(None) + model.db.session.commit() + + # email_address = user.email_address if user else "" + # first_name = user.user_details.first_name if user else "" + 
# last_name = user.user_details.last_name if user else "" + # if os.environ.get("FLASK_ENV") != "testing": + # if g.gb.is_on("email-verification"): + # if user: + # reset_password( + # email_address, + # first_name, + # last_name, + # ) + + return "Password reset successfully", 200 + + +frozen_date: Union[datetime.datetime, None] = None + + +def set_now(now: Union[datetime.datetime, None]) -> None: + global frozen_date + frozen_date = now + + +def get_now() -> datetime.datetime: + if frozen_date: + return frozen_date + return datetime.datetime.now(datetime.timezone.utc) diff --git a/apis/contributor.py b/apis/contributor.py index 9f282312..11a5ab42 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -8,6 +8,12 @@ from .authentication import is_granted +# import os + + +# from modules.invitation import send_access_contributors, send_invitation_study + + api = Namespace("Contributor", description="Contributors", path="/") @@ -20,7 +26,7 @@ @api.route("/study//contributor") -class AddContributor(Resource): +class AllContributors(Resource): @api.doc("contributor list") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -48,8 +54,10 @@ def post(self, study_id: int): email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] - contributor_ = None - + # contributor_ = None + # study_name = study_obj.title + # first_name = user.user_details.first_name if user else "" + # last_name = user.user_details.last_name if user else "" try: if user: contributor_ = study_obj.add_user_to_study(user, permission) @@ -59,6 +67,23 @@ def post(self, study_id: int): except model.StudyException as ex: return ex.args[0], 409 model.db.session.commit() + # if os.environ.get("FLASK_ENV") != "testing": + # if g.gb.is_on("email-verification"): + # if user: + # send_access_contributors( + # email_address, + # study_obj, + # first_name, + # last_name, + # contributor_.permission, + # ) + # 
else: + # send_invitation_study( + # email_address, + # contributor_.token, + # study_name, + # contributor_.permission, + # ) return contributor_.to_dict(), 201 @@ -100,7 +125,6 @@ def put(self, study_id: int, user_id: int): if not can_grant: return f"User cannot grant {permission}", 403 - # TODO: Owners downgrading themselves if user != g.user: grantee_level = list(grants.keys()).index(grantee.permission) # 1 new_level: int = list(grants.keys()).index(str(permission)) # 2 diff --git a/apis/dashboard.py b/apis/dashboard.py index 061abf5a..7b322867 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -9,10 +9,9 @@ import caching import model from modules.etl import ModuleTransform, RedcapLiveTransform, RedcapReleaseTransform -from modules.etl.config import ( +from modules.etl.config import ( # redcapReleaseTransformConfig, moduleTransformConfigs, redcapLiveTransformConfig, - redcapReleaseTransformConfig, ) from .authentication import is_granted @@ -211,6 +210,75 @@ ) +def execute_transform( + transformConfig: Dict[str, Any], + redcap_project_view: Dict[str, Any], + redcap_project_dashboard: Dict[str, Any], + live: bool = False, +) -> Dict[str, Any]: + # Set report_ids for ETL + report_keys = [] + for report in redcap_project_dashboard["reports"]: + for i, report_config in enumerate(transformConfig["reports"]): + if ( + len(report["report_id"]) > 0 + and report["report_key"] == report_config["key"] + ): + report_keys.append(report["report_key"]) + transformConfig["reports"][i]["kwdargs"]["report_id"] = report[ + "report_id" + ] + + # Remove Unused Reports + transformConfig["reports"] = [ + report for report in transformConfig["reports"] if report["key"] in report_keys + ] + + # Set Post Transform Merge + index_columns, post_transform_merges = transformConfig["post_transform_merge"] + transformConfig["post_transform_merge"] = ( + index_columns, + [ + (report_key, transform_kwdargs) + for report_key, transform_kwdargs in post_transform_merges + if report_key in 
report_keys + ], + ) + + # Execute REDCap Data Transform + if not live: + redcap_etl_config = transformConfig + redcapTransform = RedcapReleaseTransform(redcap_etl_config).run() + else: + transformConfig["redcap_api_url"] = redcap_project_view["api_url"] + transformConfig["redcap_api_key"] = redcap_project_view["api_key"] + redcap_etl_config = transformConfig + redcapTransform = RedcapLiveTransform(redcap_etl_config).run() + + # Execute Dashboard Module Transforms + for dashboard_module in redcap_project_dashboard["modules"]: + if dashboard_module["selected"]: + mergedTransform = redcapTransform.merged + transform, module_etl_config = moduleTransformConfigs[ + dashboard_module["id"] + ] + moduleTransform = ModuleTransform(module_etl_config) + transformed = getattr(moduleTransform, transform)( + mergedTransform + ).transformed + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": transformed, + } + else: + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": [], + } + + return redcap_project_dashboard + + @api.route("/study//dashboard") class RedcapProjectDashboards(Resource): @api.doc("Get all study dashboards") @@ -387,13 +455,6 @@ def get(self, study_id: str, dashboard_id: str): if not is_granted("view", study): return "Access denied, you can not view this dashboard", 403 - # Retrieve Dashboard Redis Cache if Available - # cached_redcap_project_dashboard = caching.cache.get( - # f"$study_id#{study_id}$dashboard_id#{dashboard_id}" - # ) - # if cached_redcap_project_dashboard is not None: - # return cached_redcap_project_dashboard, 201 - # Get Base Transform Config for ETL - Live transformConfig = redcapLiveTransformConfig @@ -412,66 +473,21 @@ def get(self, study_id: str, dashboard_id: str): ) redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() - # Set report_ids for ETL - report_keys = [] - for report in redcap_project_dashboard["reports"]: - for i, report_config in 
enumerate(transformConfig["reports"]): - if ( - len(report["report_id"]) > 0 - and report["report_key"] == report_config["key"] - ): - report_keys.append(report["report_key"]) - transformConfig["reports"][i]["kwdargs"]["report_id"] = report[ - "report_id" - ] - - # Remove Unused Reports - transformConfig["reports"] = [ - report - for report in redcapLiveTransformConfig["reports"] - if report["key"] in report_keys - ] - - # Set Post Transform Merge - index_columns, post_transform_merges = transformConfig["post_transform_merge"] - transformConfig["post_transform_merge"] = ( - index_columns, - [ - (report_key, transform_kwdargs) - for report_key, transform_kwdargs in post_transform_merges - if report_key in report_keys - ], - ) - - # Set REDCap API Config - transformConfig["redcap_api_url"] = redcap_project_view["api_url"] - transformConfig["redcap_api_key"] = redcap_project_view["api_key"] - # Finalize ETL Config - redcap_etl_config = transformConfig - - redcapTransform = RedcapLiveTransform(redcap_etl_config) - - # Execute Dashboard Module Transforms - for dashboard_module in redcap_project_dashboard["modules"]: - if dashboard_module["selected"]: - mergedTransform = redcapTransform.merged - transform, module_etl_config = moduleTransformConfigs[ - dashboard_module["id"] - ] - moduleTransform = ModuleTransform(module_etl_config) - transformed = getattr(moduleTransform, transform)( - mergedTransform - ).transformed - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": transformed, - } - else: - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": [], - } + transformConfig = redcapLiveTransformConfig + redcap_project_dashboard = execute_transform( + transformConfig, + redcap_project_view, + redcap_project_dashboard, + live=True, + ) + # transformConfig = redcapReleaseTransformConfig + # redcap_project_dashboard = execute_transform( + # transformConfig, + # redcap_project_view, + # redcap_project_dashboard, + # 
live=False, + # ) # Create Dashboard Redis Cache caching.cache.set( @@ -636,8 +652,6 @@ def get(self, study_id: str): """Get REDCap project dashboard""" model.db.session.flush() study = model.db.session.query(model.Study).get(study_id) - # if not is_granted("view", study): - # return "Access denied, you can not view this dashboard", 403 # Get Dashboard redcap_project_dashboards_query = model.StudyDashboard.query.filter_by( @@ -657,7 +671,14 @@ def get(self, study_id: str): # Public Dashboard ID dashboard_id = redcap_project_dashboard["id"] - # Retrieve Dashboard Redis Cache if Available + # Get REDCap Project + redcap_id = redcap_project_dashboard["redcap_id"] + redcap_project_view_query: Any = model.db.session.query(model.StudyRedcap).get( + redcap_id + ) + redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() + + # # Retrieve Dashboard Redis Cache if Available # cached_redcap_project_dashboard = caching.cache.get( # f"$study_id#{study_id}$dashboard_id#{dashboard_id}#public" # ) @@ -668,66 +689,14 @@ def get(self, study_id: str): # No Cache, Do ETL # - # Get Base Transform Config for ETL - Release - transformConfig = redcapReleaseTransformConfig - - # Set report_ids for ETL - report_keys = [] - for report in redcap_project_dashboard["reports"]: - for i, report_config in enumerate(transformConfig["reports"]): - if ( - len(report["report_id"]) > 0 - and report["report_key"] == report_config["key"] - ): - report_keys.append(report["report_key"]) - transformConfig["reports"][i]["kwdargs"]["report_id"] = report[ - "report_id" - ] - - # Remove Unused Reports - transformConfig["reports"] = [ - report - for report in redcapLiveTransformConfig["reports"] - if report["key"] in report_keys - ] - - # Set Post Transform Merge - index_columns, post_transform_merges = transformConfig["post_transform_merge"] - transformConfig["post_transform_merge"] = ( - index_columns, - [ - (report_key, transform_kwdargs) - for report_key, transform_kwdargs in 
post_transform_merges - if report_key in report_keys - ], - ) - # Finalize ETL Config - redcap_etl_config = transformConfig - - # Execute REDCap Release ETL - redcapTransform = RedcapReleaseTransform(redcap_etl_config) - - # Execute Dashboard Module Transforms - for dashboard_module in redcap_project_dashboard["modules"]: - if dashboard_module["selected"]: - mergedTransform = redcapTransform.merged - transform, module_etl_config = moduleTransformConfigs[ - dashboard_module["id"] - ] - moduleTransform = ModuleTransform(module_etl_config) - transformed = getattr(moduleTransform, transform)( - mergedTransform - ).transformed - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": transformed, - } - else: - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": [], - } + transformConfig = redcapLiveTransformConfig + redcap_project_dashboard = execute_transform( + transformConfig, + redcap_project_view, + redcap_project_dashboard, + live=True, + ) # Create Dashboard Redis Cache caching.cache.set( diff --git a/apis/dataset.py b/apis/dataset.py index 5524583c..68194dfd 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -31,8 +31,7 @@ "id": fields.String(required=True), "updated_on": fields.String(required=True), "created_at": fields.String(required=True), - "dataset_versions": fields.Nested(dataset_versions_model, required=True), - "latest_version": fields.String(required=True), + "latest_version": fields.Boolean(required=True), "title": fields.String(required=True), "description": fields.String(required=True), }, @@ -43,7 +42,7 @@ class DatasetList(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset) + # @api.marshal_with(dataset) @api.doc("view datasets") def get(self, study_id): study = model.Study.query.get(study_id) diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py deleted file mode 100644 index 
4bf52f39..00000000 --- a/apis/dataset_metadata/dataset_access.py +++ /dev/null @@ -1,72 +0,0 @@ -"""API for dataset access metadata""" - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_access = api.model( - "DatasetAccess", - { - "id": fields.String(required=True), - "type": fields.String(required=True), - "description": fields.String(required=True), - "url": fields.String(required=True), - "url_last_checked": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/access") -class DatasetAccessResource(Resource): - """Dataset Access Resource""" - - @api.doc("access") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_access) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset access""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_access_ = dataset_.dataset_access - return dataset_access_.to_dict(), 200 - - @api.doc("update access") - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Update dataset access""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "description": {"type": "string", "minLength": 1}, - "type": {"type": "string", "minLength": 1}, - "url": {"type": "string"}, - "url_last_checked": {"type": ["integer", "null"]}, - }, - "required": [ - "description", - "type", - "url", - "url_last_checked", - ], - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return 
err.message, 400 - - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_access.update(request.json) - model.db.session.commit() - return dataset_.dataset_access.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_access_rights.py b/apis/dataset_metadata/dataset_access_rights.py new file mode 100644 index 00000000..a2b50b38 --- /dev/null +++ b/apis/dataset_metadata/dataset_access_rights.py @@ -0,0 +1,209 @@ +"""API for dataset access and rights metadata""" + +from typing import Any, Union + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_access_rights = api.model( + "DatasetAccessRights", + { + "access": fields.Nested( + api.model( + "Access", + { + "id": fields.String(required=True), + "type": fields.String(required=True), + "description": fields.String(required=True), + "url": fields.String(required=True), + "url_last_checked": fields.Integer(required=True), + }, + ) + ), + "rights": fields.Nested( + api.model( + "Rights", + { + "id": fields.String(required=True), + "rights": fields.String(required=True), + "uri": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_scheme": fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), + "license_text": fields.String(required=True), + }, + ) + ), + }, +) + + +@api.route("/study//dataset//metadata/access-rights") +class DatasetAccessRights(Resource): + """Dataset Access and Rights Resource""" + + @api.doc("access") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_access_rights) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset access""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_access_ = dataset_.dataset_access + dataset_rights_ 
= dataset_.dataset_rights + return { + "access": dataset_access_.to_dict(), + "rights": [d.to_dict() for d in dataset_rights_], + }, 200 + + @api.doc("update access") + @api.marshal_with(dataset_access_rights) + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Update dataset access""" + study_obj = model.Study.query.get(study_id) + data: Union[Any, dict] = request.json + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "rights": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + "rights": {"type": "string", "minLength": 1}, + "uri": {"type": "string"}, + "license_text": {"type": "string"}, + }, + "required": [ + "identifier", + "identifier_scheme", + "rights", + "uri", + "license_text", + ], + }, + "uniqueItems": True, + }, + "access": { + "type": "object", + "additionalProperties": False, + "properties": { + "description": {"type": "string", "minLength": 1}, + "type": {"type": "string", "minLength": 1}, + "url": {"type": "string"}, + "url_last_checked": {"type": ["integer", "null"]}, + }, + "required": [ + "description", + "type", + "url", + "url_last_checked", + ], + }, + }, + "required": ["rights", "access"], + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_access.update(data["access"]) + list_of_rights = [] + for i in data["rights"]: + if "id" in i and i["id"]: + dataset_rights_ = model.DatasetRights.query.get(i["id"]) + if not 
dataset_rights_: + return f"Study link {i['id']} Id is not found", 404 + dataset_rights_.update(i) + list_of_rights.append(dataset_rights_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_rights_ = model.DatasetRights.from_data(dataset_, i) + model.db.session.add(dataset_rights_) + list_of_rights.append(dataset_rights_.to_dict()) + model.db.session.commit() + return { + "access": dataset_.dataset_access.to_dict(), + "rights": list_of_rights, + }, 200 + + +@api.route("/study//dataset//metadata/rights") +class DatasetRightsResource(Resource): + """Dataset Rights Resource""" + + @api.doc("update rights") + @api.response(201, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): + """Update dataset rights""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + "rights": {"type": "string", "minLength": 1}, + "uri": {"type": "string"}, + "license_text": {"type": "string"}, + }, + "required": [ + "identifier", + "identifier_scheme", + "rights", + "uri", + "license_text", + ], + }, + "uniqueItems": True, + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data: Union[Any, dict] = request.json + data_obj = model.Dataset.query.get(dataset_id) + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_rights_ = model.DatasetRights.query.get(i["id"]) + if not dataset_rights_: + return f"Study link {i['id']} Id is not found", 404 + dataset_rights_.update(i) + list_of_elements.append(dataset_rights_.to_dict()) + elif "id" not in i or not 
i["id"]: + dataset_rights_ = model.DatasetRights.from_data(data_obj, i) + model.db.session.add(dataset_rights_) + list_of_elements.append(dataset_rights_.to_dict()) + model.db.session.commit() + return list_of_elements, 200 diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 5d6b5e07..085d1f3e 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -28,7 +28,7 @@ class DatasetAlternateIdentifierResource(Resource): @api.doc("identifier") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(dataset_identifier) + @api.marshal_with(dataset_identifier) def get(self, study_id: int, dataset_id: int): # pylint: disable = unused-argument """Get dataset alternate identifier""" dataset_ = model.Dataset.query.get(dataset_id) @@ -38,6 +38,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable = unused-argum @api.doc("update identifier") @api.response(201, "Success") @api.response(400, "Validation Error") + @api.marshal_with(dataset_identifier) def post(self, study_id: int, dataset_id: int): """Update dataset alternate identifier""" study_obj = model.Study.query.get(study_id) diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py deleted file mode 100644 index 958822b2..00000000 --- a/apis/dataset_metadata/dataset_consent.py +++ /dev/null @@ -1,84 +0,0 @@ -"""API for dataset consent metadata""" - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_consent = api.model( - "DatasetConsent", - { - "id": fields.String(required=True), - "type": fields.String(required=True), - "noncommercial": fields.Boolean(required=True), - "geog_restrict": 
fields.Boolean(required=True), - "research_type": fields.Boolean(required=True), - "genetic_only": fields.Boolean(required=True), - "no_methods": fields.Boolean(required=True), - "details": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/consent") -class DatasetConsentResource(Resource): - """Dataset Consent Resource""" - - @api.doc("consent") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_consent) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset consent""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return dataset_consent_.to_dict(), 200 - - @api.doc("update consent") - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int, dataset_id: int): - """Update dataset consent""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "type": {"type": "string", "minLength": 1}, - "details": { - "type": "string", - }, - "genetic_only": {"type": "boolean"}, - "geog_restrict": {"type": "boolean"}, - "no_methods": {"type": "boolean"}, - "noncommercial": {"type": "boolean"}, - "research_type": {"type": "boolean"}, - }, - "required": [ - "type", - "details", - "genetic_only", - "geog_restrict", - "no_methods", - "noncommercial", - "research_type", - ], - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_consent.update(data) - model.db.session.commit() - return dataset_.dataset_consent.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_contributor.py 
b/apis/dataset_metadata/dataset_contributor.py deleted file mode 100644 index 41336079..00000000 --- a/apis/dataset_metadata/dataset_contributor.py +++ /dev/null @@ -1,300 +0,0 @@ -"""API for dataset contributor metadata""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_contributor = api.model( - "DatasetContributor", - {}, -) - - -@api.route("/study//dataset//metadata/contributor") -class DatasetContributorResource(Resource): - """Dataset Contributor Resource""" - - @api.doc("contributor") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_contributor) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset contributor""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_contributor_ = dataset_.dataset_contributors - - return [ - d.to_dict() for d in dataset_contributor_ if not d.to_dict()["creator"] - ], 200 - - @api.doc("update contributor") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset contributor""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, can't modify dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "contributor_type": { - "type": "string", - "minLength": 1, - }, - "given_name": { - "type": "string", - "minLength": 1, - }, - "family_name": {"type": ["string", "null"]}, - "name_identifier": { - "type": "string", - "minLength": 1, - }, - "name_identifier_scheme": { - "type": "string", - "minLength": 1, - }, - 
"name_identifier_scheme_uri": { - "type": "string", - }, - "name_type": { - "type": "string", - "enum": [ - "Personal", - "Organizational", - ], - "minLength": 1, - }, - "affiliations": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "name": { - "type": "string", - }, - "identifier": { - "type": "string", - }, - "scheme": { - "type": "string", - }, - "scheme_uri": { - "type": "string", - }, - }, - }, - "uniqueItems": True, - }, - }, - "required": [ - "contributor_type", - "name_type", - "given_name", - "affiliations", - "name_identifier", - "name_identifier_scheme", - ], - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - i["creator"] = False - if "id" in i and i["id"]: - dataset_contributor_ = model.DatasetContributor.query.get(i["id"]) - if not dataset_contributor_: - return f"Study link {i['id']} Id is not found", 404 - dataset_contributor_.update(i) - list_of_elements.append(dataset_contributor_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_contributor_ = model.DatasetContributor.from_data(data_obj, i) - model.db.session.add(dataset_contributor_) - list_of_elements.append(dataset_contributor_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route( - "/study//dataset//metadata/contributor/" -) -class DatasetContributorDelete(Resource): - """Dataset Contributor Delete Resource""" - - @api.doc("delete contributor") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - contributor_id: int, - ): - """Delete dataset contributor""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make 
any change in dataset metadata", 403 - contributor_ = model.DatasetContributor.query.get(contributor_id) - - model.db.session.delete(contributor_) - model.db.session.commit() - - return Response(status=204) - - -@api.route("/study//dataset//metadata/creator") -class DatasetCreatorResource(Resource): - """Dataset Creator Resource""" - - @api.doc("creator") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_contributor) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset creator""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_creator_ = dataset_.dataset_contributors - # TODO d.creator - return [d.to_dict() for d in dataset_creator_ if d.to_dict()["creator"]], 200 - - @api.doc("update creator") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset creator""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "given_name": { - "type": "string", - "minLength": 1, - }, - "family_name": {"type": ["string", "null"]}, - "name_identifier": { - "type": "string", - "minLength": 1, - }, - "name_identifier_scheme": { - "type": "string", - "minLength": 1, - }, - "name_identifier_scheme_uri": { - "type": "string", - }, - "name_type": { - "type": "string", - "enum": [ - "Personal", - "Organizational", - ], - "minLength": 1, - }, - "affiliations": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "name": { - "type": "string", - }, - "identifier": { - "type": "string", - }, - "scheme": { - "type": "string", - }, - "scheme_uri": { - "type": "string", - }, - 
}, - }, - "uniqueItems": True, - }, - }, - "required": [ - "name_type", - "given_name", - "affiliations", - "name_identifier", - "name_identifier_scheme", - ], - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - i["creator"] = True - if "id" in i and i["id"]: - i["contributor_type"] = None - dataset_creator_ = model.DatasetContributor.query.get(i["id"]) - if not dataset_creator_: - return f"Study link {i['id']} Id is not found", 404 - dataset_creator_.update(i) - list_of_elements.append(dataset_creator_.to_dict()) - elif "id" not in i or not i["id"]: - i["contributor_type"] = None - dataset_creator_ = model.DatasetContributor.from_data(data_obj, i) - model.db.session.add(dataset_creator_) - list_of_elements.append(dataset_creator_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/creator/") -class DatasetCreatorDelete(Resource): - @api.doc("delete creator") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - creator_id: int, - ): - """Delete dataset creator""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_creator_ = model.DatasetContributor.query.get(creator_id) - model.db.session.delete(dataset_creator_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_data_management.py b/apis/dataset_metadata/dataset_data_management.py new file mode 100644 index 00000000..2cea865e --- /dev/null +++ b/apis/dataset_metadata/dataset_data_management.py @@ -0,0 +1,219 @@ +"""API for dataset consent metadata""" +import typing + +from 
flask import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_data_management = api.model( + "DatasetDataManagement", + { + "consent": fields.Nested( + api.model( + "DatasetConsent", + { + "id": fields.String(required=True), + "type": fields.String(required=True), + "noncommercial": fields.Boolean(required=True), + "geog_restrict": fields.Boolean(required=True), + "research_type": fields.Boolean(required=True), + "genetic_only": fields.Boolean(required=True), + "no_methods": fields.Boolean(required=True), + "details": fields.String(required=True), + }, + ) + ), + "subjects": fields.List( + fields.Nested( + api.model( + "DatasetSubjects", + { + "id": fields.String(required=True), + "subject": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "value_uri": fields.String(required=True), + "classification_code": fields.String(required=True), + }, + ) + ) + ), + "deident": fields.Nested( + api.model( + "DatasetDeIdentLevel", + { + "id": fields.String(required=True), + "type": fields.String(required=True), + "direct": fields.Boolean(required=True), + "hipaa": fields.Boolean(required=True), + "dates": fields.Boolean(required=True), + "nonarr": fields.Boolean(required=True), + "k_anon": fields.Boolean(required=True), + "details": fields.String(required=True), + }, + ) + ), + }, +) + + +@api.route("/study//dataset//metadata/data-management") +class DatasetDataManagement(Resource): + """Dataset Data management Resource""" + + @api.doc("consent") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(dataset_consent) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset consent""" + dataset_ = model.Dataset.query.get(dataset_id) + 
dataset_consent_ = dataset_.dataset_consent + de_ident_level_ = dataset_.dataset_de_ident_level + dataset_subject_ = dataset_.dataset_subject + return { + "consent": dataset_consent_.to_dict(), + "deident": de_ident_level_.to_dict(), + "subjects": [d.to_dict() for d in dataset_subject_], + }, 200 + + @api.doc("update consent") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): + """Update dataset consent""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "consent": { + "type": "object", + "additionalProperties": False, + "properties": { + "type": {"type": "string", "minLength": 1}, + "details": {"type": "string"}, + "genetic_only": {"type": "boolean"}, + "geog_restrict": {"type": "boolean"}, + "no_methods": {"type": "boolean"}, + "noncommercial": {"type": "boolean"}, + "research_type": {"type": "boolean"}, + }, + "required": [ + "type", + "details", + "genetic_only", + "geog_restrict", + "no_methods", + "noncommercial", + "research_type", + ], + }, + "subjects": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "classification_code": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "subject": {"type": "string", "minLength": 1}, + "value_uri": {"type": "string"}, + }, + "required": [ + "subject", + "scheme", + "scheme_uri", + "value_uri", + "classification_code", + ], + }, + "uniqueItems": True, + }, + "deident": { + "type": "object", + "additionalProperties": False, + "properties": { + "type": {"type": "string", "minLength": 1}, + "details": {"type": "string"}, + "direct": {"type": "boolean"}, + "hipaa": {"type": "boolean"}, + "dates": {"type": "boolean"}, 
+ "k_anon": {"type": "boolean"}, + "nonarr": {"type": "boolean"}, + }, + "required": [ + "type", + "details", + "direct", + "hipaa", + "dates", + "k_anon", + "nonarr", + ], + }, + }, + "required": ["consent", "subjects", "deident"], + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data: typing.Union[dict, typing.Any] = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_consent.update(data["consent"]) + dataset_.dataset_de_ident_level.update(data["deident"]) + list_of_subjects = [] + for i in data["subjects"]: + if "id" in i and i["id"]: + dataset_subject_ = model.DatasetSubject.query.get(i["id"]) + if not dataset_subject_: + return f"Study link {i['id']} Id is not found", 404 + dataset_subject_.update(i) + list_of_subjects.append(dataset_subject_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_subject_ = model.DatasetSubject.from_data(dataset_, i) + model.db.session.add(dataset_subject_) + list_of_subjects.append(dataset_subject_.to_dict()) + model.db.session.commit() + return { + "consent": dataset_.dataset_consent.to_dict(), + "deident": dataset_.dataset_de_ident_level.to_dict(), + "subjects": list_of_subjects, + }, 200 + + +@api.route("/study//dataset//metadata/subject/") +class DatasetSubjectUpdate(Resource): + """Dataset Subject Update Resource""" + + @api.doc("delete subject") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, # pylint: disable= unused-argument + dataset_id: int, # pylint: disable= unused-argument + subject_id: int, + ): + """Delete dataset subject""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can't make change in dataset metadata", 403 + dataset_subject_ = model.DatasetSubject.query.get(subject_id) + + model.db.session.delete(dataset_subject_) + model.db.session.commit() + + return 
Response(status=204) diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py deleted file mode 100644 index 947c356d..00000000 --- a/apis/dataset_metadata/dataset_date.py +++ /dev/null @@ -1,112 +0,0 @@ -"""APIs for dataset date metadata""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_date = api.model( - "DatasetDate", - { - "id": fields.String(required=True), - "date": fields.String(required=True), - "type": fields.String(required=True), - "information": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/date") -class DatasetDateResource(Resource): - """Dataset Date Resource""" - - @api.doc("date") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_date) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset date""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_date_ = dataset_.dataset_date - return [d.to_dict() for d in dataset_date_], 200 - - @api.doc("update date") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset date""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "date": { - "type": "integer", - }, - "type": { - "type": "string", - "minLength": 1, - }, - "information": { - "type": "string", - }, - }, - "required": ["date", "type", "information"], - }, - "uniqueItems": True, - } - - 
try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_date_ = model.DatasetDate.query.get(i["id"]) - if not dataset_date_: - return f"Study link {i['id']} Id is not found", 404 - dataset_date_.update(i) - list_of_elements.append(dataset_date_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_date_ = model.DatasetDate.from_data(data_obj, i) - model.db.session.add(dataset_date_) - list_of_elements.append(dataset_date_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/date/") -class DatasetDateDeleteResource(Resource): - """Dataset Date Delete Resource""" - - @api.doc("delete date") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, study_id: int, dataset_id: int, date_id: int - ): # pylint: disable= unused-argument - """Delete dataset date""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - date_ = model.DatasetDate.query.get(date_id) - - model.db.session.delete(date_) - model.db.session.commit() - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py deleted file mode 100644 index a9f7c7f5..00000000 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ /dev/null @@ -1,84 +0,0 @@ -"""APIs for dataset de-identification level""" - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -de_ident_level = api.model( - "DatasetDeIdentLevel", - { - 
"id": fields.String(required=True), - "type": fields.String(required=True), - "direct": fields.Boolean(required=True), - "hipaa": fields.Boolean(required=True), - "dates": fields.Boolean(required=True), - "nonarr": fields.Boolean(required=True), - "k_anon": fields.Boolean(required=True), - "details": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/de-identification-level") -class DatasetDeIdentLevelResource(Resource): - """Dataset De-Identification Level Resource""" - - @api.doc("de_ident_level") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(de_ident_level) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset de-identification level""" - dataset_ = model.Dataset.query.get(dataset_id) - de_ident_level_ = dataset_.dataset_de_ident_level - return de_ident_level_.to_dict(), 200 - - @api.doc("update ident level") - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int, dataset_id: int): - """Update dataset de-identification level""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "type": {"type": "string", "minLength": 1}, - "details": { - "type": "string", - }, - "direct": {"type": "boolean"}, - "hipaa": {"type": "boolean"}, - "dates": {"type": "boolean"}, - "k_anon": {"type": "boolean"}, - "nonarr": {"type": "boolean"}, - }, - "required": [ - "type", - "details", - "direct", - "hipaa", - "dates", - "k_anon", - "nonarr", - ], - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_de_ident_level.update(data) - 
model.db.session.commit() - return dataset_.dataset_de_ident_level.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py deleted file mode 100644 index ddd4c56a..00000000 --- a/apis/dataset_metadata/dataset_description.py +++ /dev/null @@ -1,137 +0,0 @@ -"""API endpoints for dataset description""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_description = api.model( - "DatasetDescription", - { - "id": fields.String(required=True), - "description": fields.String(required=True), - "description_type": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/description") -class DatasetDescriptionResource(Resource): - """Dataset Description Resource""" - - @api.doc("description") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_description) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset description""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_description_ = dataset_.dataset_description - return [d.to_dict() for d in dataset_description_], 200 - - @api.doc("update description") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset description""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "description": { - "type": "string", - "minLength": 1, - }, - "type": { 
- "type": "string", - "enum": [ - "Abstract", - "Methods", - "SeriesInformation", - "TableOfContents", - "TechnicalInfo", - "Other", - ], - }, - }, - "required": ["description", "type"], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_description_ = model.DatasetDescription.query.get(i["id"]) - # if dataset_description_.type == "Abstract": - # return ( - # "Abstract type can not be modified", - # 403, - # ) - dataset_description_.update(i) - list_of_elements.append(dataset_description_.to_dict()) - elif "id" not in i or not i["id"]: - if i["type"] == "Abstract": - return ( - "Abstract type in description can not be given", - 403, - ) - dataset_description_ = model.DatasetDescription.from_data(data_obj, i) - model.db.session.add(dataset_description_) - list_of_elements.append(dataset_description_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - @api.route( - "/study//dataset//" - "metadata/description/" - ) - class DatasetDescriptionUpdate(Resource): - """Dataset Description Update Resource""" - - @api.doc("delete description") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - description_id: int, - ): - """Delete dataset description""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return ( - "Access denied, you can not make any change in dataset metadata", - 403, - ) - dataset_description_ = model.DatasetDescription.query.get(description_id) - if dataset_description_.type == "Abstract": - return ( - "Abstract description can not be deleted", - 403, - ) - model.db.session.delete(dataset_description_) - 
model.db.session.commit() - - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py deleted file mode 100644 index 7110e5b9..00000000 --- a/apis/dataset_metadata/dataset_funder.py +++ /dev/null @@ -1,125 +0,0 @@ -"""API endpoints for dataset funder""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_funder = api.model( - "DatasetFunder", - { - "id": fields.String(required=True), - "name": fields.String(required=True), - "identifier": fields.String(required=True), - "identifier_type": fields.String(required=True), - "identifier_scheme_uri": fields.String(required=True), - "award_number": fields.String(required=True), - "award_uri": fields.String(required=True), - "award_title": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/funder") -class DatasetFunderResource(Resource): - """Dataset Funder Resource""" - - @api.doc("funder") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(dataset_funder) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset funder""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_funder_ = dataset_.dataset_funder - return [d.to_dict() for d in dataset_funder_], 200 - - @api.doc("update funder") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Update dataset funder""" - data: Union[Any, dict] = request.json - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - 
"type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "award_number": {"type": "string", "minLength": 1}, - "award_title": {"type": "string"}, - "award_uri": {"type": "string"}, - "identifier": {"type": "string", "minLength": 1}, - "identifier_scheme_uri": {"type": "string"}, - "identifier_type": {"type": ["string", "null"]}, - }, - "required": [ - "name", - "award_number", - "award_title", - "award_uri", - "identifier", - "identifier_scheme_uri", - "identifier_type", - ], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_funder_ = model.DatasetFunder.query.get(i["id"]) - if not dataset_funder_: - return f"Study link {i['id']} Id is not found", 404 - dataset_funder_.update(i) - list_of_elements.append(dataset_funder_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_funder_ = model.DatasetFunder.from_data(data_obj, i) - model.db.session.add(dataset_funder_) - list_of_elements.append(dataset_funder_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/funder/") -class DatasetFunderUpdate(Resource): - """Dataset Funder Update Resource""" - - @api.doc("delete funder") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - funder_id: int, - ): - """Delete dataset funder""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_funder_ = model.DatasetFunder.query.get(funder_id) - - 
model.db.session.delete(dataset_funder_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_general_information.py b/apis/dataset_metadata/dataset_general_information.py new file mode 100644 index 00000000..4d81987d --- /dev/null +++ b/apis/dataset_metadata/dataset_general_information.py @@ -0,0 +1,302 @@ +"""API for dataset title metadata""" + +from typing import Any, Union + +from flask import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_general_information = api.model( + "DatasetGeneralInformation", + { + "titles": fields.List( + fields.Nested( + api.model( + "DatasetTitle", + { + "id": fields.String(required=True), + "title": fields.String(required=True), + "type": fields.String(required=True), + }, + ) + ) + ), + "descriptions": fields.List( + fields.Nested( + api.model( + "DatasetDescription", + { + "id": fields.String(required=True), + "description": fields.String(required=True), + "type": fields.String(required=True), + }, + ) + ) + ), + "dates": fields.List( + fields.Nested( + api.model( + "DatasetDate", + { + "id": fields.String(required=True), + "date": fields.Integer(required=True), + "type": fields.String(required=True), + "information": fields.String(required=True), + }, + ) + ) + ), + }, +) + + +@api.route("/study//dataset//metadata/general-information") +class DatasetGeneralInformation(Resource): + """Dataset General Information Resource""" + + @api.doc("title") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_general_information) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset title""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_title_ = 
dataset_.dataset_title + dataset_description_ = dataset_.dataset_description + dataset_date_ = dataset_.dataset_date + return { + "titles": [d.to_dict() for d in dataset_title_], + "descriptions": [d.to_dict() for d in dataset_description_], + "dates": [d.to_dict() for d in dataset_date_], + }, 200 + + @api.doc("update general information") + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_general_information) + def post(self, study_id: int, dataset_id: int): + """Update dataset title""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "titles": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "title": {"type": "string", "minLength": 1}, + "type": { + "type": "string", + "enum": [ + "MainTitle", + "AlternativeTitle", + "Subtitle", + "TranslatedTitle", + "OtherTitle", + ], + }, + }, + "required": ["title", "type"], + }, + }, + "descriptions": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "description": {"type": "string", "minLength": 1}, + "type": { + "type": "string", + "enum": [ + "Abstract", + "Methods", + "SeriesInformation", + "TableOfContents", + "TechnicalInfo", + "Other", + ], + }, + }, + "required": ["description", "type"], + }, + }, + "dates": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "date": {"type": "integer"}, + "type": {"type": "string", "minLength": 1}, + "information": {"type": "string"}, + }, + "required": ["date", "type", "information"], + }, + }, + }, + } + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: Union[Any, dict] = request.json + data_obj = 
model.Dataset.query.get(dataset_id) + list_of_titles = [] + for i in data["titles"]: + if "id" in i and i["id"]: + dataset_title_ = model.DatasetTitle.query.get(i["id"]) + dataset_title_.update(i) + list_of_titles.append(dataset_title_.to_dict()) + elif "id" not in i or not i["id"]: + if i["type"] == "MainTitle": + return ( + "Main Title type can not be given", + 403, + ) + dataset_title_ = model.DatasetTitle.from_data(data_obj, i) + model.db.session.add(dataset_title_) + list_of_titles.append(dataset_title_.to_dict()) + + list_of_description = [] + for i in data["descriptions"]: + if "id" in i and i["id"]: + dataset_description_ = model.DatasetDescription.query.get(i["id"]) + # if dataset_description_.type == "Abstract": + # return ( + # "Abstract type can not be modified", + # 403, + # ) + dataset_description_.update(i) + list_of_description.append(dataset_description_.to_dict()) + elif "id" not in i or not i["id"]: + if i["type"] == "Abstract": + return ( + "Abstract type in description can not be given", + 403, + ) + dataset_description_ = model.DatasetDescription.from_data(data_obj, i) + model.db.session.add(dataset_description_) + list_of_description.append(dataset_description_.to_dict()) + + list_of_dates = [] + for i in data["dates"]: + if "id" in i and i["id"]: + dataset_date_ = model.DatasetDate.query.get(i["id"]) + if not dataset_date_: + return f"Study link {i['id']} Id is not found", 404 + dataset_date_.update(i) + list_of_dates.append(dataset_date_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_date_ = model.DatasetDate.from_data(data_obj, i) + model.db.session.add(dataset_date_) + list_of_dates.append(dataset_date_.to_dict()) + + model.db.session.commit() + + return ( + { + "titles": list_of_titles, + "descriptions": list_of_description, + "dates": list_of_dates, + }, + 200, + ) + + +@api.route("/study//dataset//metadata/title/") +class DatasetTitleDelete(Resource): + """Dataset Title Update Resource""" + + @api.doc("delete title") + 
@api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + title_id: int, + ): + """Delete dataset title""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return ( + "Access denied, you can not make any change in dataset metadata", + 403, + ) + dataset_title_ = model.DatasetTitle.query.get(title_id) + if dataset_title_.type == "MainTitle": + return ( + "Main Title type can not be deleted", + 403, + ) + model.db.session.delete(dataset_title_) + model.db.session.commit() + return Response(status=204) + + +@api.route("/study//dataset//metadata/date/") +class DatasetDateDeleteResource(Resource): + """Dataset Date Delete Resource""" + + @api.doc("delete date") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, study_id: int, dataset_id: int, date_id: int + ): # pylint: disable= unused-argument + """Delete dataset date""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + date_ = model.DatasetDate.query.get(date_id) + + model.db.session.delete(date_) + model.db.session.commit() + return Response(status=204) + + @api.route( + "/study//dataset//" + "metadata/description/" + ) + class DatasetDescriptionUpdate(Resource): + """Dataset Description Update Resource""" + + @api.doc("delete description") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + description_id: int, + ): + """Delete dataset description""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return ( + "Access denied, you can not make any change in dataset metadata", + 403, + ) + dataset_description_ = 
model.DatasetDescription.query.get(description_id) + if dataset_description_.type == "Abstract": + return ( + "Abstract description can not be deleted", + 403, + ) + model.db.session.delete(dataset_description_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py deleted file mode 100644 index 2ca590ae..00000000 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ /dev/null @@ -1,73 +0,0 @@ -"""API endpoints for other dataset metadata""" - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_managing_organization = api.model( - "DatasetManagingOrganization", - { - "name": fields.String(required=True), - "identifier": fields.String(required=True), - "identifier_scheme": fields.String(required=True), - "identifier_scheme_uri": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/managing-organization") -class DatasetManagingOrganization(Resource): - """Dataset Publisher Resource""" - - @api.doc("publisher") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(dataset_managing_organization) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset publisher metadata""" - dataset_ = model.Dataset.query.get(dataset_id) - managing_organization_ = dataset_.dataset_managing_organization - return managing_organization_.to_dict(), 200 - - @api.doc("update organization") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(dataset_managing_organization) - def put(self, study_id: int, dataset_id: int): - """Update dataset managing organization metadata""" - study_obj = model.Study.query.get(study_id) - - 
if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "name": {"type": "string", "minLength": 1}, - "identifier": {"type": "string"}, - "identifier_scheme": {"type": "string"}, - "identifier_scheme_uri": {"type": "string"}, - }, - "required": [ - "name", - "identifier", - "identifier_scheme", - "identifier_scheme_uri", - ], - } - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_managing_organization.update(data) - - model.db.session.commit() - return dataset_.dataset_managing_organization.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 04246f97..c620d00f 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -1,24 +1,24 @@ """API endpoints for other dataset metadata""" from flask import request -from flask_restx import Resource +from flask_restx import Resource, fields from jsonschema import ValidationError, validate import model from apis.authentication import is_granted from apis.dataset_metadata_namespace import api -# dataset_other = api.model( -# "DatasetOther", -# { -# "language": fields.String(required=True), -# "size": fields.List(fields.String, required=True), -# "format": fields.List(fields.String, required=True), -# "standards_followed": fields.String(required=True), -# "acknowledgement": fields.String(required=True), -# "resource_type": fields.String(required=True), -# }, -# ) +dataset_other = api.model( + "DatasetOther", + { + "language": fields.String(required=False), + "size": fields.List(fields.String(required=False), required=True), + "format": fields.List(fields.String(required=False), required=True), + "standards_followed": 
fields.String(required=True), + "acknowledgement": fields.String(required=True), + "resource_type": fields.String(required=True), + }, +) @api.route("/study//dataset//metadata/other") @@ -28,7 +28,7 @@ class DatasetOtherResource(Resource): @api.doc("other") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(dataset_other) + @api.marshal_with(dataset_other) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument """Get dataset other metadata""" dataset_ = model.Dataset.query.get(dataset_id) @@ -42,7 +42,6 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume def put(self, study_id: int, dataset_id: int): """Update dataset other metadata""" study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 diff --git a/apis/dataset_metadata/dataset_related_identifier.py b/apis/dataset_metadata/dataset_related_identifier.py index 3e2ce088..cdede905 100644 --- a/apis/dataset_metadata/dataset_related_identifier.py +++ b/apis/dataset_metadata/dataset_related_identifier.py @@ -32,7 +32,7 @@ class DatasetRelatedIdentifierResource(Resource): @api.doc("related identifier") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(dataset_related_identifier) + @api.marshal_with(dataset_related_identifier) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument """Get dataset related identifier""" dataset_ = model.Dataset.query.get(dataset_id) @@ -42,6 +42,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.doc("update related identifier") @api.response(201, "Success") @api.response(400, "Validation Error") + @api.marshal_with(dataset_related_identifier) def post(self, study_id: int, dataset_id: int): """Update dataset related identifier""" study_obj = model.Study.query.get(study_id) 
diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py deleted file mode 100644 index 404558fc..00000000 --- a/apis/dataset_metadata/dataset_rights.py +++ /dev/null @@ -1,123 +0,0 @@ -"""API endpoints for dataset rights""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_rights = api.model( - "DatasetRights", - { - "id": fields.String(required=True), - "rights": fields.String(required=True), - "uri": fields.String(required=True), - "identifier": fields.String(required=True), - "identifier_scheme": fields.String(required=True), - "identifier_scheme_uri": fields.String(required=True), - "license_text": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/rights") -class DatasetRightsResource(Resource): - """Dataset Rights Resource""" - - @api.doc("rights") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_rights) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset rights""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_rights_ = dataset_.dataset_rights - print([d.to_dict() for d in dataset_rights_]) - return [d.to_dict() for d in dataset_rights_], 200 - - @api.doc("update rights") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset rights""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - 
"properties": { - "id": {"type": "string"}, - "identifier": {"type": "string"}, - "identifier_scheme": {"type": "string"}, - "identifier_scheme_uri": {"type": "string"}, - "rights": {"type": "string", "minLength": 1}, - "uri": {"type": "string"}, - "license_text": {"type": "string"}, - }, - "required": [ - "identifier", - "identifier_scheme", - "rights", - "uri", - "license_text", - ], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_rights_ = model.DatasetRights.query.get(i["id"]) - if not dataset_rights_: - return f"Study link {i['id']} Id is not found", 404 - dataset_rights_.update(i) - list_of_elements.append(dataset_rights_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_rights_ = model.DatasetRights.from_data(data_obj, i) - model.db.session.add(dataset_rights_) - list_of_elements.append(dataset_rights_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/rights/") -class DatasetRightsUpdate(Resource): - """Dataset Rights Update Resource""" - - @api.doc("delete rights") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - rights_id: int, - ): - """Delete dataset rights""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_rights_ = model.DatasetRights.query.get(rights_id) - - model.db.session.delete(dataset_rights_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py deleted 
file mode 100644 index 6aa372cc..00000000 --- a/apis/dataset_metadata/dataset_subject.py +++ /dev/null @@ -1,120 +0,0 @@ -"""API endpoints for dataset subject""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_subject = api.model( - "DatasetSubject", - { - "id": fields.String(required=True), - "subject": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - "value_uri": fields.String(required=True), - "classification_code": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/subject") -class DatasetSubjectResource(Resource): - """Dataset Subject Resource""" - - @api.doc("subject") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_subject) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset subject""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_subject_ = dataset_.dataset_subject - return [d.to_dict() for d in dataset_subject_], 200 - - @api.doc("update subject") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset subject""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can't modify dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "classification_code": {"type": "string"}, - "scheme": {"type": "string"}, - "scheme_uri": {"type": "string"}, - "subject": {"type": "string", "minLength": 1}, - 
"value_uri": {"type": "string"}, - }, - "required": [ - "subject", - "scheme", - "scheme_uri", - "value_uri", - "classification_code", - ], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_subject_ = model.DatasetSubject.query.get(i["id"]) - if not dataset_subject_: - return f"Study link {i['id']} Id is not found", 404 - dataset_subject_.update(i) - list_of_elements.append(dataset_subject_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_subject_ = model.DatasetSubject.from_data(data_obj, i) - model.db.session.add(dataset_subject_) - list_of_elements.append(dataset_subject_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/subject/") -class DatasetSubjectUpdate(Resource): - """Dataset Subject Update Resource""" - - @api.doc("delete subject") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, # pylint: disable= unused-argument - dataset_id: int, # pylint: disable= unused-argument - subject_id: int, - ): - """Delete dataset subject""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can't make change in dataset metadata", 403 - dataset_subject_ = model.DatasetSubject.query.get(subject_id) - - model.db.session.delete(dataset_subject_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_team.py b/apis/dataset_metadata/dataset_team.py new file mode 100644 index 00000000..d17aaeda --- /dev/null +++ b/apis/dataset_metadata/dataset_team.py @@ -0,0 +1,403 @@ +"""API for dataset contributor metadata""" + +from typing import Any, Union + +from flask 
import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_team = api.model( + "DatasetTeam", + { + "contributors": fields.List( + fields.Nested( + api.model( + "Contributor", + { + "id": fields.String(required=True), + "family_name": fields.String(), + "given_name": fields.String(required=True), + "name_type": fields.String(), + "name_identifier": fields.String(required=True), + "name_identifier_scheme": fields.String(required=True), + "name_identifier_scheme_uri": fields.String(required=True), + "creator": fields.Boolean(required=True), + "contributor_type": fields.String(), + "affiliations": fields.Raw(required=True), + "created_at": fields.Integer(required=True), + }, + ) + ) + ), + "creators": fields.List( + fields.Nested( + api.model( + "Creator", + { + "id": fields.String(required=True), + "family_name": fields.String(), + "given_name": fields.String(required=True), + "name_type": fields.String(), + "name_identifier": fields.String(required=True), + "name_identifier_scheme": fields.String(required=True), + "name_identifier_scheme_uri": fields.String(required=True), + "creator": fields.Boolean(required=True), + "contributor_type": fields.String(), + "affiliations": fields.Raw(required=True), + "created_at": fields.Integer(required=True), + }, + ) + ) + ), + "managing_organization": fields.Nested( + api.model( + "DatasetManagingOrganization", + { + "name": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_scheme": fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), + }, + ) + ), + "funders": fields.List( + fields.Nested( + api.model( + "Funders", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": 
fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), + "award_number": fields.String(required=True), + "award_uri": fields.String(required=True), + "award_title": fields.String(required=True), + }, + ) + ) + ), + }, +) + + +@api.route("/study//dataset//metadata/team") +class DatasetTeamResource(Resource): + """Dataset Team Resource""" + + @api.doc("team") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_team) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset creator""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_creator_ = dataset_.dataset_contributors + dataset_contributor_ = dataset_.dataset_contributors + dataset_funder_ = dataset_.dataset_funder + managing_organization_ = dataset_.dataset_managing_organization + return { + "creators": [ + d.to_dict() for d in dataset_creator_ if d.to_dict()["creator"] + ], + "contributors": [ + d.to_dict() for d in dataset_contributor_ if not d.to_dict()["creator"] + ], + "managing_organization": managing_organization_.to_dict(), + "funders": [d.to_dict() for d in dataset_funder_], + }, 200 + + @api.doc("update team") + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_team) + def post(self, study_id: int, dataset_id: int): + """Update dataset team""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "creators": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "given_name": {"type": "string", "minLength": 1}, + "family_name": {"type": ["string", "null"]}, + "name_identifier": {"type": "string", "minLength": 1}, + 
"name_identifier_scheme": { + "type": "string", + "minLength": 1, + }, + "name_identifier_scheme_uri": {"type": "string"}, + "name_type": { + "type": "string", + "enum": ["Personal", "Organizational"], + "minLength": 1, + }, + "affiliations": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "name": {"type": "string"}, + "identifier": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + }, + }, + "uniqueItems": True, + }, + }, + "required": [ + "name_type", + "given_name", + "affiliations", + "name_identifier", + "name_identifier_scheme", + ], + }, + }, + "contributors": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "contributor_type": {"type": "string", "minLength": 1}, + "given_name": {"type": "string", "minLength": 1}, + "family_name": {"type": ["string", "null"]}, + "name_identifier": {"type": "string", "minLength": 1}, + "name_identifier_scheme": { + "type": "string", + "minLength": 1, + }, + "name_identifier_scheme_uri": {"type": "string"}, + "name_type": { + "type": "string", + "enum": ["Personal", "Organizational"], + "minLength": 1, + }, + "affiliations": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "name": {"type": "string"}, + "identifier": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + }, + }, + "uniqueItems": True, + }, + }, + "required": [ + "contributor_type", + "name_type", + "given_name", + "affiliations", + "name_identifier", + "name_identifier_scheme", + ], + }, + }, + "funders": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "award_number": {"type": "string", "minLength": 1}, + "award_title": {"type": "string"}, + "award_uri": 
{"type": "string"}, + "identifier": {"type": "string", "minLength": 1}, + "identifier_scheme_uri": {"type": "string"}, + "identifier_type": {"type": ["string", "null"]}, + }, + "required": [ + "name", + "award_number", + "award_title", + "award_uri", + "identifier", + "identifier_scheme_uri", + "identifier_type", + ], + }, + "uniqueItems": True, + }, + "managing_organization": { + "type": "object", + "additionalProperties": False, + "properties": { + "name": {"type": "string", "minLength": 1}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + }, + "required": [ + "name", + "identifier", + "identifier_scheme", + "identifier_scheme_uri", + ], + }, + }, + "required": [ + "creators", + "contributors", + "funders", + "managing_organization", + ], + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: Union[Any, dict] = request.json + data_obj = model.Dataset.query.get(dataset_id) + + list_of_creator = [] + for i in data["creators"]: + i["creator"] = True + if "id" in i and i["id"]: + i["contributor_type"] = None + dataset_creator_ = model.DatasetContributor.query.get(i["id"]) + if not dataset_creator_: + return f"Study link {i['id']} Id is not found", 404 + dataset_creator_.update(i) + list_of_creator.append(dataset_creator_.to_dict()) + elif "id" not in i or not i["id"]: + i["contributor_type"] = None + dataset_creator_ = model.DatasetContributor.from_data(data_obj, i) + model.db.session.add(dataset_creator_) + list_of_creator.append(dataset_creator_.to_dict()) + + list_of_contributors = [] + for i in data["contributors"]: + i["creator"] = False + if "id" in i and i["id"]: + dataset_contributor_ = model.DatasetContributor.query.get(i["id"]) + if not dataset_contributor_: + return f"Study link {i['id']} Id is not found", 404 + dataset_contributor_.update(i) + list_of_contributors.append(dataset_contributor_.to_dict()) + elif "id" not in i 
or not i["id"]: + dataset_contributor_ = model.DatasetContributor.from_data(data_obj, i) + model.db.session.add(dataset_contributor_) + list_of_contributors.append(dataset_contributor_.to_dict()) + + list_of_funders = [] + for i in data["funders"]: + if "id" in i and i["id"]: + dataset_funder_ = model.DatasetFunder.query.get(i["id"]) + if not dataset_funder_: + return f"Study link {i['id']} Id is not found", 404 + dataset_funder_.update(i) + list_of_funders.append(dataset_funder_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_funder_ = model.DatasetFunder.from_data(data_obj, i) + model.db.session.add(dataset_funder_) + list_of_funders.append(dataset_funder_.to_dict()) + + data_obj.dataset_managing_organization.update(data["managing_organization"]) + model.db.session.commit() + return { + "creators": list_of_creator, + "contributors": list_of_contributors, + "managing_organization": data_obj.dataset_managing_organization.to_dict(), + "funders": list_of_funders, + }, 200 + + +@api.route( + "/study//dataset//metadata/contributor/" +) +class DatasetContributorDelete(Resource): + """Dataset Contributor Delete Resource""" + + @api.doc("delete contributor") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + contributor_id: int, + ): + """Delete dataset contributor""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + contributor_ = model.DatasetContributor.query.get(contributor_id) + + model.db.session.delete(contributor_) + model.db.session.commit() + + return Response(status=204) + + +@api.route("/study//dataset//metadata/creator/") +class DatasetCreatorDelete(Resource): + @api.doc("delete creator") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + 
dataset_id: int, # pylint: disable= unused-argument + creator_id: int, + ): + """Delete dataset creator""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_creator_ = model.DatasetContributor.query.get(creator_id) + model.db.session.delete(dataset_creator_) + model.db.session.commit() + + return Response(status=204) + + +@api.route("/study//dataset//metadata/funder/") +class DatasetFunderUpdate(Resource): + """Dataset Funder Update Resource""" + + @api.doc("delete funder") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + funder_id: int, + ): + """Delete dataset funder""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_funder_ = model.DatasetFunder.query.get(funder_id) + + model.db.session.delete(dataset_funder_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py deleted file mode 100644 index b9da2382..00000000 --- a/apis/dataset_metadata/dataset_title.py +++ /dev/null @@ -1,128 +0,0 @@ -"""API for dataset title metadata""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_title = api.model( - "DatasetTitle", - { - "id": fields.String(required=True), - "title": fields.String(required=True), - "type": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/title") -class DatasetTitleResource(Resource): - """Dataset Title 
Resource""" - - @api.doc("title") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_title) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset title""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_title_ = dataset_.dataset_title - return [d.to_dict() for d in dataset_title_], 200 - - @api.doc("update title") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset title""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "title": { - "type": "string", - "minLength": 1, - }, - "type": { - "type": "string", - "enum": [ - "MainTitle", - "AlternativeTitle", - "Subtitle", - "TranslatedTitle", - "OtherTitle", - ], - }, - }, - "required": ["title", "type"], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_title_ = model.DatasetTitle.query.get(i["id"]) - dataset_title_.update(i) - list_of_elements.append(dataset_title_.to_dict()) - elif "id" not in i or not i["id"]: - if i["type"] == "MainTitle": - return ( - "Main Title type can not be given", - 403, - ) - dataset_title_ = model.DatasetTitle.from_data(data_obj, i) - model.db.session.add(dataset_title_) - list_of_elements.append(dataset_title_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - 
@api.route("/study//dataset//metadata/title/") - class DatasetTitleDelete(Resource): - """Dataset Title Update Resource""" - - @api.doc("delete title") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - title_id: int, - ): - """Delete dataset title""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return ( - "Access denied, you can not make any change in dataset metadata", - 403, - ) - dataset_title_ = model.DatasetTitle.query.get(title_id) - if dataset_title_.type == "MainTitle": - return ( - "Main Title type can not be deleted", - 403, - ) - model.db.session.delete(dataset_title_) - model.db.session.commit() - return Response(status=204) diff --git a/apis/file.py b/apis/file.py index c2ad8d98..e31f3b14 100644 --- a/apis/file.py +++ b/apis/file.py @@ -2,42 +2,130 @@ import importlib import os -import uuid -from datetime import datetime, timezone -from urllib.parse import quote +import typing -import requests +from azure.storage.filedatalake import FileSystemClient from flask_restx import Namespace, Resource, reqparse +import model + api = Namespace("File", description="File operations", path="/") +class FileException(Exception): + pass + + +@api.errorhandler(FileException) +def handle_file_exception(error): + return {"message": str(error)}, 404 + + +# @api.route("/study//files1") +# class Files(Resource): +# """Files for a study""" +# +# parser = reqparse.RequestParser() +# parser.add_argument("path", type=str, required=False, location="args") +# @api.doc(description="Return a list of all files for a study") +# @api.param("path", "The folder path on the file system") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# def get(self, study_id): # pylint: disable=unused-argument +# """Return a list of all files for a study""" +# # with the same name as the study id. 
+# +# # Determine the appropriate configuration module based on the testing context +# if os.environ.get("FLASK_ENV") == "testing": +# config_module_name = "pytest_config" +# else: +# config_module_name = "config" +# +# config_module = importlib.import_module(config_module_name) +# if os.environ.get("FLASK_ENV") == "testing": +# # If testing, use the 'TestConfig' class for accessing 'secret' +# config = config_module.TestConfig +# else: +# # If not testing, directly use the 'config' module +# config = config_module +# if not config.AZURE_STORAGE_CONNECTION_STRING and not config.AZURE_CONTAINER: +# return "azure connection string is missing", 404 +# def get_file_tree(): +# container = config.CONTAINER +# file_system_client = FileSystemClient.from_connection_string( +# config.AZURE_STORAGE_CONNECTION_STRING, +# file_system_name=container, +# ) +# source: str = f"AI-READI/test-files/{study_id}" +# return recurse_file_tree(file_system_client, source) +# +# def recurse_file_tree(file_system_client: FileSystemClient, source: str): +# source_client = file_system_client.get_directory_client(source) +# if not source_client.exists(): +# raise FileException("source directory does not exist!") +# props = source_client.get_directory_properties() +# updated_on = props['last_modified'] +# size = 0 +# path_name = os.path.basename(source) +# return model.FolderStructure( +# path_name, +# size, +# updated_on, +# True, +# [ +# ( +# recurse_file_tree(file_system_client, child_path.name) +# if child_path.is_directory +# else model.FileStructure( +# os.path.basename(child_path.name), +# child_path.content_length, +# child_path.last_modified, +# False +# ) +# ) +# for child_path in file_system_client.get_paths(source, recursive=False) +# ], +# ) +# return get_file_tree().to_dict(), 200 + + @api.route("/study//files") class Files(Resource): """Files for a study""" parser = reqparse.RequestParser() - parser.add_argument("path", type=str, required=False, location="args") - - 
@api.doc(description="Return a list of all files for a study") - @api.param("path", "The folder path on the file system") + parser.add_argument( + "path", + type=str, + required=False, + location="args", + default="", + help="The folder path to list. Defaults to the study root.", + ) + + @api.doc( + description="Return a flat list of files and folders for a given path within a study." + ) + @api.param("path", "The folder path on the file system to explore.") @api.response(200, "Success") - @api.response(400, "Validation Error") + @api.response(400, "Validation Error or Invalid Path") + @api.response(404, "Path not found") def get(self, study_id): # pylint: disable=unused-argument - """Return a list of all files for a study""" + """Returns a flat list of files and folders for a given path""" + study = model.Study.query.get(study_id) + if not study: + return "Study not found", 404 - # todo: anticipating that each study will have a folder in the storage account - # with the same name as the study id. 
+ args = self.parser.parse_args() + relative_path = args.get("path", "") + relative_path = relative_path.lstrip("/\\") - # Determine the appropriate configuration module - # based on the testing context if os.environ.get("FLASK_ENV") == "testing": config_module_name = "pytest_config" else: config_module_name = "config" config_module = importlib.import_module(config_module_name) - if os.environ.get("FLASK_ENV") == "testing": # If testing, use the 'TestConfig' class for accessing 'secret' config = config_module.TestConfig @@ -45,69 +133,51 @@ def get(self, study_id): # pylint: disable=unused-argument # If not testing, directly use the 'config' module config = config_module - storage_account_name = config.FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME - storage_account_sas_token = config.FAIRHUB_AZURE_READ_SAS_TOKEN - request_time = datetime.now(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT") - - container = "pooled-data-pilot" # todo: this should be the study id - - query_params = ( - f"recursive=false&resource=filesystem&{storage_account_sas_token}" + if not config.AZURE_STORAGE_CONNECTION_STRING and not config.AZURE_CONTAINER: + return "azure connection string is missing", 404 + # --- Path Sanitization --- + base_dir = os.path.normpath(f"AI-READI/test-files/{study_id}") + full_path = os.path.normpath(os.path.join(base_dir, relative_path)) + if os.path.commonpath([base_dir, full_path]) != base_dir: + return {"message": "Access denied: Invalid path provided."}, 400 + source_path = full_path.replace("\\", "/") + + # --- Azure Client and Directory Listing --- + file_system_client = FileSystemClient.from_connection_string( + config.AZURE_STORAGE_CONNECTION_STRING, + file_system_name=config.AZURE_CONTAINER, ) - request_args = self.parser.parse_args() - - # subdirectory traversal - if prefix_path := request_args["path"]: - print(prefix_path) - query_path = quote(prefix_path.encode("utf-8")) - query_params = f"directory={query_path}&{query_params}" - - url = 
f"https://{storage_account_name}.dfs.core.windows.net/{container}?{query_params}" # noqa: E501 # pylint: disable=line-too-long - - print(url) - - api_version = "2023-08-03" - headers = { - "x-ms-date": request_time, - "x-ms-version": api_version, - } - - try: - response = requests.get( - url, - headers=headers, - timeout=30, - ) - - response_json = response.json() - - print(response_json) - - paths = [] - - for file in response_json["paths"]: - data = { - "id": str(uuid.uuid4()), - "content_length": file["contentLength"], - # "created_at": file["creationTime"], - "name": file["name"], - "is_directory": bool("isDirectory" in file and file["isDirectory"]), - "last_modified": file["lastModified"], - } - - # convert lastModified to unix timestamp - if "lastModified" in file: - date_string = file["lastModified"] - date_object = datetime.strptime( - date_string, "%a, %d %b %Y %H:%M:%S %Z" - ) - - data["updated_on"] = int(date_object.timestamp()) - - paths.append(data) - - return paths - except requests.exceptions.RequestException as e: - print(f"An error occurred: {e}") - return "Something went wrong with the request", 500 + directory_client = file_system_client.get_directory_client(source_path) + + # Check for existence and raise exception as requested + if not directory_client.exists(): + raise FileException(f"Source directory does not exist: {source_path}") + + # The response is a simple list of items in the directory + directory_contents = [] + + for child_path in file_system_client.get_paths( + path=source_path, recursive=False + ): + item: typing.Union[model.FolderStructure, model.FileStructure] + + if child_path.is_directory: + item = model.FolderStructure( + name=os.path.basename(child_path.name), + content_length=0, + updated_on=child_path.last_modified, + is_directory=True, + files=[], + ) + else: + item = model.FileStructure( + name=os.path.basename(child_path.name), + content_length=child_path.content_length, + updated_on=child_path.last_modified, + 
is_directory=False, + ) + + directory_contents.append(item.to_dict()) + + return directory_contents, 200 diff --git a/apis/redcap.py b/apis/redcap.py index 7c24d120..b2770b98 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -246,87 +246,3 @@ def delete(self, study_id: str, redcap_id: str): model.StudyRedcap.query.filter_by(id=redcap_id).delete() model.db.session.commit() return 204 - - -# @api.route("/study//redcap") -# class EditRedcapProjectAPI(Resource): -# @api.doc(parser=project_parser) -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_api_view_model) -# def put(self, study_id: int): -# """Update REDCap project API link""" -# study = model.Study.query.get(study_id) -# if not is_granted("update_redcap", study): -# return "Access denied, you can not modify this redcap project", 403 -# # Schema validation -# data: Union[Any, dict] = request.json -# schema = { -# "type": "object", -# "additionalProperties": False, -# "required": [ -# "api_pid", -# "title", -# "api_url", -# "api_active", -# ], -# "properties": { -# "api_pid": {"type": "string", "minLength": 1, "maxLength": 12}, -# "title": {"type": "string", "minLength": 1}, -# "api_url": {"type": "string", "minLength": 1}, -# "api_active": {"type": "boolean"}, -# }, -# } -# try: -# validate(request.json, schema) -# except ValidationError as e: -# return e.message, 400 - -# if len(data["api_pid"]) < 1: -# return ( -# f"""redcap api_pid is required for redcap access: -# {data['api_pid']}""", -# 400, -# ) -# if len(data["title"]) < 1: -# return ( -# f"""redcap title is required for redcap access: -# {data['title']}""", -# 400, -# ) -# if len(data["api_url"]) < 1: -# return ( -# f"""redcap api_url is required for redcap access: -# {data['api_url']}""", -# 400, -# ) -# if not isinstance(data["api_active"], bool): -# return ( -# f"""redcap api_active is required for redcap access: -# {data['api_active']}""", -# 400, -# ) -# update_redcap_project_view = 
model.StudyRedcap.query.get( -# data["api_pid"] -# ) -# update_redcap_project_view.update(data) -# model.db.session.commit() -# update_redcap_project_view = update_redcap_project_view.to_dict() -# return update_redcap_project_view, 201 - - -# @api.route("/study//redcap") -# class DeleteRedcapProjectAPI(Resource): -# @api.doc(parser=project_parser) -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_api_view_model) -# def delete(self, study_id: int): -# """Delete REDCap project API link""" -# study = model.Study.query.get(study_id) -# if not is_granted("delete_redcap", study): -# return "Access denied, you can not delete this redcap project", 403 -# api_pid = project_parser.parse_args()["api_pid"] -# model.StudyRedcap.query.filter_by(api_pid=api_pid).delete() -# model.db.session.commit() -# return 204 diff --git a/apis/study.py b/apis/study.py index 19768204..4c518190 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,10 +1,15 @@ """APIs for study operations""" "" +import os +import re from typing import Any, Union +import requests +from azure.storage.filedatalake import FileSystemClient from flask import Response, g, request from flask_restx import Namespace, Resource, fields, reqparse from jsonschema import ValidationError, validate +import config import model from .authentication import is_granted @@ -16,7 +21,6 @@ "Study", { "title": fields.String(required=True, default=""), - "image": fields.String(required=True, default=""), }, ) @@ -53,7 +57,7 @@ def get(self): return [s.to_dict() for s in studies], 200 - @api.expect(study_model) + # @api.expect(study_model) @api.response(201, "Success") @api.response(400, "Validation Error") def post(self): @@ -62,23 +66,25 @@ def post(self): # Schema validation schema = { "type": "object", - "required": ["title", "image", "acronym"], + "required": ["title", "image", "short_description"], "additionalProperties": False, "properties": { "title": {"type": "string", 
"minLength": 1, "maxLength": 300}, - "acronym": {"type": "string", "maxLength": 14}, + "short_description": {"type": "string", "maxLength": 300}, "image": {"type": "string"}, + "clinical_id": {"type": ["string", "null"]}, }, } + data: Union[Any, dict] = request.json + add_study = model.Study.from_data(data) + identifier = data.get("clinical_id") + try: - validate(request.json, schema) + validate(instance=data, schema=schema) except ValidationError as e: return e.message, 400 - data: Union[Any, dict] = request.json - - add_study = model.Study.from_data(data) model.db.session.add(add_study) study_id = add_study.id @@ -87,6 +93,52 @@ def post(self): study_contributor = model.StudyContributor.from_data(study_, g.user, "owner") model.db.session.add(study_contributor) + if os.environ.get("FLASK_ENV") != "testing": + # TODO finish study testing integration + container = config.AZURE_CONTAINER + if config.AZURE_STORAGE_CONNECTION_STRING and config.AZURE_CONTAINER: + file_system_client = FileSystemClient.from_connection_string( + config.AZURE_STORAGE_CONNECTION_STRING, + file_system_name=container, + ) + file_system_client.create_directory(f"AI-READI/test-files/{study_id}") + try: + if isinstance(identifier, str) and re.match( + r"^NCT\d{8}$", identifier.strip() + ): + url = f"https://classic.clinicaltrials.gov/api/v2/studies/{identifier}" + # AI-READI id-NCT06002048 + + response = requests.get(url, timeout=10) + if response.status_code == 404: + return { + "error": "No clinical study was found with the provided identifier", + "status_code": 404, + "message": f"No study found for identifier '{identifier}'.", + }, 404 + + if response.status_code != 200: + return { + "error": "Failed to fetch clinical trial data", + "status_code": response.status_code, + "message": f"ClinicalTrials.gov returned status {response.status_code}.", + }, response.status_code + + clinical_data = response.json() + study_.update_identification_id(clinical_data["protocolSection"]) + 
study_.import_from_clinical_data(clinical_data["protocolSection"]) + except requests.exceptions.RequestException as e: + return { + "error": "Failed to connect to ClinicalTrials.gov API", + "status_code": 503, + "message": str(e), + }, 503 + except Exception as e: + return { + "error": "Unexpected server error", + "status_code": 500, + "message": str(e), + }, 500 model.db.session.commit() return study_.to_dict(), 201 @@ -115,12 +167,13 @@ def put(self, study_id: int): # Schema validation schema = { "type": "object", - "required": ["title", "image", "acronym"], + "required": ["title", "short_description", "is_overwrite"], "additionalProperties": False, "properties": { "title": {"type": "string", "minLength": 1}, - "image": {"type": "string", "minLength": 1}, - "acronym": {"type": "string", "maxLength": 14}, + "short_description": {"type": "string", "maxLength": 300}, + "is_overwrite": {"type": "boolean"}, + "clinical_id": {"type": ["string", "null"]}, }, } @@ -130,11 +183,59 @@ def put(self, study_id: int): return e.message, 400 update_study = model.Study.query.get(study_id) - + data: Union[Any, dict] = request.json if not is_granted("update_study", update_study): return "Access denied, you can not modify", 403 - update_study.update(request.json) + identifier = data["clinical_id"].strip() + is_overwrite = data["is_overwrite"] + + update_study.update(data) + + if identifier: + try: + if not identifier or not isinstance(identifier, str): + raise ValueError("Identifier must be a non-empty string.") + + if not re.match(r"^NCT\d{8}$", identifier): + raise ValueError("Identifier must be in the format 'NCT########'.") + + url = f"https://classic.clinicaltrials.gov/api/v2/studies/{identifier}" + + response = requests.get(url, timeout=10) + if response.status_code == 404: + return { + "error": "No clinical study was found with the provided identifier", + "status_code": 404, + "message": f"No study found for identifier '{identifier}'.", + }, 404 + + if response.status_code != 
200: + return { + "error": "Failed to fetch clinical trial data", + "status_code": response.status_code, + "message": f"ClinicalTrials.gov returned status {response.status_code}.", + }, response.status_code + + clinical_data = response.json() + update_study.update_identification_id(clinical_data["protocolSection"]) + if is_overwrite: + update_study.import_from_clinical_data( + clinical_data["protocolSection"] + ) + except requests.exceptions.RequestException as e: + return { + "error": "Failed to connect to ClinicalTrials.gov API", + "status_code": 503, + "message": str(e), + }, 503 + except Exception as e: + return { + "error": "Unexpected server error", + "status_code": 500, + "message": str(e), + }, 500 + model.db.session.commit() return update_study.to_dict(), 200 diff --git a/apis/study_metadata/study_collaborators.py b/apis/study_metadata/study_collaborators.py deleted file mode 100644 index 018f3062..00000000 --- a/apis/study_metadata/study_collaborators.py +++ /dev/null @@ -1,108 +0,0 @@ -"""API routes for study collaborators metadata""" - -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_collaborators = api.model( - "StudyCollaborators", - { - "id": fields.String(required=True), - "name": fields.String(required=True), - "identifier": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/collaborators") -class StudyCollaboratorsResource(Resource): - """Study Collaborators Metadata""" - - @api.doc("collaborators") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_collaborators) - def get(self, study_id: int): - """Get study collaborators metadata""" - study_ = model.Study.query.get(study_id) - 
study_collaborators_ = study_.study_collaborators - - return [collab.to_dict() for collab in study_collaborators_], 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int): - """updating study collaborators""" - # Schema validation - schema = { - "type": "array", - "additionalProperties": False, - "items": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string"}, - "identifier": {"type": "string"}, - "identifier_scheme": {"type": "string"}, - "identifier_scheme_uri": {"type": "string"}, - }, - "required": [ - "name", - "identifier", - "identifier_scheme", - ], - }, - } - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[dict, typing.Any] = request.json - - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - study_collaborators_ = model.StudyCollaborators.query.get(i["id"]) - study_collaborators_.update(i) - else: - study_collaborators_ = model.StudyCollaborators.from_data(study_obj, i) - model.db.session.add(study_collaborators_) - list_of_elements.append(study_collaborators_.to_dict()) - model.db.session.commit() - - return list_of_elements, 201 - - -@api.route("/study//metadata/collaborators/") -class StudyLocationUpdate(Resource): - """delete Study Collaborators Metadata""" - - @api.doc("delete study collaborators") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, collaborator_id: int): - """Delete study collaborators metadata""" - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 - study_collaborators_ = model.StudyCollaborators.query.get(collaborator_id) - - 
model.db.session.delete(study_collaborators_) - - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_conditions.py b/apis/study_metadata/study_conditions.py deleted file mode 100644 index c0dfcfcb..00000000 --- a/apis/study_metadata/study_conditions.py +++ /dev/null @@ -1,107 +0,0 @@ -"""API routes for study other metadata""" - -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_other = api.model( - "StudyConditions", - { - "id": fields.String(required=True), - "name": fields.Boolean(required=True), - "classification_code": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - "condition_uri": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/conditions") -class StudyCondition(Resource): - """Study Conditions Metadata""" - - @api.doc("conditions") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_other) - def get(self, study_id: int): - """Get study conditions metadata""" - study_ = model.Study.query.get(study_id) - - study_conditions = study_.study_conditions - - return [s.to_dict() for s in study_conditions], 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int): - """Create study condition metadata""" - # Schema validation - schema = { - "type": "array", - "additionalProperties": False, - "items": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "classification_code": {"type": "string"}, - "scheme": {"type": "string"}, - "scheme_uri": {"type": "string"}, - "condition_uri": {"type": "string"}, - }, - "required": ["name", "classification_code", 
"condition_uri"], - }, - } - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - - data: typing.Union[dict, typing.Any] = request.json - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - study_conditions_ = model.StudyConditions.query.get(i["id"]) - if not study_conditions_: - return f"Study condition {i['id']} Id is not found", 404 - study_conditions_.update(i) - list_of_elements.append(study_conditions_.to_dict()) - elif "id" not in i or not i["id"]: - study_conditions_ = model.StudyConditions.from_data(study_obj, i) - model.db.session.add(study_conditions_) - list_of_elements.append(study_conditions_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//metadata/conditions/") -class StudyConditionsUpdate(Resource): - """Study Conditions Metadata update""" - - @api.doc("Delete Study Identifications") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, condition_id: int): - """Delete study conditions metadata""" - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 - - study_conditions_ = model.StudyConditions.query.get(condition_id) - - model.db.session.delete(study_conditions_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 0c181dee..12ca0fa0 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -1,6 +1,8 @@ """API routes for study description metadata""" -from flask import request +import typing + +from flask import Response, request from flask_restx import Resource, fields from jsonschema import 
ValidationError, validate @@ -10,11 +12,82 @@ from ..authentication import is_granted study_description = api.model( - "StudyDescription", + "StudyMetadataDescription", { - "id": fields.String(required=True), - "brief_summary": fields.String(required=True), - "detailed_description": fields.String(required=True), + "description": fields.Nested( + api.model( + "StudyDescription", + { + "id": fields.String(required=True), + "brief_summary": fields.String(required=True), + "detailed_description": fields.String(required=True), + }, + ) + ), + "conditions": fields.List( + fields.Nested( + api.model( + "StudyConditions", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "classification_code": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "condition_uri": fields.String(required=True), + }, + ) + ) + ), + "keywords": fields.List( + fields.Nested( + api.model( + "StudyKeywords", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "classification_code": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "keyword_uri": fields.String(required=True), + }, + ) + ) + ), + "identification": fields.Nested( + api.model( + "StudyIdentification", + { + "primary": fields.Nested( + api.model( + "PrimaryIdentification", + { + "id": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.String(required=True), + "identifier_domain": fields.String(required=True), + "identifier_link": fields.String(required=True), + }, + ) + ), + "secondary": fields.List( + fields.Nested( + api.model( + "SecondaryIdentification", + { + "id": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.String(required=True), + "identifier_domain": fields.String(required=True), + "identifier_link": 
fields.String(required=True), + }, + ) + ), + required=True, + ), + }, + ) + ), }, ) @@ -30,27 +103,86 @@ class StudyDescriptionResource(Resource): def get(self, study_id: int): """Get study description metadata""" study_ = model.Study.query.get(study_id) - + identifiers = model.Identifiers(study_) + study_keywords = study_.study_keywords + study_conditions = study_.study_conditions study_description_ = study_.study_description - - return study_description_.to_dict(), 200 + return { + "keywords": [k.to_dict() for k in study_keywords], + "conditions": [c.to_dict() for c in study_conditions], + "description": study_description_.to_dict(), + "identification": identifiers.to_dict(), + }, 200 @api.response(200, "Success") @api.response(400, "Validation Error") - def put(self, study_id: int): + @api.marshal_with(study_description) + def post(self, study_id: int): """Update study description metadata""" - study_obj = model.Study.query.get(study_id) # Schema validation schema = { "type": "object", "additionalProperties": False, + "required": ["conditions", "keywords", "description", "identification"], "properties": { - "brief_summary": {"type": "string", "minLength": 1}, - "detailed_description": { - "type": "string", + "conditions": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "classification_code": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "condition_uri": {"type": "string"}, + }, + "required": ["name", "classification_code", "condition_uri"], + "additionalProperties": False, + }, + }, + "keywords": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "classification_code": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "keyword_uri": {"type": "string"}, + }, + "required": ["name", 
"classification_code", "keyword_uri"], + "additionalProperties": False, + }, + }, + "identification": { + "type": "object", + "additionalProperties": False, + "properties": { + "primary": { + "type": "object", + "additionalProperties": False, + "properties": { + "identifier": {"type": "string"}, + "identifier_type": {"type": "string", "minLength": 1}, + "identifier_domain": {"type": "string"}, + "identifier_link": {"type": "string"}, + }, + }, + "secondary": {"type": "array"}, + }, + }, + "description": { + "type": "object", + "additionalProperties": False, + "properties": { + "brief_summary": {"type": "string"}, + "detailed_description": {"type": "string"}, + }, }, }, - "required": ["brief_summary", "detailed_description"], } try: @@ -59,12 +191,134 @@ def put(self, study_id: int): return e.message, 400 study_obj = model.Study.query.get(study_id) + data: typing.Union[dict, typing.Any] = request.json + if not is_granted("study_metadata", study_obj): return "Access denied, you can not modify study", 403 - study_ = model.Study.query.get(study_id) + study_obj.study_description.update(data["description"]) + + list_of_keywords = [] + for i in data["keywords"]: + if "id" in i and i["id"]: + study_keywords_ = model.StudyKeywords.query.get(i["id"]) + if not study_keywords_: + return f"Study keywords {i['id']} Id is not found", 404 + study_keywords_.update(i) + list_of_keywords.append(study_keywords_.to_dict()) + elif "id" not in i or not i["id"]: + study_keywords_ = model.StudyKeywords.from_data(study_obj, i) + model.db.session.add(study_keywords_) + list_of_keywords.append(study_keywords_.to_dict()) + + list_of_conditions = [] + for i in data["conditions"]: + if "id" in i and i["id"]: + study_conditions_ = model.StudyConditions.query.get(i["id"]) + if not study_conditions_: + return f"Study condition {i['id']} Id is not found", 404 + study_conditions_.update(i) + list_of_conditions.append(study_conditions_.to_dict()) + elif "id" not in i or not i["id"]: + 
study_conditions_ = model.StudyConditions.from_data(study_obj, i) + model.db.session.add(study_conditions_) + list_of_conditions.append(study_conditions_.to_dict()) + + identifiers = [i for i in study_obj.study_identification if not i.secondary] + primary_identifier = identifiers[0] if len(identifiers) else None + + primary: dict = data["identification"]["primary"] + + if primary_identifier: + primary_identifier.update(primary) + else: + study_identification_ = model.StudyIdentification.from_data( + study_obj, primary, False + ) + model.db.session.add(study_identification_) + + for i in data["identification"]["secondary"]: + i["secondary"] = True + if "id" in i and i["id"]: + study_identification_ = model.StudyIdentification.query.get(i["id"]) + study_identification_.update(i) + else: + study_identification_ = model.StudyIdentification.from_data( + study_obj, i, True + ) + model.db.session.add(study_identification_) + + model.db.session.commit() + + final_identifiers = model.Identifiers(study_obj) + + return { + "description": study_obj.study_description.to_dict(), + "conditions": list_of_conditions, + "keywords": list_of_keywords, + "identification": final_identifiers.to_dict(), + }, 201 + + +@api.route("/study//metadata/keywords/") +class StudyKeywordsDelete(Resource): + """Study keywords Metadata update""" + + @api.doc("Delete Study Keywords") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, keyword_id: int): + """Delete study conditions metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 + + study_keywords_ = model.StudyKeywords.query.get(keyword_id) + + model.db.session.delete(study_keywords_) + model.db.session.commit() + + return Response(status=204) + + +@api.route("/study//metadata/conditions/") +class StudyConditionsUpdate(Resource): + """Study Conditions Metadata update""" + + @api.doc("Delete 
Study Identifications") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, condition_id: int): + """Delete study conditions metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 + + study_conditions_ = model.StudyConditions.query.get(condition_id) + + model.db.session.delete(study_conditions_) + model.db.session.commit() + + return Response(status=204) + + +@api.route("/study//metadata/identification/") +class StudyIdentificationdDelete(Resource): + """Study Identification Metadata""" + + @api.doc("Delete Study Identifications") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, identification_id: int): + """Delete study identification metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 - study_.study_description.update(request.json) + study_identification_ = model.StudyIdentification.query.get(identification_id) + if not study_identification_.secondary: + return "primary identifier can not be deleted", 400 + model.db.session.delete(study_identification_) model.db.session.commit() - return study_.study_description.to_dict(), 200 + return Response(status=204) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 8a8fb0f8..ee5e2f63 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -55,6 +55,7 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_design) def put(self, study_id: int): """Update study design metadata""" # Schema validation diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 7d2bebf7..32b1bc36 100644 --- 
a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -47,6 +47,7 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_eligibility) def put(self, study_id: int): """Update study eligibility metadata""" # Schema validation diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py deleted file mode 100644 index 01a3a0fa..00000000 --- a/apis/study_metadata/study_identification.py +++ /dev/null @@ -1,138 +0,0 @@ -"""API routes for study identification metadata""" - -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_identification = api.model( - "StudyIdentification", - { - "id": fields.String(required=True), - "identifier": fields.String(required=True), - "identifier_type": fields.String(required=True), - "identifier_domain": fields.String(required=True), - "identifier_link": fields.String(required=True), - "secondary": fields.Boolean(required=True), - }, -) - - -@api.route("/study//metadata/identification") -class StudyIdentificationResource(Resource): - """Study Identification Metadata""" - - @api.doc("identification") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") - # @api.marshal_with(study_identification) - def get(self, study_id: int): - """Get study identification metadata""" - study_ = model.Study.query.get(study_id) - identifiers = model.Identifiers(study_) - return identifiers.to_dict(), 200 - - @api.doc("identification add") - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.expect(study_identification) - def post(self, study_id: int): - """Create study identification metadata""" - # Schema 
validation - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "primary": { - "type": "object", - "additionalProperties": False, - "properties": { - "identifier": {"type": "string", "minLength": 1}, - "identifier_type": { - "type": "string", - "minLength": 1, - }, - "identifier_domain": { - "type": "string", - }, - "identifier_link": { - "type": "string", - }, - }, - }, - "secondary": { - "type": "array", - }, - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - - data: typing.Union[dict, typing.Any] = request.json - identifiers = [i for i in study_obj.study_identification if not i.secondary] - primary_identifier = identifiers[0] if len(identifiers) else None - - primary: dict = data["primary"] - - if primary_identifier: - primary_identifier.update(primary) - else: - study_identification_ = model.StudyIdentification.from_data( - study_obj, primary, False - ) - model.db.session.add(study_identification_) - - for i in data["secondary"]: - i["secondary"] = True - if "id" in i and i["id"]: - study_identification_ = model.StudyIdentification.query.get(i["id"]) - study_identification_.update(i) - else: - study_identification_ = model.StudyIdentification.from_data( - study_obj, i, True - ) - model.db.session.add(study_identification_) - - model.db.session.commit() - - final_identifiers = model.Identifiers(study_obj) - - return final_identifiers.to_dict(), 201 - - @api.route("/study//metadata/identification/") - class StudyIdentificationdUpdate(Resource): - """Study Identification Metadata""" - - @api.doc("Delete Study Identifications") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, identification_id: int): - """Delete study identification metadata""" - study = 
model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 - - study_identification_ = model.StudyIdentification.query.get( - identification_id - ) - if not study_identification_.secondary: - return "primary identifier can not be deleted", 400 - - model.db.session.delete(study_identification_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index fb3641a6..bb76984f 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -41,11 +41,11 @@ def get(self, study_id: int): sorted_study_intervention = sorted( study_intervention_, key=lambda x: x.created_at ) - return [s.to_dict() for s in sorted_study_intervention], 200 @api.response(201, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_intervention) def post(self, study_id: int): """Create study intervention metadata""" # Schema validation diff --git a/apis/study_metadata/study_keywords.py b/apis/study_metadata/study_keywords.py deleted file mode 100644 index 4e6c420b..00000000 --- a/apis/study_metadata/study_keywords.py +++ /dev/null @@ -1,107 +0,0 @@ -"""API routes for study other metadata""" - -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_keywords = api.model( - "StudyKeywords", - { - "id": fields.String(required=True), - "name": fields.Boolean(required=True), - "classification_code": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - "keyword_uri": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/keywords") -class StudyKeywords(Resource): - """Study Keywords 
Metadata""" - - @api.doc("keywords") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_other) - def get(self, study_id: int): - """Get study keywords metadata""" - study_ = model.Study.query.get(study_id) - study_keywords = study_.study_keywords - - return [k.to_dict() for k in study_keywords], 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int): - """Create study keywords metadata""" - # Schema validation - schema = { - "type": "array", - "additionalProperties": False, - "items": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "classification_code": {"type": "string"}, - "scheme": {"type": "string"}, - "scheme_uri": {"type": "string"}, - "keyword_uri": {"type": "string"}, - }, - "required": ["name", "classification_code", "keyword_uri"], - }, - } - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - - data: typing.Union[dict, typing.Any] = request.json - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - study_keywords_ = model.StudyKeywords.query.get(i["id"]) - if not study_keywords_: - return f"Study keywords {i['id']} Id is not found", 404 - study_keywords_.update(i) - list_of_elements.append(study_keywords_.to_dict()) - elif "id" not in i or not i["id"]: - study_keywords_ = model.StudyKeywords.from_data(study_obj, i) - model.db.session.add(study_keywords_) - list_of_elements.append(study_keywords_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//metadata/keywords/") -class StudyKeywordsDelete(Resource): - """Study keywords Metadata update""" - - @api.doc("Delete Study Keywords") - @api.response(204, "Success") - 
@api.response(400, "Validation Error") - def delete(self, study_id: int, keyword_id: int): - """Delete study conditions metadata""" - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 - - study_keywords_ = model.StudyKeywords.query.get(keyword_id) - - model.db.session.delete(study_keywords_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index b017e235..73f860b6 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -46,6 +46,7 @@ def get(self, study_id: int): @api.response(201, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_location) def post(self, study_id: int): """Create study location metadata""" # Schema validation diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py deleted file mode 100644 index 5355004d..00000000 --- a/apis/study_metadata/study_other.py +++ /dev/null @@ -1,256 +0,0 @@ -# """API routes for study other metadata""" -# -# import typing -# -# from flask import request, Response -# from flask_restx import Resource, fields -# from jsonschema import ValidationError, validate -# -# import model -# from apis.study_metadata_namespace import api -# -# from ..authentication import is_granted -# -# study_other = api.model( -# "StudyOther", -# { -# "id": fields.String(required=True), -# "oversight_has_dmc": fields.Boolean(required=True), -# "conditions": fields.String(required=True), -# "keywords": fields.String(required=True), -# "size": fields.String(required=True), -# }, -# ) -# -# -# @api.route("/study//metadata/oversight") -# class StudyOversightResource(Resource): -# """Study Oversight Metadata""" -# -# @api.doc("oversight") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.marshal_with(study_other) -# def get(self, 
study_id: int): -# """Get study oversight metadata""" -# study_ = model.Study.query.get(study_id) -# -# study_oversight_has_dmc = study_.study_oversight -# return study_oversight_has_dmc.to_dict(), 200 -# -# def put(self, study_id: int): -# """Update study oversight metadata""" -# # Schema validation -# schema = { -# "type": "object", -# "additionalProperties": False, -# "properties": {"oversight_has_dmc": {"type": "boolean"}}, -# "required": ["has_dmc"], -# } -# -# try: -# validate(request.json, schema) -# except ValidationError as e: -# return e.message, 400 -# -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# data: typing.Union[dict, typing.Any] = request.json -# study_oversight_ = study_obj.study_oversight.update(data) -# model.db.session.commit() -# return study_obj.study_oversight.to_dict(), 200 - - -# @api.route("/study//metadata/conditions") -# class StudyCondition(Resource): -# """Study Conditions Metadata""" -# -# @api.doc("conditions") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.marshal_with(study_other) -# def get(self, study_id: int): -# """Get study conditions metadata""" -# study_ = model.Study.query.get(study_id) -# -# study_conditions = study_.study_conditions -# -# return [s.to_dict() for s in study_conditions], 200 -# -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def post(self, study_id: int): -# """Create study condition metadata""" -# # Schema validation -# # schema = { -# # "type": "array", -# # "additionalProperties": False, -# # "items": { -# # "type": "object", -# # "properties": { -# # "id": {"type": "string"}, -# # "facility": {"type": "string", "minLength": 1}, -# # "status": { -# # "type": "string", -# # "enum": [ -# # "Withdrawn", -# # "Recruiting", -# # "Active, not recruiting", -# # "Not yet recruiting", -# # "Suspended", -# # "Enrolling by invitation", -# 
# "Completed", -# # "Terminated", -# # ], -# # }, -# # "city": {"type": "string", "minLength": 1}, -# # "state": {"type": "string"}, -# # "zip": {"type": "string"}, -# # "country": {"type": "string", "minLength": 1}, -# # }, -# # "required": ["facility", "status", "city", "country"], -# # }, -# # } -# # -# # try: -# # validate(request.json, schema) -# # except ValidationError as e: -# # return e.message, 400 -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# -# data: typing.Union[dict, typing.Any] = request.json -# list_of_elements = [] -# for i in data: -# if "id" in i and i["id"]: -# study_conditions_ = model.StudyConditions.query.get(i["id"]) -# if not study_conditions_: -# return f"Study condition {i['id']} Id is not found", 404 -# study_conditions_.update(i) -# list_of_elements.append(study_conditions_.to_dict()) -# elif "id" not in i or not i["id"]: -# study_conditions_ = model.StudyConditions.from_data(study_obj, i) -# model.db.session.add(study_conditions_) -# list_of_elements.append(study_conditions_.to_dict()) -# model.db.session.commit() -# return list_of_elements, 201 -# -# -# @api.route("/study//metadata/conditions/") -# class StudyConditionsUpdate(Resource): -# """Study Conditions Metadata update""" -# -# @api.doc("Delete Study Identifications") -# @api.response(204, "Success") -# @api.response(400, "Validation Error") -# def delete(self, study_id: int, condition_id: int): -# """Delete study conditions metadata""" -# study = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study): -# return "Access denied, you can not delete study", 403 -# -# study_conditions_ = model.StudyConditions.query.get(condition_id) -# -# model.db.session.delete(study_conditions_) -# model.db.session.commit() -# -# return Response(status=204) - - -# @api.route("/study//metadata/keywords") -# class StudyKeywords(Resource): -# """Study Keywords 
Metadata""" -# -# @api.doc("keywords") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.marshal_with(study_other) -# def get(self, study_id: int): -# """Get study keywords metadata""" -# study_ = model.Study.query.get(study_id) -# -# study_keywords = study_.study_keywords -# -# return [k.to_dict() for k in study_keywords], 200 -# -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def post(self, study_id: int): -# """Create study condition metadata""" -# # Schema validation -# # schema = { -# # "type": "array", -# # "additionalProperties": False, -# # "items": { -# # "type": "object", -# # "properties": { -# # "id": {"type": "string"}, -# # "facility": {"type": "string", "minLength": 1}, -# # "status": { -# # "type": "string", -# # "enum": [ -# # "Withdrawn", -# # "Recruiting", -# # "Active, not recruiting", -# # "Not yet recruiting", -# # "Suspended", -# # "Enrolling by invitation", -# # "Completed", -# # "Terminated", -# # ], -# # }, -# # "city": {"type": "string", "minLength": 1}, -# # "state": {"type": "string"}, -# # "zip": {"type": "string"}, -# # "country": {"type": "string", "minLength": 1}, -# # }, -# # "required": ["facility", "status", "city", "country"], -# # }, -# # } -# # -# # try: -# # validate(request.json, schema) -# # except ValidationError as e: -# # return e.message, 400 -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# -# data: typing.Union[dict, typing.Any] = request.json -# list_of_elements = [] -# for i in data: -# if "id" in i and i["id"]: -# study_keywords_ = model.StudyKeywords.query.get(i["id"]) -# if not study_keywords_: -# return f"Study keywords {i['id']} Id is not found", 404 -# study_keywords_.update(i) -# list_of_elements.append(study_keywords_.to_dict()) -# elif "id" not in i or not i["id"]: -# study_keywords_ = model.StudyKeywords.from_data(study_obj, i) -# 
model.db.session.add(study_keywords_) -# list_of_elements.append(study_keywords_.to_dict()) -# model.db.session.commit() -# return list_of_elements, 201 -# -# -# @api.route("/study//metadata/keywords/") -# class StudyKeywordsDelete(Resource): -# """Study keywords Metadata update""" -# -# @api.doc("Delete Study Keywords") -# @api.response(204, "Success") -# @api.response(400, "Validation Error") -# def delete(self, study_id: int, keyword_id: int): -# """Delete study conditions metadata""" -# study = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study): -# return "Access denied, you can not delete study", 403 -# -# study_keywords_ = model.StudyKeywords.query.get(keyword_id) -# -# model.db.session.delete(study_keywords_) -# model.db.session.commit() -# -# return Response(status=204) diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 6a11c576..e97181eb 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -15,9 +15,19 @@ "StudyOverallOfficial", { "id": fields.String(required=True), - "name": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "identifier": fields.String(required=False), + "identifier_scheme": fields.String(required=False), + "identifier_scheme_uri": fields.String(required=False), "affiliation": fields.String(required=True), - "role": fields.String(required=True), + "affiliation_identifier": fields.String(required=True), + "affiliation_identifier_scheme": fields.String(required=False), + "affiliation_identifier_scheme_uri": fields.String(required=False), + "role": fields.String( + required=True + ), # Allows null in JSON Schema but RESTx doesn't support nullable fields + "degree": fields.String(required=False), }, ) @@ -30,7 +40,7 @@ class StudyOverallOfficialResource(Resource): @api.response(200, "Success") @api.response(400, "Validation 
Error") # @api.param("id", "The study identifier") - # @api.marshal_with(study_overall_official) + @api.marshal_with(study_overall_official) def get(self, study_id: int): """Get study overall official metadata""" study_ = model.Study.query.get(study_id) @@ -47,6 +57,7 @@ def get(self, study_id: int): @api.response(201, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_overall_official) def post(self, study_id: int): """Create study overall official metadata""" # Schema validation diff --git a/apis/study_metadata/study_oversight.py b/apis/study_metadata/study_oversight.py index 179f4384..9b7e008c 100644 --- a/apis/study_metadata/study_oversight.py +++ b/apis/study_metadata/study_oversight.py @@ -14,11 +14,10 @@ study_other = api.model( "StudyOversight", { - "id": fields.String(required=True), - "oversight_has_dmc": fields.Boolean(required=True), - "conditions": fields.String(required=True), - "keywords": fields.String(required=True), - "size": fields.String(required=True), + "has_dmc": fields.String(required=True), + "fda_regulated_drug": fields.String(required=True), + "fda_regulated_device": fields.String(required=True), + "human_subject_review_status": fields.String(required=True), }, ) @@ -30,7 +29,7 @@ class StudyOversightResource(Resource): @api.doc("oversight") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(study_other) + @api.marshal_with(study_other) def get(self, study_id: int): """Get study oversight metadata""" study_ = model.Study.query.get(study_id) @@ -38,6 +37,9 @@ def get(self, study_id: int): study_oversight_has_dmc = study_.study_oversight return study_oversight_has_dmc.to_dict(), 200 + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(study_other) def put(self, study_id: int): """Update study oversight metadata""" # Schema validation diff --git a/apis/study_metadata/study_sponsors.py b/apis/study_metadata/study_sponsors.py deleted file 
mode 100644 index 1d4a66a3..00000000 --- a/apis/study_metadata/study_sponsors.py +++ /dev/null @@ -1,164 +0,0 @@ -"""API routes for study sponsors and collaborators metadata""" - -import typing - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_sponsors = api.model( - "StudySponsors", - { - "responsible_party_type": fields.String(required=True), - "responsible_party_investigator_first_name": fields.String(required=False), - "responsible_party_investigator_last_name": fields.String(required=True), - "responsible_party_investigator_title": fields.String(required=True), - "responsible_party_investigator_identifier_value": fields.String(required=True), - "responsible_party_investigator_identifier_scheme": fields.String( - required=True - ), - "responsible_party_investigator_identifier_scheme_uri": fields.String( - required=True - ), - "responsible_party_investigator_affiliation_name": fields.String(required=True), - "responsible_party_investigator_affiliation_identifier_scheme": fields.String( - required=True - ), - "responsible_party_investigator_affiliation_identifier_value": fields.String( - required=True - ), - "responsible_party_investigator_affiliation_identifier_scheme_uri": fields.String( - required=True - ), - "lead_sponsor_name": fields.String(required=True), - "lead_sponsor_identifier": fields.String(required=True), - "lead_sponsor_identifier_scheme": fields.String(required=True), - "lead_sponsor_identifier_scheme_uri": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/sponsor") -class StudySponsorsResource(Resource): - """Study Sponsors Metadata""" - - @api.doc("sponsors") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(study_sponsors) - def get(self, study_id: int): - """Get study sponsors metadata""" - 
study_ = model.Study.query.get(study_id) - - study_sponsors_ = study_.study_sponsors - - return study_sponsors_.to_dict(), 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int): - """Update study sponsors metadata""" - # Schema validation - schema = { - "type": "object", - "additionalProperties": False, - "required": [ - "responsible_party_type", - "lead_sponsor_name", - "responsible_party_investigator_last_name", - "responsible_party_investigator_first_name", - "responsible_party_investigator_title", - ], - "properties": { - "responsible_party_type": { - "type": ["string", "null"], - "enum": [ - "Sponsor", - "Principal Investigator", - "Sponsor-Investigator", - ], - }, - "responsible_party_investigator_first_name": { - "type": "string", - }, - "responsible_party_investigator_last_name": { - "type": "string", - }, - "responsible_party_investigator_title": { - "type": "string", - }, - "responsible_party_investigator_identifier_value": { - "type": "string", - }, - "responsible_party_investigator_identifier_scheme": { - "type": "string", - }, - "responsible_party_investigator_identifier_scheme_uri": { - "type": "string", - }, - "responsible_party_investigator_affiliation_name": { - "type": "string", - }, - "responsible_party_investigator_affiliation_identifier_scheme": { - "type": "string", - }, - "responsible_party_investigator_affiliation_identifier_value": { - "type": "string", - }, - "responsible_party_investigator_affiliation_identifier_scheme_uri": { - "type": "string", - }, - "lead_sponsor_name": {"type": "string"}, - "lead_sponsor_identifier": {"type": "string"}, - "lead_sponsor_identifier_scheme": {"type": "string"}, - "lead_sponsor_identifier_scheme_uri": { - "type": "string", - }, - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[dict, typing.Any] = request.json - if data["responsible_party_type"] in [ - "Principal 
Investigator", - "Sponsor-Investigator", - ]: - if not data["responsible_party_investigator_last_name"]: - return "Principal Investigator name is required", 400 - if not data["responsible_party_investigator_first_name"]: - return "Principal Investigator name is required", 400 - - if not data["responsible_party_investigator_title"]: - return "Principal Investigator title is required", 400 - - investigator_first_name = data["responsible_party_investigator_first_name"] - investigator_last_name = data["responsible_party_investigator_last_name"] - investigator_title = data["responsible_party_investigator_title"] - - if investigator_first_name == "": - return "Principal Investigator first name cannot be empty", 400 - if investigator_last_name == "": - return "Principal Investigator last name cannot be empty", 400 - if investigator_title == "": - return "Principal Investigator title cannot be empty", 400 - - study_ = model.Study.query.get(study_id) - - # Check user permissions - if not is_granted("study_metadata", study_): - return "Access denied, you can not modify study", 403 - - study_.study_sponsors.update(data) - - model.db.session.commit() - - return study_.study_sponsors.to_dict(), 200 diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index d0edbfce..121240aa 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -3,7 +3,7 @@ import typing from flask import request -from flask_restx import Resource, fields +from flask_restx import Resource, fields, marshal from jsonschema import ValidationError, validate import model @@ -11,16 +11,15 @@ from ..authentication import is_granted -study_status = api.model( +study_status_model = api.model( "StudyStatus", { - "id": fields.String(required=True), - "overall_status": fields.String(required=True), + "overall_status": fields.String(required=False), "why_stopped": fields.String(required=True), - "start_date": fields.String(required=True), - 
"start_date_type": fields.String(required=True), - "completion_date": fields.String(required=True), - "completion_date_type": fields.String(required=True), + "start_date": fields.String(required=False), + "start_date_type": fields.String(required=False), + "completion_date": fields.String(required=False), + "completion_date_type": fields.String(required=False), }, ) @@ -33,7 +32,7 @@ class StudyStatusResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - @api.marshal_with(study_status) + @api.marshal_with(study_status_model) def get(self, study_id: int): """Get study status metadata""" study_ = model.Study.query.get(study_id) @@ -54,7 +53,6 @@ def put(self, study_id: int): "start_date", "start_date_type", "overall_status", - "why_stopped", "completion_date", "completion_date_type", ], @@ -94,20 +92,20 @@ def put(self, study_id: int): return e.message, 400 data: typing.Union[typing.Any, dict] = request.json - if data["overall_status"] in ["Completed", "Terminated", "Suspended"]: - if "why_stopped" not in data or not data["why_stopped"]: - return ( - f"why_stopped is required for overall_status: {data['overall_status']}", - 400, - ) - study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not modify study", 403 + if data.get("overall_status") in ["Completed", "Terminated", "Suspended"]: + why_stopped = data.get("why_stopped", "") + if not why_stopped or not why_stopped.strip(): + return { + "message": f"why_stopped is required for overall_status: {data['overall_status']}" + }, 400 study = model.Study.query.get(study_id) study.study_status.update(request.json) model.db.session.commit() - return study.study_status.to_dict(), 200 + result = marshal(study.study_status.to_dict(), study_status_model) + return result, 200 diff --git a/apis/study_metadata/study_team.py b/apis/study_metadata/study_team.py new file mode 100644 index 
00000000..176beea8 --- /dev/null +++ b/apis/study_metadata/study_team.py @@ -0,0 +1,276 @@ +"""API routes for study sponsors and collaborators metadata""" + +import typing + +from flask import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.study_metadata_namespace import api + +from ..authentication import is_granted + +study_team_metadata = api.model( + "StudyTeamMetadata", + { + "sponsors": fields.Nested( # Changed to Nested to make it a single object + api.model( + "StudySponsors", + { + "responsible_party_type": fields.String( + required=True, + enum=[ + "Sponsor", + "Principal Investigator", + "Sponsor-Investigator", + ], + ), + "responsible_party_investigator_first_name": fields.String( + required=True + ), + "responsible_party_investigator_last_name": fields.String( + required=True + ), + "responsible_party_investigator_title": fields.String( + required=True + ), + "lead_sponsor_name": fields.String(required=True), + "responsible_party_investigator_identifier_value": fields.String( + required=False + ), + "responsible_party_investigator_identifier_scheme": fields.String( + required=False + ), + "responsible_party_investigator_identifier_scheme_uri": fields.String( + required=False + ), + "responsible_party_investigator_affiliation_name": fields.String( + required=False + ), + "responsible_party_investigator_affiliation_identifier_scheme": fields.String( + required=False + ), + "responsible_party_investigator_affiliation_identifier_value": fields.String( + required=False + ), + "responsible_party_investigator_affiliation_identifier_scheme_uri": fields.String( + required=False + ), + "lead_sponsor_identifier": fields.String(required=False), + "lead_sponsor_identifier_scheme": fields.String(required=False), + "lead_sponsor_identifier_scheme_uri": fields.String(required=False), + }, + ) + ), + "collaborators": fields.List( + fields.Nested( + api.model( + 
"StudyCollaborators", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "identifier": fields.String(required=False), + "identifier_scheme": fields.String(required=False), + "identifier_scheme_uri": fields.String(required=False), + "created_at": fields.Integer(required=True), + }, + ) + ) + ), + }, +) + + +@api.route("/study//metadata/team") +class StudySponsorsResource(Resource): + """Study team Metadata""" + + @api.doc("sponsors") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(study_team_metadata) + def get(self, study_id: int): + """Get study team metadata""" + study_ = model.Study.query.get(study_id) + + study_sponsors_ = study_.study_sponsors + study_collaborators_ = study_.study_collaborators + # print(study_sponsors_.to_dict(),"ggg") + + return { + "sponsors": study_sponsors_.to_dict(), + "collaborators": [collab.to_dict() for collab in study_collaborators_], + } + 200 + + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(study_team_metadata) + def post(self, study_id: int): + """Update study team metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": ["collaborators", "sponsors"], + "properties": { + "collaborators": { + "type": "array", + "additionalProperties": False, + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + }, + "required": [ + "name", + "identifier", + "identifier_scheme", + ], + }, + }, + "sponsors": { + "type": "object", + "additionalProperties": False, + "properties": { + "responsible_party_type": { + "type": ["string", "null"], + "enum": [ + "Sponsor", + "Principal Investigator", + "Sponsor-Investigator", + ], + }, + "responsible_party_investigator_first_name": { + "type": 
"string", + }, + "responsible_party_investigator_last_name": { + "type": "string", + }, + "responsible_party_investigator_title": { + "type": "string", + }, + "responsible_party_investigator_identifier_value": { + "type": "string", + }, + "responsible_party_investigator_identifier_scheme": { + "type": "string", + }, + "responsible_party_investigator_identifier_scheme_uri": { + "type": "string", + }, + "responsible_party_investigator_affiliation_name": { + "type": "string", + }, + "responsible_party_investigator_affiliation_identifier_scheme": { + "type": "string", + }, + "responsible_party_investigator_affiliation_identifier_value": { + "type": "string", + }, + "responsible_party_investigator_affiliation_identifier_scheme_uri": { + "type": "string", + }, + "lead_sponsor_name": {"type": "string"}, + "lead_sponsor_identifier": {"type": "string"}, + "lead_sponsor_identifier_scheme": {"type": "string"}, + "lead_sponsor_identifier_scheme_uri": { + "type": "string", + }, + }, + "required": [ + "responsible_party_type", + "lead_sponsor_name", + "responsible_party_investigator_last_name", + "responsible_party_investigator_first_name", + "responsible_party_investigator_title", + ], + }, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + data: typing.Union[dict, typing.Any] = request.json + + if data["sponsors"]["responsible_party_type"] in [ + "Principal Investigator", + "Sponsor-Investigator", + ]: + if not data["sponsors"]["responsible_party_investigator_last_name"]: + return "Principal Investigator name is required", 400 + if not data["sponsors"]["responsible_party_investigator_first_name"]: + return "Principal Investigator name is required", 400 + + if not data["sponsors"]["responsible_party_investigator_title"]: + return "Principal Investigator title is required", 400 + + investigator_first_name = data["sponsors"][ + "responsible_party_investigator_first_name" + ] + investigator_last_name = data["sponsors"][ + 
"responsible_party_investigator_last_name" + ] + investigator_title = data["sponsors"][ + "responsible_party_investigator_title" + ] + + if investigator_first_name == "": + return "Principal Investigator first name cannot be empty", 400 + if investigator_last_name == "": + return "Principal Investigator last name cannot be empty", 400 + if investigator_title == "": + return "Principal Investigator title cannot be empty", 400 + + study_ = model.Study.query.get(study_id) + + # Check user permissions + if not is_granted("study_metadata", study_): + return "Access denied, you can not modify study", 403 + + list_of_elements = [] + for i in data["collaborators"]: + if "id" in i and i["id"]: + study_collaborators_ = model.StudyCollaborators.query.get(i["id"]) + study_collaborators_.update(i) + else: + study_collaborators_ = model.StudyCollaborators.from_data(study_, i) + model.db.session.add(study_collaborators_) + list_of_elements.append(study_collaborators_.to_dict()) + + study_.study_sponsors.update(data["sponsors"]) + + model.db.session.commit() + + return { + "collaborators": list_of_elements, + "sponsors": study_.study_sponsors.to_dict(), + }, 201 + + +@api.route("/study//metadata/collaborators/") +class StudyLocationUpdate(Resource): + """delete Study Collaborators Metadata""" + + @api.doc("delete study collaborators") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, collaborator_id: int): + """Delete study collaborators metadata""" + study_obj = model.Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + study_collaborators_ = model.StudyCollaborators.query.get(collaborator_id) + + model.db.session.delete(study_collaborators_) + + model.db.session.commit() + + return Response(status=204) diff --git a/apis/user.py b/apis/user.py index 68281ce7..44581a07 100644 --- a/apis/user.py +++ b/apis/user.py @@ -78,6 +78,7 @@ def 
validate_is_valid_email(instance): "properties": { "id": {"type": "string"}, "email_address": {"type": "string", "format": "valid_email"}, + "email_verified": {"type": "boolean"}, "username": {"type": "string", "minLength": 0}, "first_name": {"type": "string", "minLength": 0}, "last_name": {"type": "string", "minLength": 0}, @@ -104,7 +105,7 @@ def validate_is_valid_email(instance): data: Union[Any, dict] = request.json user = model.User.query.get(g.user.id) - # user.update(data) # don't update the username and email_address for now + # user.update(data) # don't update the email_address for now user_details = user.user_details user_details.update(data) model.db.session.commit() diff --git a/app.py b/app.py index 00f93415..0cf3a29e 100644 --- a/app.py +++ b/app.py @@ -11,8 +11,9 @@ from flask import Flask, g, request from flask_bcrypt import Bcrypt from flask_cors import CORS +from flask_mailman import Mail from growthbook import GrowthBook -from sqlalchemy import MetaData, inspect +from sqlalchemy import MetaData, inspect, text from sqlalchemy.ext.compiler import compiles from sqlalchemy.schema import DropTable from waitress import serve @@ -27,6 +28,7 @@ # from pyfairdatatools import __version__ bcrypt = Bcrypt() +mail = Mail() # Add Cascade to Table Drop Call in destroy-schema CLI command @@ -76,7 +78,9 @@ def create_app(config_module=None, loglevel="INFO"): bcrypt.init_app(app) caching.cache.init_app(app) + mail.init_app(app) cors_origins = [ + "https://witty-mushroom-.*-.*.centralus.4.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://staging.app.fairhub.io", "https://app.fairhub.io", @@ -125,16 +129,6 @@ def create_schema(): with engine.begin(): model.db.create_all() - @app.cli.command("destroy-schema") - def destroy_schema(): - """Create 
the database schema.""" - # If DB is Azure, Skip - if config.FAIRHUB_DATABASE_URL.find("azure") > -1: - return - engine = model.db.session.get_bind() - with engine.begin(): - model.db.drop_all() - @app.cli.command("cycle-schema") def cycle_schema(): """Destroy then re-create the database schema.""" @@ -159,6 +153,17 @@ def list_schemas(): for schema_name in schema_names: print(schema_name) + @app.cli.command("destroy-schema") + def destroy_schema(): + """Create the database schema.""" + # If DB is Azure, Skip + if config.FAIRHUB_DATABASE_URL.find("azure") > -1: + return + engine = model.db.session.get_bind() + with engine.begin() as conn: + model.db.drop_all() + conn.execute(text("DROP TABLE IF EXISTS alembic_version")) # type: ignore + @app.cli.command("inspect-schema") @click.argument("schema") def inspect_schema(schema=None): @@ -221,7 +226,7 @@ def on_after_request(resp): if request.path.startswith(route): return resp - if "token" not in request.cookies: + if "token" not in request.cookies or not g.token: return resp token: str = request.cookies.get("token") or "" # type: ignore @@ -254,15 +259,25 @@ def on_after_request(resp): if token_blacklist: resp.delete_cookie("token") return resp + expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( minutes=180 ) - new_token = jwt.encode( - {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, - config.FAIRHUB_SECRET, - algorithm="HS256", + session = model.Session.query.get(g.token) + session_expires_at = datetime.datetime.fromtimestamp( + session.expires_at, timezone.utc ) - resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="None") + + if expired_in - session_expires_at < datetime.timedelta(minutes=90): + new_token = jwt.encode( + {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, + config.FAIRHUB_SECRET, + algorithm="HS256", + ) + resp.set_cookie( + "token", new_token, secure=True, httponly=True, samesite="None" + ) + session.expires_at 
= expired_in.timestamp() app.logger.info("after request") app.logger.info(request.headers.get("Origin")) @@ -278,7 +293,6 @@ def on_after_request(resp): # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, # Access-Control-Allow-Credentials" app.logger.info(resp.headers) - return resp @app.errorhandler(ValidationException) diff --git a/config.py b/config.py index a198d1c4..9652f7ef 100644 --- a/config.py +++ b/config.py @@ -21,6 +21,11 @@ def get_env(key): FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = get_env("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") +AZURE_CONTAINER = get_env("AZURE_CONTAINER") FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION = get_env("FAIRHUB_TEMP_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION") FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER = get_env("FAIRHUB_TEMP_BLOB_STORAGE_REDCAP_ETL_CONTAINER") FAIRHUB_GROWTHBOOK_CLIENT_KEY = get_env("FAIRHUB_GROWTHBOOK_CLIENT_KEY") +FAIRHUB_SMTP_CONNECTION_STRING = get_env("FAIRHUB_SMTP_CONNECTION_STRING") +FAIRHUB_SMTP_SENDER_EMAIL_ADDRESS = get_env("FAIRHUB_SMTP_SENDER_EMAIL_ADDRESS") +AZURE_STORAGE_CONNECTION_STRING = get_env("AZURE_STORAGE_CONNECTION_STRING") +FAIRHUB_FRONTEND_URL = get_env("FAIRHUB_FRONTEND_URL") \ No newline at end of file diff --git a/db.py b/db.py new file mode 100644 index 00000000..f0b13d6f --- /dev/null +++ b/db.py @@ -0,0 +1,3 @@ +from flask_sqlalchemy import SQLAlchemy + +db = SQLAlchemy() diff --git a/dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py b/dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py index 5fac771b..900bd0b9 100644 --- a/dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py +++ b/dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py @@ -9,7 +9,6 @@ from typing import Sequence, Union import alembic -import sqlalchemy as sa # revision identifiers, used by Alembic. 
diff --git a/model/__init__.py b/model/__init__.py index 33959375..742d6265 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,5 +1,5 @@ -from model.dataset_metadata.dataset_contributor import DatasetContributor -from model.dataset_metadata.dataset_related_identifier import DatasetRelatedIdentifier +from .dataset_metadata.dataset_contributor import DatasetContributor +from .dataset_metadata.dataset_related_identifier import DatasetRelatedIdentifier from .dataset import Dataset from .dataset_metadata.dataset_access import DatasetAccess @@ -17,6 +17,8 @@ from .dataset_metadata.dataset_title import DatasetTitle from .db import db from .email_verification import EmailVerification +from .file import FileStructure +from .folder import FolderStructure from .invited_study_contributor import StudyInvitedContributor from .notification import Notification from .participant import Participant @@ -48,6 +50,7 @@ from .user import User from .user_details import UserDetails from .version import Version +from .session import Session from .version_readme import VersionReadme __all__ = [ @@ -93,6 +96,8 @@ "StudyRedcap", "StudyDashboard", "StudyStatus", + "FileStructure", + "FolderStructure", "Identifiers", "Arm", "StudyInvitedContributor", @@ -102,4 +107,5 @@ "UserDetails", "Notification", "VersionReadme", + "Session", ] diff --git a/model/dataset.py b/model/dataset.py index 320f2929..e8e255d6 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -128,6 +128,7 @@ def to_dict(self): i.description if i.type == "Abstract" else None for i in self.dataset_description # type: ignore ][0], + "updated_on": self.updated_on, } def to_dict_dataset_metadata(self): diff --git a/model/email_verification.py b/model/email_verification.py index 8961b8f2..43a582de 100644 --- a/model/email_verification.py +++ b/model/email_verification.py @@ -1,17 +1,22 @@ import datetime +import random +import uuid from datetime import timezone from .db import db class EmailVerification(db.Model): # type: 
ignore - def __init__(self): + def __init__(self, user): self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.id = str(uuid.uuid4()) + self.token = str(random.randint(10 ** (7 - 1), (10**7) - 1)) + self.user = user __tablename__ = "email_verification" id = db.Column(db.CHAR(36), primary_key=True) - token = db.Column(db.CHAR(36), nullable=False) - created_at = db.Column(db.CHAR(36), nullable=False) + token = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) user = db.relationship("User", back_populates="email_verification") diff --git a/model/file.py b/model/file.py new file mode 100644 index 00000000..6b82f2f2 --- /dev/null +++ b/model/file.py @@ -0,0 +1,14 @@ +class FileStructure: # type: ignore + def __init__(self, name, content_length, updated_on, is_directory): + self.name = name + self.size = content_length + self.updated_on = updated_on + self.is_directory = is_directory + + def to_dict(self): + return { + "name": self.name, + "content_length": self.size, + "updated_on": self.updated_on.isoformat() if self.updated_on else None, + "is_directory": self.is_directory, + } diff --git a/model/folder.py b/model/folder.py new file mode 100644 index 00000000..91423a93 --- /dev/null +++ b/model/folder.py @@ -0,0 +1,23 @@ +import typing + +from .file import FileStructure + + +class FolderStructure: # type: ignore + files: typing.List[typing.Union["FileStructure", "FolderStructure"]] + + def __init__(self, name, content_length, updated_on, is_directory, files): + self.name = name + self.content_length = content_length + self.updated_on = updated_on + self.is_directory = is_directory + self.files = files + + def to_dict(self): + return { + "name": self.name, + "content_length": self.content_length, + "updated_on": self.updated_on.isoformat() if self.updated_on else None, + "is_directory": self.is_directory, + "files": [i.to_dict() for i 
in self.files], + } diff --git a/model/notification.py b/model/notification.py index b123c188..72fdfae4 100644 --- a/model/notification.py +++ b/model/notification.py @@ -6,17 +6,18 @@ class Notification(db.Model): # type: ignore - def __init__(self): + def __init__(self, user): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.user = user __tablename__ = "notification" id = db.Column(db.CHAR(36), primary_key=True) - title = db.Column(db.String, nullable=True) - message = db.Column(db.String, nullable=True) - type = db.Column(db.String, nullable=True) + title = db.Column(db.String, nullable=False) + message = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=False) target = db.Column(db.String, nullable=True) - read = db.Column(db.BOOLEAN, nullable=True) + read = db.Column(db.BOOLEAN, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) @@ -34,10 +35,10 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - user = Notification() - user.update(data) - return user + def from_data(user, data: dict): + notification = Notification(user) + notification.update(data) + return notification def update(self, data: dict): self.title = data["title"] diff --git a/model/session.py b/model/session.py new file mode 100644 index 00000000..aabe6cbb --- /dev/null +++ b/model/session.py @@ -0,0 +1,35 @@ +from . 
import User +from .db import db + + +class Session(db.Model): # type: ignore + def __init__(self, id, user: User): # pylint: disable=redefined-builtin + self.id = id + self.user = user + + __tablename__ = "session" + id = db.Column(db.CHAR(36), primary_key=True) + expires_at = db.Column(db.BigInteger, nullable=False) + + user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) + + user = db.relationship( + "User", + back_populates="session", + ) + + def to_dict(self): + return { + "id": self.id, + "expires_at": self.expires_at, + "user_id": self.user_id, + } + + @staticmethod + def from_data(id, expires_at, user: User): # pylint: disable=redefined-builtin + session = Session(id, user) + session.update(expires_at) + return session + + def update(self, expires_at): + self.expires_at = expires_at diff --git a/model/study.py b/model/study.py index d09312a1..829a2e90 100644 --- a/model/study.py +++ b/model/study.py @@ -1,5 +1,7 @@ import datetime +import re import uuid +from typing import cast from flask import g @@ -25,7 +27,6 @@ def __init__(self): self.study_design = model.StudyDesign(self) self.study_eligibility = model.StudyEligibility(self) self.study_description = model.StudyDescription(self) - self.study_identification.append(model.StudyIdentification(self, False)) self.study_other = model.StudyOther(self) self.study_oversight = model.StudyOversight(self) @@ -34,7 +35,7 @@ def __init__(self): title = db.Column(db.String(300), nullable=False) image = db.Column(db.String, nullable=False) - acronym = db.Column(db.String(14), nullable=False) + short_description = db.Column(db.String(300), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) @@ -42,120 +43,120 @@ def __init__(self): dataset = db.relationship( "Dataset", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_contributors = db.relationship( "StudyContributor", 
back_populates="study", lazy="dynamic", - cascade="all, delete", + cascade="all, delete-orphan", ) participants = db.relationship( "Participant", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) invited_contributors = db.relationship( "StudyInvitedContributor", back_populates="study", lazy="dynamic", - cascade="all, delete", + cascade="all, delete-orphan", ) study_arm = db.relationship( "StudyArm", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_sponsors = db.relationship( "StudySponsors", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_central_contact = db.relationship( "StudyCentralContact", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_description = db.relationship( "StudyDescription", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_design = db.relationship( "StudyDesign", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_eligibility = db.relationship( "StudyEligibility", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_identification = db.relationship( "StudyIdentification", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) # NOTE: Has not been tested study_redcap = db.relationship( - "StudyRedcap", back_populates="study", cascade="all, delete" + "StudyRedcap", back_populates="study", cascade="all, delete-orphan" ) # NOTE: Has not been tested study_dashboard = db.relationship( - "StudyDashboard", back_populates="study", cascade="all, delete" + "StudyDashboard", back_populates="study", cascade="all, delete-orphan" ) study_intervention = db.relationship( "StudyIntervention", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_location = db.relationship( 
"StudyLocation", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_other = db.relationship( "StudyOther", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_keywords = db.relationship( "StudyKeywords", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_conditions = db.relationship( "StudyConditions", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_collaborators = db.relationship( "StudyCollaborators", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_oversight = db.relationship( "StudyOversight", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_overall_official = db.relationship( "StudyOverallOfficial", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_status = db.relationship( "StudyStatus", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) def to_dict(self): @@ -170,7 +171,7 @@ def to_dict(self): return { "id": self.id, "title": self.title, - "acronym": self.acronym, + "short_description": self.short_description, "image": self.image, "created_at": self.created_at, "updated_on": self.updated_on, @@ -179,7 +180,9 @@ def to_dict(self): self.study_description.brief_summary if self.study_description else None ), "owner": owner.to_dict()["id"] if owner else None, - "role": contributor_permission.to_dict()["role"], + "role": contributor_permission.to_dict()["role"] + if contributor_permission + else None, } def to_dict_study_metadata(self): @@ -240,22 +243,97 @@ def update(self, data: dict): """Updates the study from a dictionary""" if not data["title"]: raise exception.ValidationException("title is required") - if not data["image"]: - raise exception.ValidationException("image is required") self.title = data["title"] - 
self.image = data["image"] - self.acronym = data["acronym"] + self.short_description = data["short_description"] + if "image" in data and data["image"]: + self.image = data["image"] self.updated_on = datetime.datetime.now(datetime.timezone.utc).timestamp() - def validate(self): - """Validates the study""" - violations: list = [] - # if self.description.trim() == "": - # violations.push("A description is required") - # if self.keywords.length < 1: - # violations.push("At least one keyword must be specified") - return violations + def update_identification_id(self, data): + clinical_id = None + identifiers = [ + i + for i in cast(list, self.study_identification) + if re.match(r"^NCT\d{8}$", i.identifier) + ] + if not identifiers: + clinical_id = model.StudyIdentification(self, False) + self.study_identification.append(clinical_id) + model.db.session.add(clinical_id) + else: + clinical_id = identifiers[0] + + if clinical_id is not None: + clinical_id.updating_from_integration(data) + + def import_from_clinical_data(self, data): + """Updates the study from a dictionary""" + + self.study_description.updating_from_integration(data) + self.study_status.updating_from_integration(data) + self.study_eligibility.updating_from_integration(data) + self.study_sponsors.updating_from_integration(data) + self.study_design.updating_from_integration(data) + self.study_oversight.updating_from_integration(data) + self.title = data.get("identificationModule", {}).get("officialTitle", "") + interventions_data = data.get("armsInterventionsModule", {}).get( + "interventions", [] + ) + self.study_intervention.clear() + for intervention_dict in interventions_data: + intervention = model.StudyIntervention(self) + intervention.updating_from_integration(intervention_dict) + self.study_intervention.append(intervention) + + keywords_data = data.get("conditionsModule", {}).get("keywords", []) + self.study_keywords.clear() + for k_dict in keywords_data: + keywords = model.StudyKeywords(self) + 
keywords.updating_from_integration(k_dict) + self.study_keywords.append(keywords) + + conditions_data = data.get("conditionsModule", {}).get("conditions", []) + self.study_conditions.clear() + for conditions_dict in conditions_data: + conditions = model.StudyConditions(self) + conditions.updating_from_integration(conditions_dict) + self.study_conditions.append(conditions) + + collaborators_data = data.get("sponsorCollaboratorsModule", {}).get( + "collaborators", [] + ) + self.study_collaborators.clear() + + for collaborator_dict in collaborators_data: + collaborator = model.StudyCollaborators(self) + collaborator.updating_from_integration(collaborator_dict) + self.study_collaborators.append(collaborator) + + arms_data = data.get("armsInterventionsModule", {}).get("armGroups", []) + self.study_arm.clear() + + for arm_dict in arms_data: + arm = model.StudyArm(self) + arm.updating_from_integration(arm_dict) + self.study_arm.append(arm) + + overall_official_data = data.get("contactsLocationsModule", {}).get( + "overallOfficials", [] + ) + self.study_overall_official.clear() + + for oo_dict in overall_official_data: + o_o = model.StudyOverallOfficial(self) + o_o.updating_from_integration(oo_dict) + self.study_overall_official.append(o_o) + + location_data = data.get("contactsLocationsModule", {}).get("locations", []) + self.study_location.clear() + for location_dict in location_data: + location = model.StudyLocation(self) + location.updating_from_integration(location_dict) + self.study_location.append(location) def touch(self): self.updated_on = datetime.datetime.now(datetime.timezone.utc).timestamp() diff --git a/model/study_contributor.py b/model/study_contributor.py index 7ba8eb21..bc09153d 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -33,6 +33,9 @@ def to_dict(self): "name": ( self.user.user_details.first_name if self.user.user_details else None ), + "lastName": ( + self.user.user_details.last_name if self.user.user_details else None + 
), "email_address": self.user.email_address, "orcid": self.user.user_details.orcid if self.user.user_details else None, "role": self.permission, diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index db4609cf..00e99028 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -66,7 +66,13 @@ def update(self, data: dict): self.intervention_list = data["intervention_list"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyArm from a dictionary""" + self.label = data.get("label", "") + self.description = data.get("description", "") + # self.type = "" + self.intervention_list = [] + self.type = (data.get("type") or "").replace("_", " ").title() + self.intervention_list = [ + i.replace("_", " ").title() for i in data.get("interventionNames", []) + ] diff --git a/model/study_metadata/study_central_contact.py b/model/study_metadata/study_central_contact.py index 9822ecc0..440cbc22 100644 --- a/model/study_metadata/study_central_contact.py +++ b/model/study_metadata/study_central_contact.py @@ -97,7 +97,9 @@ def update(self, data: dict): self.email_address = data["email_address"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyCentralContact from a dictionary""" + self.name = data.get("name", "") + self.identifier = "" + self.scheme = "" + self.scheme_uri = "" diff --git a/model/study_metadata/study_collaborators.py b/model/study_metadata/study_collaborators.py index ca02af5d..fabec477 100644 --- a/model/study_metadata/study_collaborators.py +++ b/model/study_metadata/study_collaborators.py @@ -64,7 +64,9 @@ def update(self, data: dict): self.scheme_uri = data["identifier_scheme_uri"] self.study.touch() - def 
validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyCollaborators from a dictionary""" + self.name = data.get("name", "") + self.identifier = "" + self.scheme = "" + self.scheme_uri = "" diff --git a/model/study_metadata/study_conditions.py b/model/study_metadata/study_conditions.py index 6d0bf83b..30bbb0ac 100644 --- a/model/study_metadata/study_conditions.py +++ b/model/study_metadata/study_conditions.py @@ -67,7 +67,10 @@ def update(self, data: dict): self.condition_uri = data["condition_uri"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyConditions from a dictionary""" + self.name = data + self.classification_code = "" + self.scheme = "" + self.scheme_uri = "" + self.condition_uri = "" diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index f5115882..2cc2c041 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -50,7 +50,9 @@ def update(self, data: dict): self.detailed_description = data["detailed_description"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyDescription from a dictionary""" + self.brief_summary = data.get("descriptionModule", {}).get("briefSummary", "") + self.detailed_description = data.get("descriptionModule", {}).get( + "detailedDescription", "" + ) diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 40567ca0..e1433ce2 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -115,7 +115,69 @@ def update(self, data: dict): self.is_patient_registry = 
data["is_patient_registry"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyDesign from a dictionary""" + self.study_type = data.get("designModule", {}).get("studyType", "").capitalize() + self.design_observational_model_list = data.get("designModule", {}).get( + "observationalModel", "" + ) + self.design_time_perspective_list = data.get("designModule", {}).get( + "timePerspective", "" + ) + self.phase_list = data.get("designModule", {}).get("phases", []) + self.phase_list = [ + i.lower().replace("phase", "Phase ") for i in self.phase_list + ] + self.design_allocation = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("allocation", "") + .replace("_", "-") + .title() + ) + self.design_primary_purpose = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("primaryPurpose", "") + .capitalize() + ) + self.design_intervention_model = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("interventionModel", "") + .capitalize() + ) + val = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("maskingInfo", {}) + .get("masking", "") + ) + self.design_masking = ( + val + if val == "N/A" + else "None (Open Label)" + if val == "NONE" + else "Blinded (no details)" + if val.lower().startswith("blind") + else val.capitalize() + ) + + self.design_who_masked_list = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("maskingInfo", {}) + .get("whoMasked", []) + ) + self.design_who_masked_list = [ + i.replace("_", " ").title() for i in self.design_who_masked_list + ] + + self.enrollment_count = ( + data.get("designModule", {}).get("enrollmentInfo", {}).get("count", "") + ) + self.enrollment_type = ( + data.get("designModule", {}).get("enrollmentInfo", {}).get("type", "") + ).capitalize() + if self.enrollment_type == "Estimated": + self.enrollment_type = "Anticipated" diff --git 
a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 8c6ee872..49bee3b3 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -99,7 +99,16 @@ def update(self, data: dict): self.sampling_method = data["sampling_method"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyEligibility from a dictionary""" + eligibility = data.get("eligibilityModule", {}) + self.sex = eligibility.get("sex", "").capitalize() + val = eligibility.get("healthyVolunteers", "false") + self.healthy_volunteers = "Yes" if str(val).lower() == "true" else "No" + self.study_population = eligibility.get("studyPopulation", "") + raw = eligibility.get("samplingMethod", "") + self.sampling_method = raw.replace("_", "-", 1).replace("_", " ").title() + min_age = eligibility.get("minimumAge", "").split() + max_age = eligibility.get("maximumAge", "").split() + self.minimum_age_value, self.minimum_age_unit = (min_age + ["", ""])[:2] + self.maximum_age_value, self.maximum_age_unit = (max_age + ["", ""])[:2] diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 299f20bd..33d7f677 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -65,7 +65,8 @@ def update(self, data: dict): self.identifier_link = data["identifier_link"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyIdentification from a dictionary""" + self.identifier_type = "Other Identifier" + self.identifier_domain = "clinicaltrials.gov" + self.identifier = data.get("identificationModule", {}).get("nctId", "").strip() diff --git 
a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 89ff4b90..373f7c7e 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -67,7 +67,9 @@ def update(self, data: dict): self.other_name_list = data["other_name_list"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyIntervention from a dictionary""" + self.name = data.get("name", "") + self.type = data.get("type", "").replace("_", " ").title() + self.description = data.get("description", "") + self.other_name_list = "" diff --git a/model/study_metadata/study_keywords.py b/model/study_metadata/study_keywords.py index 5ff213a9..698a4924 100644 --- a/model/study_metadata/study_keywords.py +++ b/model/study_metadata/study_keywords.py @@ -67,7 +67,10 @@ def update(self, data: dict): self.keyword_uri = data["keyword_uri"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyKeywords from a dictionary""" + self.name = data + self.classification_code = "" + self.scheme = "" + self.scheme_uri = "" + self.keyword_uri = "" diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index cbfe9f91..806f5f70 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -75,7 +75,11 @@ def update(self, data: dict): self.country = data["country"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyLocation from a dictionary""" + self.facility = data.get("facility", "") + self.city = data.get("city", "") + self.state = 
data.get("state", "") + self.zip = data.get("zip", "") + self.country = data.get("country", "") + self.status = "" diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index a6f6c561..474b4c7a 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -96,7 +96,21 @@ def update(self, data: dict): self.role = data["role"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyOverallOfficial from a dictionary""" + parts = data.get("name", "").strip().split() + self.first_name, self.last_name = ( + (parts[0], parts[1] if len(parts) > 1 else "") if parts else ("", "") + ) + + self.affiliation = data.get("affiliation", "") + self.role = data.get("role", "") + self.role = data.get("role", "").replace("_", " ").title() + + self.degree = "" + self.identifier = "" + self.identifier_scheme = "" + self.identifier_scheme_uri = "" + self.affiliation_identifier = "" + self.affiliation_identifier_scheme = "" + self.affiliation_identifier_scheme_uri = "" diff --git a/model/study_metadata/study_oversight.py b/model/study_metadata/study_oversight.py index 3fc0c0cb..966de04b 100644 --- a/model/study_metadata/study_oversight.py +++ b/model/study_metadata/study_oversight.py @@ -53,7 +53,18 @@ def update(self, data: dict): self.has_dmc = data["has_dmc"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyOversight from a dictionary""" + self.has_dmc = ( + "Yes" if (data.get("oversightModule", {}).get("oversightHasDmc")) else "No" + ) + self.fda_regulated_drug = ( + "Yes" + if (data.get("oversightModule", {}).get("isFdaRegulatedDrug")) + else "No" + ) + self.fda_regulated_device = ( + "Yes" + if 
(data.get("oversightModule", {}).get("isFdaRegulatedDevice")) + else "No" + ) diff --git a/model/study_metadata/study_sponsors.py b/model/study_metadata/study_sponsors.py index 1e7832ca..26deef1e 100644 --- a/model/study_metadata/study_sponsors.py +++ b/model/study_metadata/study_sponsors.py @@ -141,7 +141,33 @@ def update(self, data: dict): "lead_sponsor_identifier_scheme_uri" ] - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudySponsors from a dictionary""" + self.responsible_party_type = ( + data.get("sponsorCollaboratorsModule", {}) + .get("responsibleParty", {}) + .get("type", "") + .replace("_", " ") + .title() + ) + party = data.get("sponsorCollaboratorsModule", {}).get("responsibleParty", {}) + full_name = party.get("investigatorFullName", "").split(maxsplit=1) + self.responsible_party_investigator_first_name = ( + full_name[0] if full_name else "" + ) + self.responsible_party_investigator_last_name = ( + full_name[1] if len(full_name) > 1 else "" + ) + self.responsible_party_investigator_affiliation_name = ( + party.get("investigatorAffiliation", "").title().capitalize() + ) + self.responsible_party_investigator_affiliation_name = ( + data.get("sponsorCollaboratorsModule", {}) + .get("responsibleParty", {}) + .get("investigatorAffiliation", "") + ) + self.lead_sponsor_name = ( + data.get("sponsorCollaboratorsModule", {}) + .get("leadSponsor", {}) + .get("name", "") + ) diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index dd1ffe78..45ff48c6 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -1,3 +1,5 @@ +from datetime import datetime + from model import Study from ..db import db @@ -68,7 +70,59 @@ def update(self, data: dict): self.completion_date_type = data["completion_date_type"] self.study.touch() - def validate(self): - 
"""Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyStatus from a dictionary""" + status_map = { + "WITHDRAWN": "Withdrawn", + "RECRUITING": "Recruiting", + "ACTIVE_NOT_RECRUITING": "Active, not recruiting", + "NOT_YET_RECRUITING": "Not yet recruiting", + "SUSPENDED": "Suspended", + "ENROLLING_BY_INVITATION": "Enrolling by invitation", + "COMPLETED": "Completed", + "TERMINATED": "Terminated", + } + raw_status = data.get("statusModule", {}).get("overallStatus", "") + self.overall_status = status_map.get(raw_status, "") + + self.overall_status = ( + data.get("statusModule", {}) + .get("overallStatus", "") + .replace("_", " ") + .title() + ) + s_d = data.get("statusModule", {}).get("startDateStruct", {}).get("date") + self.start_date = ( + datetime.strptime(s_d, "%Y-%m-%d") if s_d and len(s_d) == 10 else None + ) + + c_d = data.get("statusModule", {}).get("completionDateStruct", {}).get("date") + self.completion_date = ( + datetime.strptime(c_d, "%Y-%m-%d") if c_d and len(c_d) == 10 else None + ) + + self.start_date_type = ( + "Anticipated" + if data.get("statusModule", {}) + .get("startDateStruct", {}) + .get("type", "") + .lower() + == "estimated" + else data.get("statusModule", {}) + .get("startDateStruct", {}) + .get("type", "") + .capitalize() + ) + + self.completion_date_type = ( + "Anticipated" + if data.get("statusModule", {}) + .get("completionDateStruct", {}) + .get("type", "") + .lower() + == "estimated" + else data.get("statusModule", {}) + .get("completionDateStruct", {}) + .get("type", "") + .capitalize() + ) diff --git a/model/user.py b/model/user.py index 9e741efc..85351e51 100644 --- a/model/user.py +++ b/model/user.py @@ -15,28 +15,60 @@ def __init__(self, password): self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() self.set_password(password) self.user_details = model.UserDetails(self) + self.email_verified = True + 
db.Column(db.BigInteger, nullable=False) __tablename__ = "user" id = db.Column(db.CHAR(36), primary_key=True) email_address = db.Column(db.String, nullable=False, unique=True) username = db.Column(db.String, nullable=False, unique=True) hash = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - email_verified = db.Column(db.String, nullable=True) + email_verified = db.Column(db.BOOLEAN, nullable=True) + password_reset_token = db.Column(db.String, nullable=True) study_contributors = db.relationship("StudyContributor", back_populates="user") email_verification = db.relationship("EmailVerification", back_populates="user") user_details = db.relationship("UserDetails", uselist=False, back_populates="user") token_blacklist = db.relationship("TokenBlacklist", back_populates="user") notification = db.relationship("Notification", back_populates="user") + study_contributors = db.relationship( + "StudyContributor", + back_populates="user", + cascade="all, delete", + ) + email_verification = db.relationship( + "EmailVerification", + back_populates="user", + cascade="all, delete", + ) + user_details = db.relationship( + "UserDetails", + uselist=False, + back_populates="user", + cascade="all, delete", + ) + token_blacklist = db.relationship( + "TokenBlacklist", + back_populates="user", + cascade="all, delete", + ) + notification = db.relationship( + "Notification", + back_populates="user", + cascade="all, delete", + ) + session = db.relationship("Session", back_populates="user") def to_dict(self): + # latest_object = max(self.email_verification, key=lambda x: x.created_at) if self.email_verification else None return { "id": self.id, "email_address": self.email_address, "username": self.username, "first_name": self.user_details.first_name if self.user_details else None, "last_name": self.user_details.last_name if self.user_details else None, + "email_verified": self.email_verified, } @staticmethod @@ -51,9 +83,9 @@ def update(self, data): 
data["username"] if "username" in data else data["email_address"] ) # self.email_verified = data["email_verified"] - # self.username = data["username"] - # self.hash = data["hash"] - # self.created_at = data["created_at"] + + def update_password_reset(self, password_reset_token): + self.password_reset_token = password_reset_token def set_password(self, password: str): """setting bcrypt passwords""" @@ -66,3 +98,33 @@ def check_password(self, password: str): app.bcrypt.generate_password_hash(password).decode("utf-8") is_valid = app.bcrypt.check_password_hash(self.hash, password) return is_valid + + def verify_token(self, token: str) -> bool: + latest_object = ( + max(self.email_verification, key=lambda x: x.created_at) + if self.email_verification + else None + ) + if not latest_object or token != latest_object.token: + return False + current_time = datetime.datetime.now() + datetime_obj = datetime.datetime.fromtimestamp(latest_object.created_at) + formatted_time = datetime_obj.strftime("%Y-%m-%d %H:%M:%S.%f") + created_time = datetime.datetime.strptime( + formatted_time, "%Y-%m-%d %H:%M:%S.%f" + ) + return current_time - created_time < datetime.timedelta(minutes=15) + + def generate_token(self) -> str: + email_verification = model.EmailVerification(self) + db.session.add(email_verification) + db.session.commit() + return email_verification.token + + def change_email(self, email: str): + if email == self.email_address: + return + + self.email_verified = False + self.email_address = email + self.generate_token() diff --git a/modules/__init__.py b/modules/__init__.py index 1bf7a639..78d0903a 100644 --- a/modules/__init__.py +++ b/modules/__init__.py @@ -1 +1,8 @@ -from .etl import config, transforms, vtypes +from .etl import config +from .etl import transforms +from .etl import vtypes + +from .invitation import signin_notification +from .invitation import send_access_contributors +from .invitation import send_invitation_study +from .invitation import 
send_email_verification diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 7ab320a5..2086e269 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -15,100 +15,104 @@ # Utility Column Groups index_columns: List = [ - "record_id", + "record_id", ] # Data Column Groups data_columns: List = [ - "studyid", - "siteid", - "dm", - "genderid", - "scrsex", - "race", - "race2", - "ethnic", - "dvenvyn", - "dvenvstdat", - "dvenvcrcid", - "dvcgmyn", - "dvcgmstdat", - "dvcgmvrfy", - "dvamwyn", - "dvamwstdat", - "dvamwsn", - "dvrtmthd", - "dvrtnyn", - "dvrtnship", - "mhterm_dm1", - "mhterm_dm2", - "mhterm_predm", - "mh_dm_age", - "mh_a1c", - "cmtrt_a1c", - "cmtrt_insln", - "cmtrt_glcs", - "cmtrt_lfst", - "dricmpdat", + "studyid", + "siteid", + "dm", + "genderid", + "scrsex", + "race", + "race2", + "ethnic", + "dvenvyn", + "dvenvstdat", + "dvenvcrcid", + "dvcgmyn", + "dvcgmstdat", + "dvcgmvrfy", + "dvamwyn", + "dvamwstdat", + "dvamwsn", + "dvrtmthd", + "dvrtnyn", + "dvrtnship", + "mhterm_dm1", + "mhterm_dm2", + "mhterm_predm", + "mh_dm_age", + "mh_a1c", + "cmtrt_a1c", + "cmtrt_insln", + "cmtrt_glcs", + "cmtrt_lfst", + "pacmpdat", +] + +phase_2_columns: List = [ + "race_db", + "export_group", ] computed_columns: List = [ - "phenotypes", - "treatments", - "visitweek", - "visityear", - "visitdate", + "phenotypes", + "treatments", + "visitweek", + "visityear", + "visitdate", ] # Survey Column Groups survey_columns: List = [ - "screening_survey_complete", - "study_enrollment_complete", - "recruitment_survey_complete", - "faq_survey_complete", - "recruitment_survey_management_complete", - "device_distribution_complete", - "preconsent_survey_complete", - "consent_survey_complete", - "staff_consent_attestation_survey_complete", - "demographics_survey_complete", - "health_survey_complete", - "substance_use_survey_complete", - "cesd10_survey_complete", - "paid5_dm_survey_complete", - 
"diabetes_survey_complete", - "dietary_survey_complete", - "ophthalmic_survey_complete", - "px_sdoh_combined_survey_complete", - "px_food_insecurity_survey_complete", - "px_neighborhood_environment_survey_complete", - "px_racial_ethnic_discrimination_survey_complete", - "decline_participation_survey_complete", - "meds_assessment_complete", - "driving_record_complete", - "physical_assessment_complete", - "bcva_complete", - "photopic_mars_complete", - "mesopic_mars_complete", - "monofilament_complete", - "moca_complete", - "ecg_complete", - "retinal_imaging_v2_complete", - "lab_results_complete", - "device_return_complete", - "specimen_management_complete", - "disposition_complete", - "data_management_complete", + "screening_survey_complete", + "study_enrollment_complete", + "recruitment_survey_complete", + "faq_survey_complete", + "recruitment_survey_management_complete", + "device_distribution_complete", + "preconsent_survey_complete", + "consent_survey_complete", + "staff_consent_attestation_survey_complete", + "demographics_survey_complete", + "health_survey_complete", + "substance_use_survey_complete", + "cesd10_survey_complete", + "paid5_dm_survey_complete", + "diabetes_survey_complete", + "dietary_survey_complete", + "ophthalmic_survey_complete", + "px_sdoh_combined_survey_complete", + "px_food_insecurity_survey_complete", + "px_neighborhood_environment_survey_complete", + "px_racial_ethnic_discrimination_survey_complete", + "decline_participation_survey_complete", + "meds_assessment_complete", + "driving_record_complete", + "physical_assessment_complete", + "bcva_complete", + "photopic_mars_complete", + "mesopic_mars_complete", + "monofilament_complete", + "moca_complete", + "ecg_complete", + "lab_results_complete", + "device_return_complete", + "specimen_management_complete", + "disposition_complete", + "data_management_complete", ] # Repeat Survey Column Groups repeat_survey_columns: List = [ - "current_medications_complete", + 
"current_medications_complete", ] repeat_survey_data_columns: List = [ - "current_medications_complete", - "current_medications", + "current_medications_complete", + "current_medications", ] # @@ -116,32 +120,37 @@ # survey_instrument_map: Dict[str, str] = { - "2": "Complete", - "1": "Unverified", - "0": "Incomplete", - "": "Value Unavailable", + "2": "Complete", + "1": "Unverified", + "0": "Incomplete", + "": missing_value_generic, } phenotypes_column_map: Dict[str, str] = { - "mhterm_dm2": "Type II Diabetes", - "mhterm_predm": "Prediabetes", - # "mh_a1c": "Elevated A1C", + "mhterm_dm2": "Type II Diabetes", + "mhterm_predm": "Prediabetes", } -# sex_column_map: Dict[str, str] = { -# "M": "Male", -# "F": "Female", -# "I": "Intersex", -# "888": "Other", -# "777": "Prefer not to say", -# } +race_db_map: Dict[str, str] = { + "white": "White", + "black": "Black", + "hispanic": "Hispanic or Latino", + "asian": "Asian", + "unknown": "Unknown", + "": "Value Unavailable", + "Value Unavailable": "Value Unavailable", +} +export_group_map: Dict[str, str] = { + "pilot": "Pilot", + "year2": "Year 2", +} treatments_column_map: Dict[str, str] = { - "cmtrt_a1c": "Oral Medication", - "cmtrt_glcs": "Non-Insulin Injectable", - "cmtrt_insln": "Insulin Injectable", - "cmtrt_lfst": "Lifestyle Management", + "cmtrt_a1c": "Oral Medication", + "cmtrt_glcs": "Non-Insulin Injectable", + "cmtrt_insln": "Insulin Injectable", + "cmtrt_lfst": "Lifestyle Management", } # @@ -149,10 +158,10 @@ # redcap_report_merge_map: List[Tuple[str, Dict[str, Any]]] = [ - ("participant-list", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), - ("participant-values", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), - ("instrument-status", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), - ("repeat-instrument", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("participant-list", {"on": index_columns, "how": "inner", 
"suffixes": (None, '_merged')}), + ("participant-values", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("instrument-status", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("repeat-instrument", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), ] # @@ -162,151 +171,153 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. redcapLiveTransformConfig: Dict[str, Any] = { - "redcap_data_dir": "storage/release/raw-storage", - "project_metadata": { - "filepath": "AI-READI/REDCap", - "filename": "Redcap_project_metadata.json", + "redcap_data_dir": "storage/release/raw-storage", + "project_metadata": { + "filepath": "AI-READI/REDCap", + "filename": "Redcap_project_metadata.json", + }, + "redcap_api_url": "", + "redcap_api_key": "", + "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] + { + "key": "participant-list", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_247884.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [], }, - "redcap_api_url": "", - "redcap_api_key": "", - "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] - { - "key": "participant-list", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_247884.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [], - }, - { - "key": "participant-values", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_242544.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - 
("remap_values_by_columns", {"columns": data_columns}), - ("map_missing_values_by_columns", {"columns": data_columns}), - ( - "transform_values_by_column", - { - "column": "dricmpdat", - "new_column_name": "visitweek", - # ISO 8601 string format token for front-end: %V - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, - "missing_value": missing_value_generic, - }, - ), - ( - "transform_values_by_column", - { - "column": "dricmpdat", - "new_column_name": "visityear", - # ISO 8601 string format token for front-end: %Y - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, - "missing_value": missing_value_generic, - }, - ), - ( - "transform_values_by_column", - { - "column": "dricmpdat", - "new_column_name": "visitdate", - # ISO 8601 string format token for front-end: %Y - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), - "missing_value": missing_value_generic, - }, - ), - ( - "new_column_from_binary_columns_positive_class", - { - "column_name_map": phenotypes_column_map, - "new_column_name": "phenotypes", - "all_negative_value": "Control", - "default_value": missing_value_generic, - }, - ), - ( - "new_column_from_binary_columns_positive_class", - { - "column_name_map": treatments_column_map, - "new_column_name": "treatments", - "all_negative_value": "No Treatments", - "default_value": missing_value_generic, - }, - ), - ( - "keep_columns", - {"columns": index_columns + data_columns + computed_columns}, - ), - ], - }, - { - "key": "instrument-status", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_251954.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ( - "remap_values_by_columns", - {"columns": survey_columns, "value_map": survey_instrument_map}, - ), - ("map_missing_values_by_columns", {"columns": survey_columns}), - ("keep_columns", {"columns": 
index_columns + survey_columns}), - ], - }, - { - "key": "repeat-instrument", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_259920.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ("drop_rows", {"columns": repeat_survey_columns}), - ( - "aggregate_repeat_instrument_by_index", - {"aggregator": "max", "dtype": str}, - ), - ( - "keep_columns", - {"columns": index_columns + repeat_survey_data_columns}, - ), - ], - }, - ], - "post_transform_merge": ( - index_columns, redcap_report_merge_map - ), - "post_merge_transforms": [ + { + "key": "participant-values", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_242544.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ("remap_values_by_columns", {"columns": data_columns}), + ("remap_values_by_columns", {"columns": ["export_group"], "value_map": export_group_map}), + ("remap_values_by_columns", {"columns": ["race_db"], "value_map": race_db_map}), + ("map_missing_values_by_columns", {"columns": data_columns + phase_2_columns}), ( - "remap_values_by_columns", - {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visitweek", + # ISO 8601 string format token for front-end: %V + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, + "missing_value": missing_value_generic, + }, ), - ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), - ], - "index_columns": ["record_id"], - "missing_value_generic": missing_value_generic, + ( + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visityear", + # ISO 8601 string format token for front-end: %Y + "transform": lambda 
x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, + "missing_value": missing_value_generic, + }, + ), + ( + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visitdate", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), + "missing_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": phenotypes_column_map, + "new_column_name": "phenotypes", + "all_negative_value": "Control", + "default_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": treatments_column_map, + "new_column_name": "treatments", + "all_negative_value": "No Treatments", + "default_value": missing_value_generic, + }, + ), + ( + "keep_columns", + {"columns": index_columns + data_columns + computed_columns + phase_2_columns}, + ), + ], + }, + { + "key": "instrument-status", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_251954.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ( + "remap_values_by_columns", + {"columns": survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": survey_columns}), + ("keep_columns", {"columns": index_columns + survey_columns}), + ], + }, + { + "key": "repeat-instrument", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_259920.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ("drop_rows", {"columns": repeat_survey_columns}), + ( + "aggregate_repeat_instrument_by_index", + {"aggregator": "max", "dtype": str}, + ), + ( + "keep_columns", + {"columns": index_columns + repeat_survey_data_columns}, + ), + ], + 
}, + ], + "post_transform_merge": ( + index_columns, redcap_report_merge_map + ), + "post_merge_transforms": [ + ( + "remap_values_by_columns", + {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), + ], + "index_columns": ["record_id"], + "missing_value_generic": missing_value_generic, } # @@ -316,149 +327,151 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. redcapReleaseTransformConfig: Dict[str, Any] = { - "redcap_data_dir": "storage/release/raw-storage", - "project_metadata": { - "filepath": "AI-READI/REDCap", - "filename": "Redcap_project_metadata.json", + "redcap_data_dir": "storage/release/raw-storage", + "project_metadata": { + "filepath": "AI-READI/REDCap", + "filename": "Redcap_project_metadata.json", + }, + "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] + { + "key": "participant-list", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_247884.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [], }, - "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] - { - "key": "participant-list", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_247884.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [], - }, - { - "key": "participant-values", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_242544.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ("remap_values_by_columns", 
{"columns": data_columns}), - ("map_missing_values_by_columns", {"columns": data_columns}), - ( - "transform_values_by_column", - { - "column": "dricmpdat", - "new_column_name": "visitweek", - # ISO 8601 string format token for front-end: %V - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, - "missing_value": missing_value_generic, - }, - ), - ( - "transform_values_by_column", - { - "column": "dricmpdat", - "new_column_name": "visityear", - # ISO 8601 string format token for front-end: %Y - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, - "missing_value": missing_value_generic, - }, - ), - ( - "transform_values_by_column", - { - "column": "dricmpdat", - "new_column_name": "visitdate", - # ISO 8601 string format token for front-end: %Y - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), - "missing_value": missing_value_generic, - }, - ), - ( - "new_column_from_binary_columns_positive_class", - { - "column_name_map": phenotypes_column_map, - "new_column_name": "phenotypes", - "all_negative_value": "Control", - "default_value": missing_value_generic, - }, - ), - ( - "new_column_from_binary_columns_positive_class", - { - "column_name_map": treatments_column_map, - "new_column_name": "treatments", - "all_negative_value": "No Treatments", - "default_value": missing_value_generic, - }, - ), - ( - "keep_columns", - {"columns": index_columns + data_columns + computed_columns}, - ), - ], - }, - { - "key": "instrument-status", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_251954.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ( - "remap_values_by_columns", - {"columns": survey_columns, "value_map": survey_instrument_map}, - ), - ("map_missing_values_by_columns", {"columns": survey_columns}), - ("keep_columns", {"columns": index_columns + survey_columns}), - ], - 
}, - { - "key": "repeat-instrument", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_259920.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ("drop_rows", {"columns": repeat_survey_columns}), - ( - "aggregate_repeat_instrument_by_index", - {"aggregator": "max", "dtype": str}, - ), - ( - "keep_columns", - {"columns": index_columns + repeat_survey_data_columns}, - ), - ], - }, - ], - "post_transform_merge": ( - index_columns, redcap_report_merge_map - ), - "post_merge_transforms": [ + { + "key": "participant-values", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_242544.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ("remap_values_by_columns", {"columns": data_columns}), + ("remap_values_by_columns", {"columns": ["export_group"], "value_map": export_group_map}), + ("remap_values_by_columns", {"columns": ["race_db"], "value_map": race_db_map}), + ("map_missing_values_by_columns", {"columns": data_columns + phase_2_columns}), ( - "remap_values_by_columns", - {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visitweek", + # ISO 8601 string format token for front-end: %V + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, + "missing_value": missing_value_generic, + }, ), - ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), - ], - "index_columns": ["record_id"], - "missing_value_generic": missing_value_generic, + ( + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visityear", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: datetime.strptime(x, 
"%Y-%m-%d").isocalendar().year, + "missing_value": missing_value_generic, + }, + ), + ( + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visitdate", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), + "missing_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": phenotypes_column_map, + "new_column_name": "phenotypes", + "all_negative_value": "Control", + "default_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": treatments_column_map, + "new_column_name": "treatments", + "all_negative_value": "No Treatments", + "default_value": missing_value_generic, + }, + ), + ( + "keep_columns", + {"columns": index_columns + data_columns + computed_columns + phase_2_columns}, + ), + ], + }, + { + "key": "instrument-status", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_251954.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ( + "remap_values_by_columns", + {"columns": survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": survey_columns}), + ("keep_columns", {"columns": index_columns + survey_columns}), + ], + }, + { + "key": "repeat-instrument", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_259920.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ("drop_rows", {"columns": repeat_survey_columns}), + ( + "aggregate_repeat_instrument_by_index", + {"aggregator": "max", "dtype": str}, + ), + ( + "keep_columns", + {"columns": index_columns + repeat_survey_data_columns}, + ), + ], + }, + ], + 
"post_transform_merge": ( + index_columns, redcap_report_merge_map + ), + "post_merge_transforms": [ + ( + "remap_values_by_columns", + {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), + ], + "index_columns": ["record_id"], + "missing_value_generic": missing_value_generic, } @@ -468,3894 +481,4188 @@ # Survey Completions surveyCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "survey-completion-status-by-site", - "strict": True, - "transforms": [ - { - "name": "Demographics Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "demographics_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Health Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "health_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Health Survey", - "field": "health_survey_complete", - 
"missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Substance Use Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "substance_use_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "CES-D-10 Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "cesd10_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PAID-5 DM Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "paid5_dm_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - 
"filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Diabetes Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "diabetes_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Dietary Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "dietary_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, 
- }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Opthalmic Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "ophthalmic_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX SDOH Combined Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "px_sdoh_combined_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Food Insecurity Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "px_food_insecurity_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { 
- "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Neighborhood Environment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "px_neighborhood_environment_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "px_racial_ethnic_discrimination_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Racial and 
Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Medications Assessment", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "meds_assessment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "survey-completion-status-by-site", + "strict": True, + "transforms": [ + { + "name": "Demographics Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "demographics_survey_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Demographics Survey", + "field": 
"demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Health Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "health_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Substance Use Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "substance_use_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "CES-D-10 Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "cesd10_survey_complete"], + "value": "record_id", + "func": "count", + } + ], 
+ "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PAID-5 DM Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Diabetes Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "diabetes_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": 
missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Dietary Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "dietary_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Opthalmic Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ophthalmic_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX SDOH Combined Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_sdoh_combined_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": 
"Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Food Insecurity Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_food_insecurity_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Neighborhood Environment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "px_neighborhood_environment_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": 
missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "px_racial_ethnic_discrimination_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Medications Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "meds_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": 
{ + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Recruitment Operations recruitmentOperationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "recruitment-operations-status-by-site", - "strict": True, - "transforms": [{ - "name": "Recruitment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "recruitment_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "FAQ Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "faq_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "FAQ Survey", - "field": "faq_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "FAQ Survey", - "field": "faq_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Screening Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": 
["siteid", "screening_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Screening Survey", - "field": "screening_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Screening Survey", - "field": "screening_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Preconsent Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "preconsent_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Consent Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "consent_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Consent Survey", - "field": "consent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { 
- "name": "Consent Survey", - "field": "consent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Staff Consent Attestation Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "staff_consent_attestation_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Study Enrollment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "study_enrollment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Driving Record", 
- "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "driving_record_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Driving Record", - "field": "driving_record_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Driving Record", - "field": "driving_record_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Device Distribution", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_distribution_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "recruitment-operations-status-by-site", + "strict": True, + "transforms": [{ + "name": "Recruitment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "recruitment_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + 
"group": { + "remap": lambda x: x["name"], + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "FAQ Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "faq_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Screening Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "screening_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + 
}, + }, + }, + { + "name": "Preconsent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "preconsent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Consent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "consent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Staff Consent Attestation Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "staff_consent_attestation_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + 
"name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, { - "name": "Data Management Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "data_management_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Data Management Survey", - "field": "data_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Data Management Survey", - "field": "data_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "name": "Study Enrollment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "study_enrollment_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + 
"field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Driving Record", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "driving_record_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Distribution", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_distribution_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Data Management Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "data_management_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + 
"astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype Recruitment Counts by Site phenotypeRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-recruitment", - "strict": True, - "transforms": [ - { - "name": "Phenotype Recruitment", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["phenotypes", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-recruitment", + "strict": True, + "transforms": [ + { + "name": "Phenotype Recruitment", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["phenotypes", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": 
{ + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype Recruitment Counts by Site phenotypeRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-recruitment-by-site", - "strict": True, - "transforms": [ - { - "name": "Phenotype Recruitment by Site", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["siteid", "phenotypes", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-recruitment-by-site", + "strict": True, + "transforms": [ + { + "name": "Phenotype Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["siteid", "phenotypes", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + 
"missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Phenotype Recruitment Counts by Phase +phenotypeRecruitmentByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-recruitment-by-phase", + "strict": True, + "transforms": [ + { + "name": "Phenotype Recruitment by Phase", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["export_group", "phenotypes", "visitdate"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Race Recruitment Counts by Phase +raceRecruitmentByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-recruitment-by-phase", + "strict": True, + "transforms": [ + { + "name": "Race Recruitment by Phase", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["export_group", "race_db", "visitdate"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, 
+ }, + }, + }, + ], + }, ) + # Race Recruitment Counts raceRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-recruitment", - "strict": True, - "transforms": [ - { - "name": "Race Recruitment", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["race", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-recruitment", + "strict": True, + "transforms": [ + { + "name": "Race Recruitment", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["race_db", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race Recruitment Counts by Site raceRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-recruitment-by-site", - "strict": True, - "transforms": [ - { - "name": "Race Recruitment by Site", - "vtype": "DoubleDiscreteTimeseries", - 
"methods": [ - { - "groups": ["siteid", "race", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-recruitment-by-site", + "strict": True, + "transforms": [ + { + "name": "Race Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["siteid", "race_db", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Sex Recruitment Counts sexRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "sex-recruitment", - "strict": True, - "transforms": [ - { - "name": "Sex Recruitment", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["scrsex", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Sex", - "field": 
"scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "sex-recruitment", + "strict": True, + "transforms": [ + { + "name": "Sex Recruitment", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["scrsex", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Sex Recruitment Counts By Site sexRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "sex-recruitment-by-site", - "strict": True, - "transforms": [ - { - "name": "Sex Recruitment by Site", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["siteid", "scrsex", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - 
"missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "sex-recruitment-by-site", + "strict": True, + "transforms": [ + { + "name": "Sex Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["siteid", "scrsex", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) +# Sex Counts by Phase +sexRecruitmentByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "sex-recruitment-by-phase", + "strict": True, + "transforms": [ + { + "name": "Sex Recruitment by Phase", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["export_group", "scrsex", "visitdate"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + + + # Race & Sex Counts by Race raceSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-sex-by-site", - "strict": 
True, - "transforms": [ - { - "name": "Race & Sex by Site", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["scrsex", "race", "siteid"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-sex-by-site", + "strict": True, + "transforms": [ + { + "name": "Race & Sex by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "race_db", "siteid"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype & Sex Counts by Race phenotypeSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-sex-by-site", - "strict": True, - "transforms": [ - { - "name": "Phenotype & Sex by Site", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["scrsex", "phenotypes", "siteid"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - }, - "group": { - "name": 
"Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-sex-by-site", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Sex by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "phenotypes", "siteid"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype & Site Counts by Sex phenotypeSiteBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-site-by-sex", - "strict": True, - "transforms": [ - { - "name": "Phenotype & Site by Sex", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["scrsex", "phenotypes", "siteid"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": 
missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-site-by-sex", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Site by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "phenotypes", "siteid"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype & Race Counts by Sex phenotypeRaceBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-race-by-sex", - "strict": True, - "transforms": [ - { - "name": "Phenotype & Race by Sex", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-race-by-sex", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Race by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + 
"func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Phenotype & Race Counts by Phase +phenotypeRaceByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-race-by-phase", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Race by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "export_group"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype & Sex Counts by Race phenotypeSexByRaceTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-sex-by-race", - "strict": True, - "transforms": [ - { - "name": "Phenotype & Sex by Race", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Phenotype", - 
"field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-sex-by-race", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Sex by Race", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Sex & Phenotype Counts by Race sexPhenotypeByRaceTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "sex-phenotype-by-race", - "strict": True, - "transforms": [ - { - "name": "Sex & Phenotype by Race", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": 
int, - }, - }, - }, + "simpleTransform", + { + "key": "sex-phenotype-by-race", + "strict": True, + "transforms": [ + { + "name": "Sex & Phenotype by Race", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Sex & Race Counts by Phenotype sexRaceByPhenotypeTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "sex-race-by-phenotype", - "strict": True, - "transforms": [ - { - "name": "Sex & Race by Phenotype", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "sex-race-by-phenotype", + "strict": True, + "transforms": [ + { + "name": "Sex & Race by Phenotype", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + 
"accessors": { + "filterby": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Race & Sex Counts by Phase +raceSexByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-sex-by-phase", + "strict": True, + "transforms": [ + { + "name": "Race & Sex by Phase", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["export_group", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race & Sex Counts by Phenotype raceSexByPhenotypeTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-sex-by-phenotype", - "strict": True, - "transforms": [ - { - "name": "Race & Sex by Phenotype", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - 
"astype": str, - }, - "subgroup": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-sex-by-phenotype", + "strict": True, + "transforms": [ + { + "name": "Race & Sex by Phenotype", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race & Phenotype Counts by Sex racePhenotypeBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-phenotype-by-sex", - "strict": True, - "transforms": [ - { - "name": "Race & Phenotype by Sex", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": 
"race-phenotype-by-sex", + "strict": True, + "transforms": [ + { + "name": "Race & Phenotype by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Race & Phenotype Counts by Phase +racePhenotypeByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-phenotype-by-phase", + "strict": True, + "transforms": [ + { + "name": "Race & Phenotype by Phase", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "export_group"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) currentMedicationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "current-medications-by-site", - "strict": True, - "transforms": [ - { - "name": "Current Medications by Site", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", 
"current_medications", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Current Medication Count", - "field": "current_medications", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Participants (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - } + "simpleTransform", + { + "key": "current-medications-by-site", + "strict": True, + "transforms": [ + { + "name": "Current Medications by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "current_medications", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Current Medication Count", + "field": "current_medications", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Participants (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + } + ], + }, ) # Overview deviceCollectionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "device-collection-status-by-site", - "strict": True, - "transforms": [ - { - "name": "Device Distribution", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_distribution_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - 
"astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "BCVA", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "bcva_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "BCVA", - "field": "bcva_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "BCVA", - "field": "bcva_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Photopic MARS", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "photopic_mars_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Photopic MARS", - "field": "photopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Photopic MARS", - "field": "photopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": 
"Mesopic MARS", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "mesopic_mars_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Monofilament", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "monofilament_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Monofilament", - "field": "monofilament_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Monofilament", - "field": "monofilament_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "ECG Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "ecg_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "ECG Survey", - "field": "ecg_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { 
- "name": "ECG Survey", - "field": "ecg_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Lab Results Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "lab_results_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Lab Results Survey", - "field": "lab_results_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Lab Results Survey", - "field": "lab_results_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Specimen Management", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "specimen_management_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Specimen Management", - "field": "specimen_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Specimen Management", - "field": "specimen_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Device Return", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_return_complete"], - "value": "record_id", - 
"func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Return", - "field": "device_return_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Return", - "field": "device_return_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "device-collection-status-by-site", + "strict": True, + "transforms": [ + { + "name": "Device Distribution", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_distribution_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "BCVA", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "bcva_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + 
"subgroup": { + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Photopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "photopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Mesopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "mesopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Monofilament", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "monofilament_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + 
"filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "ECG Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ecg_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Lab Results Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "lab_results_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": 
"record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Specimen Management", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "specimen_management_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Return", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_return_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Overview instrumentCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "instrument-completion-status-by-site", - "strict": True, - "transforms": [ - { - "name": "Recruitment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", 
"recruitment_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "FAQ Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "faq_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "FAQ Survey", - "field": "faq_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "FAQ Survey", - "field": "faq_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Screening Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "screening_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Screening Survey", - "field": "screening_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Screening Survey", - 
"field": "screening_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Preconsent Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "preconsent_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Consent Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "consent_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Consent Survey", - "field": "consent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Consent Survey", - "field": "consent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Staff Consent Attestation Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "staff_consent_attestation_survey_complete", - ], - "value": 
"record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Demographics Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "demographics_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Health Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "health_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, 
- }, - "subgroup": { - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Substance Use Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "substance_use_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "CES-D-10 Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "cesd10_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PAID-5 DM Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", 
"paid5_dm_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Diabetes Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "diabetes_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Dietary Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "dietary_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Dietary 
Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Opthalmic Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "ophthalmic_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX SDOH Combined Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "px_sdoh_combined_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Food Insecurity Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", 
"px_food_insecurity_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Neighborhood Environment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "px_neighborhood_environment_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "px_racial_ethnic_discrimination_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": 
missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Decline Participation Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "decline_participation_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Decline Participation Survey", - "field": "decline_participation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Decline Participation Survey", - "field": "decline_participation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Study Enrollment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "study_enrollment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - 
"subgroup": { - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Driving Record", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "driving_record_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Driving Record", - "field": "driving_record_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Driving Record", - "field": "driving_record_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Device Distribution", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_distribution_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Medications Assessment", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", 
"meds_assessment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Physical Assessment", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "physical_assessment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Physical Assessment", - "field": "physical_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Physical Assessment", - "field": "physical_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "BCVA", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "bcva_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "BCVA", - "field": "bcva_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "BCVA", - "field": 
"bcva_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Photopic MARS", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "photopic_mars_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Photopic MARS", - "field": "photopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Photopic MARS", - "field": "photopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Mesopic MARS", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "mesopic_mars_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Monofilament", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "monofilament_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": 
"siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Monofilament", - "field": "monofilament_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Monofilament", - "field": "monofilament_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "MOCA", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "moca_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "MOCA", - "field": "moca_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "MOCA", - "field": "moca_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "ECG Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "ecg_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "ECG Survey", - "field": "ecg_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "ECG Survey", - "field": "ecg_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Lab Results 
Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "lab_results_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Lab Results Survey", - "field": "lab_results_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Lab Results Survey", - "field": "lab_results_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Specimen Management", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "specimen_management_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Specimen Management", - "field": "specimen_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Specimen Management", - "field": "specimen_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Device Return", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_return_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Return", - "field": "device_return_complete", - 
"missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Return", - "field": "device_return_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Disposition Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "disposition_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Disposition Survey", - "field": "disposition_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Disposition Survey", - "field": "disposition_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Data Management Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "data_management_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Data Management Survey", - "field": "data_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Data Management Survey", - "field": "data_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "instrument-completion-status-by-site", 
+ "strict": True, + "transforms": [ + { + "name": "Recruitment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "recruitment_survey_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "FAQ Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "faq_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Screening Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "screening_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Screening Survey", + 
"field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Preconsent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "preconsent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Consent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "consent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Staff Consent Attestation 
Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "staff_consent_attestation_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Demographics Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "demographics_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Health Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "health_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + 
"remap": lambda x: x["name"], + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Substance Use Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "substance_use_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "CES-D-10 Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "cesd10_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + 
"astype": int, + }, + }, + }, + { + "name": "PAID-5 DM Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Diabetes Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "diabetes_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Dietary Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "dietary_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Dietary Survey", + 
"field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Opthalmic Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ophthalmic_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX SDOH Combined Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_sdoh_combined_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + 
}, + }, + }, + { + "name": "PhenX Food Insecurity Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_food_insecurity_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Neighborhood Environment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "px_neighborhood_environment_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "px_racial_ethnic_discrimination_survey_complete", + ], + "value": 
"record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Decline Participation Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "decline_participation_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Study Enrollment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "study_enrollment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Study 
Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Driving Record", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "driving_record_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Distribution", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_distribution_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, 
+ { + "name": "Medications Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "meds_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Physical Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "physical_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Physical Assessment", + "field": "physical_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Physical Assessment", + "field": "physical_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "BCVA", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "bcva_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "BCVA", + "field": 
"bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Photopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "photopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Mesopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "mesopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Monofilament", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", 
"monofilament_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "MOCA", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "moca_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "MOCA", + "field": "moca_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "MOCA", + "field": "moca_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "ECG Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ecg_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": 
"Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Lab Results Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "lab_results_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Specimen Management", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "specimen_management_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Return", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_return_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + 
"astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Disposition Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "disposition_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Disposition Survey", + "field": "disposition_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Disposition Survey", + "field": "disposition_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Data Management Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "data_management_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": 
missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Overview surveyCompletionStatusTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "instrument-completion-status", - "strict": True, - "transforms": [ - { - "name": "Demographics Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["demographics_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Health Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["health_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Substance Use Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": 
["substance_use_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "CES-D-10 Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["cesd10_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PAID-5 DM Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["paid5_dm_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - 
"missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Diabetes Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["diabetes_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Dietary Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["dietary_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Opthalmic Survey", - "vtype": 
"DoubleCategorical", - "methods": [ - { - "groups": ["ophthalmic_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX SDOH Combined Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["px_sdoh_combined_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Food Insecurity Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["px_food_insecurity_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": 
missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Neighborhood Environment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["px_neighborhood_environment_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["px_racial_ethnic_discrimination_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: 
x["name"], - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Medications Assessment", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["meds_assessment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "instrument-completion-status", + "strict": True, + "transforms": [ + { + "name": "Demographics Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["demographics_survey_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + 
"astype": str, + }, + "subgroup": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Health Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["health_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Substance Use Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["substance_use_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "CES-D-10 Survey", + "vtype": 
"DoubleCategorical", + "methods": [ + { + "groups": ["cesd10_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PAID-5 DM Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Diabetes Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["diabetes_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Diabetes Survey", + "field": 
"diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Dietary Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["dietary_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Opthalmic Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["ophthalmic_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": 
"PhenX SDOH Combined Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["px_sdoh_combined_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Food Insecurity Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["px_food_insecurity_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Neighborhood Environment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["px_neighborhood_environment_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + 
"filterby": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["px_racial_ethnic_discrimination_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Medications Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["meds_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Medications Assessment", + 
"field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) moduleTransformConfigs: Dict[str, Any] = { - "device-collection-status-by-site": deviceCollectionStatusBySiteTransformConfig, - "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, - "survey-completion-status": surveyCompletionStatusTransformConfig, - "survey-completion-status-by-site": surveyCompletionStatusBySiteTransformConfig, - "recruitment-operations-status-by-site": recruitmentOperationsBySiteTransformConfig, - "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, - "phenotype-site-by-sex": phenotypeSiteBySexTransformConfig, - "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, - "phenotype-sex-by-race": phenotypeSexByRaceTransformConfig, - "race-phenotype-by-sex": racePhenotypeBySexTransformConfig, - "race-sex-by-phenotype": raceSexByPhenotypeTransformConfig, - "sex-phenotype-by-race": sexPhenotypeByRaceTransformConfig, - "sex-race-by-phenotype": sexRaceByPhenotypeTransformConfig, - "phenotype-recruitment": phenotypeRecruitmentTransformConfig, - "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, - "race-recruitment": raceRecruitmentTransformConfig, - "race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, - "sex-recruitment": sexRecruitmentTransformConfig, - "sex-recruitment-by-site": sexRecruitmentBySiteTransformConfig, - "race-sex-by-site": raceSexBySiteTransformConfig, - 
"current-medications-by-site": currentMedicationsBySiteTransformConfig, + "device-collection-status-by-site": deviceCollectionStatusBySiteTransformConfig, + "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, + "survey-completion-status": surveyCompletionStatusTransformConfig, + "survey-completion-status-by-site": surveyCompletionStatusBySiteTransformConfig, + "recruitment-operations-status-by-site": recruitmentOperationsBySiteTransformConfig, + "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, + "phenotype-site-by-sex": phenotypeSiteBySexTransformConfig, + "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, + "phenotype-race-by-phase": phenotypeRaceByPhaseTransformConfig, + "phenotype-sex-by-race": phenotypeSexByRaceTransformConfig, + "race-phenotype-by-sex": racePhenotypeBySexTransformConfig, + "race-phenotype-by-phase": racePhenotypeByPhaseTransformConfig, + "race-sex-by-phase": raceSexByPhaseTransformConfig, + "race-sex-by-phenotype": raceSexByPhenotypeTransformConfig, + "sex-phenotype-by-race": sexPhenotypeByRaceTransformConfig, + "sex-race-by-phenotype": sexRaceByPhenotypeTransformConfig, + "phenotype-recruitment": phenotypeRecruitmentTransformConfig, + "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, + "phenotype-recruitment-by-phase": phenotypeRecruitmentByPhaseTransformConfig, + "race-recruitment": raceRecruitmentTransformConfig, + "race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, + "race-recruitment-by-phase": raceRecruitmentByPhaseTransformConfig, + "sex-recruitment": sexRecruitmentTransformConfig, + "sex-recruitment-by-site": sexRecruitmentBySiteTransformConfig, + "sex-recruitment-by-phase": sexRecruitmentByPhaseTransformConfig, + "race-sex-by-site": raceSexBySiteTransformConfig, + "current-medications-by-site": currentMedicationsBySiteTransformConfig, } diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py 
index b568aa4b..679bc096 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -1,11 +1,11 @@ # Library Modules from typing import Any, Callable, Union, List, Dict, Tuple from datetime import datetime -import logging, re, copy +import logging, copy, os import modules.etl.vtypes as vtypes # Third-Party Modules -import pandas as pd +import polars as pl class ModuleTransform(object): @@ -14,9 +14,6 @@ def __init__( config: Dict[str, Any], logging_config: Dict[str, str] = {}, ) -> None: - # - # Logging - # # Logging Config Checks self.logging_config = ( @@ -56,22 +53,21 @@ def __init__( raise ValueError( f"ModuleTransform argument transforms in config must be a list or dict type" ) - elif len(self.transforms) < 1: + if len(self.transforms) < 1: self.valid = False raise ValueError( f"ModuleTransform instantiation missing transforms in config argument" ) - else: - # Transform attribute is there and has one of the correct types (list, dict) - pass # Normalize Transforms to List Type, Check Validity, and Warn on Missing Attributes + valid = True for indexed_transform in enumerate(self.transforms): - self.valid = True if self._transformIsValid(indexed_transform) else False - if self.strict and not self.valid: - raise ValueError( - f"{self.key}:Missing properties in transforms argument, see log at {self.logging_config['filename']} for details" - ) + if self.strict and not self._transformIsValid(indexed_transform): + valid = False + raise ValueError( + f"{self.key}:Missing properties in transforms argument, see log at {self.logging_config['filename']} for details" + ) + self.valid = valid self.logger.info(f"{self.key}:Initialized") @@ -161,40 +157,37 @@ def _setValueType( return pvalue - def simpleTransform(self, df: pd.DataFrame) -> object: + def simpleTransform(self, df: pl.DataFrame) -> object: """ - Performs a pd.DataFrame.groupby transform. The - df is first subset to the relevant fields. 
A - groupby function is then applied to the subset - to create a multi-index (hierarchy) by the - groups. An aggregate function is then applied - to the non-grouped column (e.g. count, sum). - - One transform for one VType. A single - visualization is then rendered to a single - visualization module. + Performs a group_by transform. The df is first subset to the + relevant fields. A group_by function is then applied to the + subset. """ self.transformed = [] transform: Dict[str, Any] = ( self.transforms.pop() - ) # simple transforms have only one transform object + ) # simple transforms have only one transform object name, vtype, methods, accessors = ( transform["name"], getattr(vtypes, transform["vtype"])(), transform["methods"], transform["accessors"], ) + if vtype.isvalid(df, accessors): - temp = df[ - list(set(accessor["field"] for key, accessor in accessors.items())) - ] + + # Select and Group + cols_to_select = list(set(accessor["field"] for key, accessor in accessors.items())) + temp = df.select(cols_to_select) for method in methods: groups, value, func = method["groups"], method["value"], method["func"] - grouped = temp.groupby(groups, as_index=False) - temp = getattr(grouped, func)() - transformed = temp + temp = temp.group_by(groups).agg( + getattr(pl.all().exclude(groups), func)() + ) - for record in transformed.to_dict("records"): + # Row-wise Transformation + transformed = temp + for record in transformed.to_dicts(): record = { key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() @@ -211,20 +204,10 @@ def simpleTransform(self, df: pd.DataFrame) -> object: return self - def compoundTransform(self, df: pd.DataFrame) -> object: + def compoundTransform(self, df: pl.DataFrame) -> object: """ - For each transform, performs a pd.DataFrame.groupby - transform. The df is first subset to the relevant - fields. A groupby function is then applied to the - subset to create a multi-index (hierarchy) by the - groups. 
An aggregate function is then applied to the - non-grouped column (e.g. count, sum). - - All transforms are combined into a single flat - transform. Transforms must be identical VType, - (e.g. [transformA, transformB, ...]). A single - (aggregated) visualization is then rendered to - a single visualization module. + For each transform, performs a group_by transform. + All transforms are combined into a single flat transform list. """ self.transformed = [] @@ -236,20 +219,23 @@ def compoundTransform(self, df: pd.DataFrame) -> object: transform["accessors"], ) if vtype.isvalid(df, accessors): - temp = df[ - list(set(accessor["field"] for key, accessor in accessors.items())) - ] + + # Select and Group + cols_to_select = list(set(accessor["field"] for key, accessor in accessors.items())) + temp = df.select(cols_to_select) for method in methods: groups, value, func = ( method["groups"], method["value"], method["func"], ) - grouped = temp.groupby(groups, as_index=False) - temp = getattr(grouped, func)() - transformed = temp + temp = temp.group_by(groups).agg( + getattr(pl.all().exclude(groups), func)() + ) - for record in transformed.to_dict("records"): + # Row-wise Transformation + transformed = temp + for record in transformed.to_dicts(): record = { key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() @@ -266,20 +252,10 @@ def compoundTransform(self, df: pd.DataFrame) -> object: return self - def mixedTransform(self, df: pd.DataFrame) -> object: + def mixedTransform(self, df: pl.DataFrame) -> object: """ - For each transform, performs a pd.DataFrame.groupby - transform. The df is first subset to the relevant - fields. A groupby function is then applied to the - subset to create a multi-index (hierarchy) by the - groups. An aggregate function is then applied to the - non-grouped column (e.g. count, sum). - - Transforms are kept distinct and inserted into a - dictionary, e.g. {nameA: transformA, nameB: transformB, - ...}. 
Transforms can be heterogenous VTypes. - Multiple visualizations are then rendered in the same - visualization module. + For each transform, performs a group_by transform. + Transforms are kept distinct and inserted into a dictionary by name. """ self.transformed = {} for transform in self.transforms: @@ -290,21 +266,24 @@ def mixedTransform(self, df: pd.DataFrame) -> object: transform["accessors"], ) if vtype.isvalid(df, accessors): - temp = df[ - list(set(accessor["field"] for key, accessor in accessors.items())) - ] + + # Select and Group + cols_to_select = list(set(accessor["field"] for key, accessor in accessors.items())) + temp = df.select(cols_to_select) for method in methods: groups, value, func = ( method["groups"], method["value"], method["func"], ) - grouped = temp.groupby(groups, as_index=False) - temp = getattr(grouped, func)() - transformed = temp + temp = temp.group_by(groups).agg( + getattr(pl.all().exclude(groups), func)() + ) + # Row-wise Transformation + transformed = temp subtransform = [] - for record in transformed.to_dict("records"): + for record in transformed.to_dicts(): record = { key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() diff --git a/modules/etl/transforms/redcap_live_transform.py b/modules/etl/transforms/redcap_live_transform.py index 8079547a..be0cf6ea 100644 --- a/modules/etl/transforms/redcap_live_transform.py +++ b/modules/etl/transforms/redcap_live_transform.py @@ -1,69 +1,69 @@ # Library Modules -from typing import Any, Callable, Union, List, Dict, Tuple -import re, os, csv, json, logging +from typing import Any, Callable, Union, List, Dict, Tuple, Literal +import re, os, logging, copy # Third Party Modules from redcap import Project -import pandas as pd +import polars as pl import numpy as np - class RedcapLiveTransform(object): - def __init__(self, config: dict) -> None: - - print("REDCap Live Transform") + def __init__(self, config: dict): # # Config # + # + self.config = 
copy.deepcopy(config) + # Get CWD self.cwd = os.getcwd() # REDCap API Config - self.redcap_api_url = config["redcap_api_url"] - self.redcap_api_key = config["redcap_api_key"] + self.redcap_api_url = self.config["redcap_api_url"] + self.redcap_api_key = self.config["redcap_api_key"] # Data Config self.index_columns = ( - config["index_columns"] if "index_columns" in config else ["record_id"] + self.config["index_columns"] if "index_columns" in self.config else ["record_id"] ) # REDCap Reports Config - self.reports_configs = config["reports"] if "reports" in config else [] + self.reports_configs = self.config["reports"] if "reports" in self.config else [] # Report Merging self.post_transform_merge = ( - config["post_transform_merge"] - if "post_transform_merge" in config + self.config["post_transform_merge"] + if "post_transform_merge" in self.config else ([], []) ) # Post Merge Transforms self.post_merge_transforms = ( - config["post_merge_transforms"] if "post_merge_transforms" in config else [] + self.config["post_merge_transforms"] if "post_merge_transforms" in self.config else [] ) # Column Value Separator self.multivalue_separator = ( - config["multivalue_separator"] if "multivalue_separator" in config else "|" + self.config["multivalue_separator"] if "multivalue_separator" in self.config else "|" ) # CSV Float Format (Default: "%.2f") self.csv_float_format = ( - config["csv_float_format"] if "csv_float_format" in config else "%.2f" + self.config["csv_float_format"] if "csv_float_format" in self.config else "%.2f" ) self.missing_value_generic = ( - config["missing_value_generic"] - if "missing_value_generic" in config + self.config["missing_value_generic"] + if "missing_value_generic" in self.config else "Value Unavailable" ) # Logging Config self.logging_config = ( - config["logging_config"] - if "logging_config" in config + self.config["logging_config"] + if "logging_config" in self.config else { "encoding": "utf-8", "filename": "REDCapETL.log", @@ -73,7 
+73,7 @@ def __init__(self, config: dict) -> None: # Configure Logging logging.basicConfig(**self.logging_config) - self.logger = logging.getLogger("RedcapTransform") + self.logger = logging.getLogger("RedcapTransform:Live") # # REDCap Parsing Variables @@ -94,9 +94,10 @@ def __init__(self, config: dict) -> None: } # General Parsing Variables + # Note: Polars handles nulls differently (null vs NaN). + # We map standard "empty" markers to the generic missing value. self.none_values = [ np.nan, - pd.NaT, None, "nan", "NaN", @@ -105,26 +106,9 @@ def __init__(self, config: dict) -> None: self.missing_value_generic, ] self.none_map = {key: self.missing_value_generic for key in self.none_values} - self.survey_instrument_map = { - "2": "Complete", - "1": "Unverified", - "0": "Incomplete", - "": self.missing_value_generic, - } self.logger.info(f"Initialized") - # - # PyCap Initialization - # - - # Initialize PyCap Objects - self.logger.info(f"Retrieving REDCap project data") - self.project = Project(self.redcap_api_url, self.redcap_api_key) - - # Load REDCap Project Metadata - self.metadata = self.project.export_metadata() - # # Setup Reports & Apply Transforms # @@ -136,85 +120,104 @@ def __init__(self, config: dict) -> None: "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "", + "csv_delimiter": ",", } - # Get & Structure Report + + self.project: Any = None + self.reports: Dict[str, Any] = {} + self.merged: pl.DataFrame = pl.DataFrame([]) + + def run(self): + """ + Execute ETL + """ + + # + # PyCap Initialization + # + + # Initialize PyCap Objects + self.logger.info(f"Retrieving REDCap project data") + self.project = Project(self.redcap_api_url, self.redcap_api_key) + + # Load REDCap Project Metadata + self.metadata: Any = self.project.export_metadata() + self.logger.info(f"API Request to metadata received metadata with length: {len(self.metadata)}") + self.logger.info(f"Retrieving Live REDCap reports") - 
self.reports = {} for report_config in self.reports_configs: # Get Report report_key = report_config["key"] report_kwdargs = report_config["kwdargs"] | self._default_report_kwdargs report_transforms = report_config["transforms"] - report = self.project.export_report(**report_kwdargs) - pd.DataFrame(report, dtype = str).to_csv(f"~/Downloads/etl-redcap-export-live-{report_kwdargs['report_id']}") + + # PyCap returns a list of dicts by default. + report_data: Any = self.project.export_report(**report_kwdargs) + self.logger.info(f"API Request to report {report_key} received data with length: {len(report_data)}") + + # Convert to Polars ensuring all columns are input as UTF8 Strings + if not report_data: + df = pl.DataFrame([]) + else: + # Calculate schema to force Utf8 to prevent type inference issues on ragged data + schema = {key: pl.Utf8 for key in set().union(*(d.keys() for d in report_data))} + df = pl.from_dicts(report_data, schema=schema) + # Structure Reports self.reports[report_key] = { "id": report_kwdargs["report_id"], - "df": pd.DataFrame(report, dtype = str), + "df": df.rechunk(), "transforms": report_transforms, "transformed": None, - "annotation": self._get_redcap_type_metadata(pd.DataFrame(report)), + "annotation": self._get_redcap_type_metadata(df), } - # Apply Pre-Merge Report Transforms - self.logger.info(f"Applying REDCap report transforms") - for report_key, report_object in self.reports.items(): - self._apply_report_transforms(report_key) - - # Merge Reports - self.logger.info(f"Merging REDCap reports") - index_columns, merge_steps = self.post_transform_merge - self.merged = self._merge_reports(index_columns, merge_steps) - - # Apply Post-Merge Transforms - self.logger.info(f"Applying REDCap report post-merge transforms") - for transform, transform_kwdargs in self.post_merge_transforms: - self.merged = self.apply_transform( - self.merged, transform, transform_kwdargs - ) + try: - self.logger.info(f"REDCap transforms complete") + # Apply Pre-Merge 
Report Transforms + self.logger.info(f"Applying REDCap report transforms") + for report_key, report_object in self.reports.items(): + self._apply_report_transforms(report_key) - return + # Merge Reports + self.logger.info(f"Merging REDCap reports") + index_columns, merge_steps = self.post_transform_merge + self.merged = self._merge_reports(index_columns, merge_steps) + + # Apply Post-Merge Transforms + self.logger.info(f"Applying REDCap report post-merge transforms") + for transform, transform_kwdargs in self.post_merge_transforms: + self.merged = self.apply_transform( + self.merged, transform, transform_kwdargs + ) + + self.logger.info(f"REDCap transforms complete") + + except Exception as error: + self.logger.error(error) + self.logger.error("An error occurred during REDCap ETL. See above stacktrace.") + + return self # # Getters # def get_report_id(self, report_key: str) -> str: - """ - Returns a str instance of the REDCap report ID. - """ return self.reports[report_key]["id"] - def get_report_df(self, report_key: str) -> pd.DataFrame: - """ - Returns a pd.DataFrame instance containing the report. - """ + def get_report_df(self, report_key: str) -> pl.DataFrame: return self.reports[report_key]["df"] - def get_report_transformed_df(self, report_key: str) -> pd.DataFrame: - """ - Returns a pd.DataFrame instance containing the report - with normalization transforms applied. - """ + def get_report_transformed_df(self, report_key: str) -> pl.DataFrame: return self.reports[report_key]["transformed"] def get_report_transforms( self, report_key: str ) -> List[Tuple[str, Dict[str, Any]]]: - """ - Returns a list of transforms that will be applied to - the report - """ return self.reports[report_key]["transforms"] def get_report_annotations(self, report_key: str) -> List[Dict[str, Any]]: - """ - Returns a list of annotations generated from the - REDCap metadata API call. 
- """ return self.reports[report_key]["annotations"] # @@ -225,25 +228,41 @@ def _merge_reports( self, index_columns: List[str], merge_steps: List[Tuple[str, Dict[str, Any]]], - ) -> pd.DataFrame: - """ - Performs N - 1 merge transforms on N reports. - """ + ) -> pl.DataFrame: receiving_report_key, _ = merge_steps[0] - df_receiving_report = self.reports[receiving_report_key]["transformed"][ + df_receiving_report = self.reports[receiving_report_key]["transformed"].select( index_columns - ] + ) if len(merge_steps) > 0: - for providing_report_key, merge_kwdargs in merge_steps: + for merge_step in merge_steps: + providing_report_key, merge_kwdargs = merge_step df_providing_report = self.reports[providing_report_key]["transformed"] - df_receiving_report = df_receiving_report.merge( - df_providing_report, **merge_kwdargs + + # Map Pandas merge args to Polars join args + how = merge_kwdargs.get("how", "inner") + + # Handle on/left_on/right_on + on = merge_kwdargs.get("on", None) + left_on = merge_kwdargs.get("left_on", None) + right_on = merge_kwdargs.get("right_on", None) + + if not on and not left_on: + on = index_columns + df_receiving_report, df_providing_report = df_receiving_report.rechunk(), df_providing_report.rechunk() + df_receiving_report = df_receiving_report.join( + df_providing_report, + on=on, + left_on=left_on, + right_on=right_on, + how=how, + suffix=merge_kwdargs.get("suffixes", ("_x", "_y"))[1] + if "suffixes" in merge_kwdargs else "_right" ) else: self.logger.warn( - f"Unable to Merge – No merge steps provided, returning receiving_report pd.DataFrame." + f"Unable to Merge – No merge steps provided, returning receiving_report pl.DataFrame." ) return df_receiving_report @@ -252,30 +271,25 @@ def _merge_reports( # Transform Applicator # - # Applies Declared Transforms to Reports def _apply_report_transforms(self, report_key: str) -> None: - """ - Interal method that applies the transforms to each - report as an idempotent transform stack. 
- """ report = self.reports[report_key] annotation = report["annotation"] - report["transformed"] = report["df"] + # Clone to avoid mutating original reference + report["transformed"] = report["df"].clone() for transform in report["transforms"]: transform_name, transform_kwdargs = transform transform_kwdargs = transform_kwdargs | {"annotation": annotation} report["transformed"] = self.apply_transform( report["transformed"], transform_name, transform_kwdargs ) - return def apply_transform( self, - df: pd.DataFrame, + df: pl.DataFrame, transform_name: str, transform_kwdargs: Dict[str, Any] = {}, - ) -> pd.DataFrame: + ) -> pl.DataFrame: return getattr(self, f"_{transform_name}")(df, **transform_kwdargs) # @@ -288,18 +302,16 @@ def apply_transform( def _drop_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df = df.drop(columns=columns) + if columns: + df = df.drop(columns) return df - def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: - """ - Drop columns from pd.DataFrame. 
- """ + def drop_columns(self, df: pl.DataFrame, columns: List[str]) -> pl.DataFrame: return self._drop_columns(df=df, columns=columns) # @@ -308,21 +320,15 @@ def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: def _keep_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - columns = list( - set(df.columns) - - set(self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns)) - ) - df = df.drop(columns=columns) + ) -> pl.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns) + df = df.select(columns) return df - def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: - """ - Keep only selected columns in pd.DataFrame. - """ + def keep_columns(self, df: pl.DataFrame, columns: List[str]) -> pl.DataFrame: return self._keep_columns(df=df, columns=columns) # @@ -331,33 +337,24 @@ def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: def _append_column_suffix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], suffix: str = "", separator: str = "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df[columns] = df[columns].rename( - mapper=lambda name: f"{name}{separator}{suffix}" - ) + rename_map = {col: f"{col}{separator}{suffix}" for col in columns} + df = df.rename(rename_map) return df def append_column_suffix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], suffix: str = "", separator: str = "", - ) -> pd.DataFrame: - """ - Append a suffix to columns of pd.DataFrame. Note: If no - columns parameter is provided, the suffix is applied every - column. If no suffix is provided, the column names remain - unchanged. 
A separator argument allows for the expansion - of column names by one or more characters, e.g. "_" for - snakecase. - """ + ) -> pl.DataFrame: return self._append_column_suffix( df=df, columns=columns, suffix=suffix, separator=separator ) @@ -368,33 +365,24 @@ def append_column_suffix( def _prepend_column_prefix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], prefix: str = "", separator: str = "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df[columns] = df[columns].rename( - mapper=lambda name: f"{prefix}{separator}{name}" - ) + rename_map = {col: f"{prefix}{separator}{col}" for col in columns} + df = df.rename(rename_map) return df def prepend_column_prefix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], prefix: str = "", separator: str = "", - ) -> pd.DataFrame: - """ - Append a prefix to columns of pd.DataFrame. Note: If no - columns parameter is provided, the prefix is applied every - column. If no prefix is provided, the column names remain - unchanged. A separator argument allows for the expansion - of column names by one or more characters, e.g. "_" for - snakecase. 
- """ + ) -> pl.DataFrame: return self._prepend_column_prefix( df=df, columns=columns, prefix=prefix, separator=separator ) @@ -405,12 +393,12 @@ def prepend_column_prefix( def _remap_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - # Resolve Mappable Fields and Available Value Maps + ) -> pl.DataFrame: + # Resolve Mappable Fields columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) mappable_fields: List[Dict[str, Any]] @@ -425,52 +413,44 @@ def _remap_values_by_columns( if len(field["options"]) > 0 and field["name"] in columns ] + # Vectorized Re-mapping + expressions = [] + for mappable_field in mappable_fields: - column, value_map = mappable_field["name"], mappable_field["options"] - for i, value in enumerate(df[column]): - subvalues = [ - subvalue.strip() - for subvalue in str(value).split(",") - if len(subvalue) > 0 - ] - df.loc[i, column] = self.multivalue_separator.join( - [ - value_map[subvalue] - for subvalue in subvalues - if subvalue in value_map.keys() - ] + column_name = mappable_field["name"] + mapping_options = mappable_field["options"] + + # Ensure keys in mapping are strings for replacement + str_mapping = {str(k): str(v) for k, v in mapping_options.items()} + + # 1. Split string by comma (handling potential multivalue fields) + # 2. Replace values in the list using the mapping (default to original if not found) + # 3. 
Join back with the configured separator + expr = ( + pl.col(column_name) + .cast(pl.Utf8) # Ensure string for splitting + .str.split(",") + .list.eval( + pl.element() + .str.strip_chars() # Strip whitespace from CSV parsing "1, 2" -> "2" + .replace(str_mapping, default=pl.element()) ) + .list.join(self.multivalue_separator) + .alias(column_name) + ) + expressions.append(expr) + + if expressions: + df = df.with_columns(expressions) return df def remap_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, - ) -> pd.DataFrame: - """ - Remap values by column using a list of annotations. - Each annotation is a dictionary containing a the - following keys: "name", "type", and "options". Key - to this method are then "name" and "options" entries. - The value of the "name" corresponds to the - pd.DataFrame column name. The value of the"options" - entry is a value_map object generated from the - REDCapo metadata API request: - - annotation = { - "name": field["field_name"], - "type": field["field_type"], - "options": field["field_options"] - } - - If multiple values are found in the field, they will - be mapped with a separator. The default separator is - a pipe (i.e. "|"). - - Returns a transformed pd.DataFrame - """ + ) -> pl.DataFrame: return self._remap_values_by_columns( df=df, columns=columns, value_map=value_map ) @@ -481,31 +461,55 @@ def remap_values_by_columns( def _transform_values_by_column( self, - df: pd.DataFrame, + df: pl.DataFrame, column: str, new_column_name: str, transform: Callable, missing_value: Any, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - df[new_column_name] = df.loc[df[column] != missing_value, column].apply( - transform + ) -> pl.DataFrame: + # In Polars, using an arbitrary python callable (lambda) via map_elements + # is the equivalent of pandas apply. + + # FIX: The user's transform lambda (e.g., date functions) might return Integers/Floats. 
+ # Polars map_elements with return_dtype=pl.Utf8 strictly enforces string returns. + # We wrap the transform in a helper that forces string conversion before returning to Polars. + + def safe_string_transform(val): + # If the value coming in is our known missing value, return it immediately + if val == str(missing_value): + return str(missing_value) + try: + # Apply user transform + result = transform(val) + # Force cast to string to satisfy pl.Utf8 return type + return str(result) if result is not None else str(missing_value) + except Exception: + # If transformation fails (e.g. date parse error), return missing value + return str(missing_value) + + df = df.with_columns( + pl.when(pl.col(column) != str(missing_value)) + .then( + pl.col(column).map_elements(safe_string_transform, return_dtype=pl.Utf8) + ) + .otherwise(pl.lit(str(missing_value))) + .alias(new_column_name) ) - df[new_column_name] = df[new_column_name].fillna(missing_value) + + # Ensure no actual nulls slip through + df = df.with_columns(pl.col(new_column_name).fill_null(str(missing_value))) + return df def transform_values_by_column( self, - df: pd.DataFrame, + df: pl.DataFrame, column: str, new_column_name: str, transform: Callable, missing_value: Any, - ) -> pd.DataFrame: - """ - Replace 0-length values or values with keys in - self.none_map with self.missing_value_generic. 
- """ + ) -> pl.DataFrame: return self._transform_values_by_column( df=df, column=column, @@ -520,31 +524,44 @@ def transform_values_by_column( def _map_missing_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], missing_value: Any = None, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) missing_value = ( missing_value if missing_value is not None else self.missing_value_generic ) - for column in columns: - for i, value in enumerate(df[column]): - if (len(str(value)) == 0) or (value in self.none_map.keys()): - df.loc[i, column] = missing_value - else: - continue + + # Vectorized update + expressions = [] + none_keys = list(self.none_map.keys()) + + for col_name in columns: + # Check for null, empty string, or "nan"/"NaN" string matches + is_missing = ( + pl.col(col_name).is_null() | + (pl.col(col_name) == "") | + (pl.col(col_name).is_in([str(k) for k in none_keys])) + ) + + expr = ( + pl.when(is_missing) + .then(pl.lit(str(missing_value))) # Ensure literal is string + .otherwise(pl.col(col_name)) + .alias(col_name) + ) + expressions.append(expr) + + if expressions: + df = df.with_columns(expressions) return df def map_missing_values_by_columns( - self, df: pd.DataFrame, columns: List[str], missing_value: Any - ) -> pd.DataFrame: - """ - Replace 0-length values or values with keys in - self.none_map with self.missing_value_generic. 
- """ + self, df: pl.DataFrame, columns: List[str], missing_value: Any + ) -> pl.DataFrame: return self._map_missing_values_by_columns( df=df, columns=columns, missing_value=missing_value ) @@ -559,67 +576,92 @@ def map_missing_values_by_columns( def _drop_rows( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], - condition: Callable = lambda column: column == "", + condition: Callable[[str], pl.Expr] = lambda col_name: pl.col(col_name) == "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df = df[~df[columns].apply(lambda column: column.apply(condition)).any(axis=1)] + if not columns: + return df + expressions = [condition(col) for col in columns] + mask = pl.any_horizontal(expressions).fill_null(False) + df = df.filter(~mask) + return df def drop_rows( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], - condition: Callable = lambda column: column == "", - ) -> pd.DataFrame: - """ - Drop rows from pd.DataFrame. - """ - return self._drop_rows(df=df, columns=columns) + condition: Callable[[str], pl.Expr] = lambda col_name: pl.col(col_name) == "", + ) -> pl.DataFrame: + return self._drop_rows(df=df, columns=columns, condition=condition) # # Transforms - Aggregation # - # ... 
- # # Transforms - Aggregate Repeat Instruments by Index # def _aggregate_repeat_instrument_by_index( self, - df: pd.DataFrame, - aggregator: str = "max", + df: pl.DataFrame, + aggregator: Literal['min', 'max', 'first', 'last', 'sum', 'mean', 'median', 'len'] | pl.Expr | None = "max", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - new_columns = df["redcap_repeat_instrument"].unique() - pivot = pd.pivot_table( - df, - index=self.index_columns, - columns=["redcap_repeat_instrument"], + ) -> pl.DataFrame: + + # Check if repeat instrument exists + if "redcap_repeat_instrument" not in df.columns: + return df + + df = df.filter( + pl.col("redcap_repeat_instrument").is_not_null() & + pl.all_horizontal(pl.col(c).is_not_null() for c in self.index_columns) + ) + + # Create the pivoted dataframe + df = df.rechunk() # Avoid Polars/Rust race condition + pivot_df = df.pivot( values="redcap_repeat_instance", - aggfunc=aggregator, - fill_value=self.missing_value_generic, + index=self.index_columns, + on="redcap_repeat_instrument", + aggregate_function=aggregator ) - df = df.merge(pivot, how="outer", on=self.index_columns) - df = df.drop_duplicates(self.index_columns, keep="first") + + # The pivot might introduce nulls, fill with missing generic + pivot_df = pivot_df.fill_null(self.missing_value_generic) + + # Merge back to original (outer join) + df_unique = df.unique(subset=self.index_columns, keep="first") + df_unique.rechunk() + df = df_unique.join(pivot_df, on=self.index_columns, how="left") + + # Cast new columns (all columns in pivot_df except index) + new_columns = [c for c in pivot_df.columns if c not in self.index_columns] + + # Map python types to Polars types + pl_type = pl.Float64 if dtype is float else pl.Int64 if dtype is int else pl.Utf8 + if dtype is int: pl_type = pl.Int64 + for column in new_columns: - df[column] = df[column].astype(dtype) + df = df.with_columns( + pl.when(pl.col(column) == self.missing_value_generic) + 
.then(None) + .otherwise(pl.col(column)) + .cast(pl_type, strict=False) + .alias(column) + ) + return df def aggregate_repeat_instrument_by_index( - self, df: pd.DataFrame, aggregator: str = "max", dtype: Callable = float - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. - """ + self, df: pl.DataFrame, aggregator: Literal['min', 'max', 'first', 'last', 'sum', 'mean', 'median', 'len'] | pl.Expr | None = "max", dtype: Callable = float + ) -> pl.DataFrame: return self._aggregate_repeat_instrument_by_index( df=df, aggregator=aggregator, dtype=dtype ) @@ -630,87 +672,103 @@ def aggregate_repeat_instrument_by_index( def _new_column_from_binary_columns_positive_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", all_negative_value: str = "", default_value: str | None = "Value Unavailable", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: new_column_name = ( new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) ) - df[new_column_name] = "" - for column_name, column_value in column_name_map.items(): - df.loc[ - df[column_name] == "Yes", new_column_name - ] += f"{column_value}{self.multivalue_separator}" - for column_name, column_value in column_name_map.items(): - df.loc[ - (df[column_name] == default_value) & (df[new_column_name] == ""), - new_column_name, - ] = default_value - df.loc[df[new_column_name] == "", new_column_name] = all_negative_value - # Remove delimiter character if column ends with it - rgx = f"\\{self.multivalue_separator}$" - df[new_column_name] = df[new_column_name].str.replace(rgx, "", regex=True) + + # Build a list of expressions: If Col == Yes then "Label|" else "" + concat_exprs = [] + for col_name, label in column_name_map.items(): + 
concat_exprs.append( + pl.when(pl.col(col_name) == "Yes") + .then(pl.lit(f"{label}{self.multivalue_separator}")) + .otherwise(pl.lit("")) + ) + + # Concatenate them all + full_str_col = pl.concat_str(concat_exprs) + + # Check for default value presence + any_default = pl.any_horizontal([ + pl.col(c) == default_value for c in column_name_map.keys() + ]) + + df = df.with_columns( + pl.when((full_str_col == "") & any_default) + .then(pl.lit(default_value)) + .when(full_str_col == "") + .then(pl.lit(all_negative_value)) + .otherwise( + # Remove trailing separator + full_str_col.str.strip_chars_end(self.multivalue_separator) + ) + .alias(new_column_name) + ) return df def new_column_from_binary_columns_positive_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", all_negative_value: str = "", default_value: str | None = "Value Unavailable", dtype: Callable = float, - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. 
- """ + ) -> pl.DataFrame: return self._new_column_from_binary_columns_positive_class( df=df, column_name_map=column_name_map, new_column_name=new_column_name, + all_negative_value=all_negative_value, default_value=default_value, dtype=dtype, ) def _new_column_from_binary_columns_negative_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, - ) -> pd.DataFrame: + ) -> pl.DataFrame: new_column_name = ( new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) ) - df[new_column_name] = df[list(column_name_map.keys())].idxmin(axis=1) + + target_cols = list(column_name_map.keys()) + idx_to_col = {i: name for i, name in enumerate(target_cols)} + + df = df.with_columns( + pl.concat_list([ + pl.col(c).cast(pl.Float64, strict=False) for c in target_cols + ]) + .list.arg_min() # Returns index of min value + .replace(idx_to_col) # Map index back to column name + .alias(new_column_name) + ) + return df def new_column_from_binary_columns_negative_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. - """ + ) -> pl.DataFrame: return self._new_column_from_binary_columns_negative_class( df=df, column_name_map=column_name_map, @@ -722,26 +780,25 @@ def new_column_from_binary_columns_negative_class( # Utilities # - # Transform Prelude - Get Applicable Transform Columns def _resolve_columns_with_dataframe( - self, df: pd.DataFrame, columns: List[str], default_columns: List[str] + self, df: pl.DataFrame, columns: List[str], default_columns: List[str] ) -> List[str]: """ Internal utility function. Uses set logic to ensure requested columns are available within the target - pd.DataFrame. 
+ pl.DataFrame. """ available_columns, requested_columns = set(df.columns), set(columns) resolved_columns = [] if len(requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – columns parameter has no values. Defaulting to all df.columns" + f"Unexpected Transform – columns parameter has no values. Defaulting to provided default_columns" ) resolved_columns = default_columns elif len(available_columns & requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – none of the requested columns were found in df.columns. Defaulting to all df.columns" + f"Unexpected Transform – none of the requested columns were found in df.columns. Defaulting to provided default_columns" ) resolved_columns = default_columns elif len(requested_columns - available_columns) > 0: @@ -754,15 +811,14 @@ def _resolve_columns_with_dataframe( return resolved_columns - # Extract REDCap Type Metadata - def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: + def _get_redcap_type_metadata(self, df: pl.DataFrame) -> List[Dict[str, Any]]: """ Extracts REDCap field name, type, and options (the - metadata) for each column in the target pd.DataFrame + metadata) for each column in the target pl.DataFrame """ # REDCap Internal Variable Metadata - metadata = [ + metadata: List[Dict[str, Any]] = [ {"name": "redcap_data_access_group", "type": "text", "options": {}}, {"name": "redcap_repeat_instrument", "type": "text", "options": {}}, {"name": "redcap_repeat_instance", "type": "number", "options": {}}, @@ -775,14 +831,16 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: skip_types = {"file", "calc", "descriptive", "notes"} # Get Column Metadata - columns = df.columns.tolist() + columns = df.columns for field in sorted(self.metadata, key=lambda f: f["field_name"]): if field["field_name"] in columns: field_type = field["field_type"] options: dict = {} if field_type in complex_types: rgx = self._field_rgx[field_type] + # Parse choices 
string: "1, Yes | 2, No" for option in field["select_choices_or_calculations"].split("|"): + if "," not in option: continue k, v = ( option.split(",")[0], (",".join(option.split(",")[1:])).strip(), @@ -837,12 +895,12 @@ def export_raw( for report_key, report_object in self.reports.items(): filename = f"{report_key}_raw{filetype}" filepath = os.path.join(self.cwd, path, filename) - transformed = report_object["df"] - transformed.to_csv( + transformed: pl.DataFrame = report_object["df"] + transformed.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 # Approx match to %.2f ) return self @@ -853,31 +911,29 @@ def export_transformed( for report_key, report_object in self.reports.items(): filename = f"{report_key}_transformed{filetype}" filepath = os.path.join(self.cwd, path, filename) - transformed = report_object["transformed"] - transformed.to_csv( + transformed: pl.DataFrame = report_object["transformed"] + transformed.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 ) return self # Export Merged Transforms def export_merged_transformed( - self, path: str = "", separator: str = "\t", filetype: str = ".tsv" + self, filepath: str = "transformed-merged_redcap-extract.tsv", separator: str = "\t" ) -> object: - filename = f"transformed-merged_redcap-extract{filetype}" - filepath = os.path.join(self.cwd, path, filename) - self.merged.to_csv( + filepath = os.path.join(self.cwd, filepath) + self.merged.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 ) return self - if __name__ == "__main__": - pass + pass else: - pass + pass diff --git a/modules/etl/transforms/redcap_release_transform.py 
b/modules/etl/transforms/redcap_release_transform.py index ed4f12ff..d69ae905 100644 --- a/modules/etl/transforms/redcap_release_transform.py +++ b/modules/etl/transforms/redcap_release_transform.py @@ -1,70 +1,69 @@ # Library Modules -from typing import Any, Callable, Union, List, Dict, Tuple -import re, os, csv, json, logging, datetime +from typing import Any, Callable, Union, List, Dict, Tuple, Literal +import re, os, logging, copy, json, io # Third Party Modules from azure.storage.blob import BlobServiceClient from redcap import Project -import pandas as pd +import polars as pl import numpy as np - class RedcapReleaseTransform(object): - def __init__(self, config: dict) -> None: - - print("REDCap Release Transform") + def __init__(self, config: dict): # # Config # + # + self.config = copy.deepcopy(config) + # Get CWD self.cwd = os.getcwd() - # REDCap Azure Storage Access Config - self.redcap_data_dir = config["redcap_data_dir"] + # REDCap API Config self.redcap_metadata_config = config["project_metadata"] # Data Config self.index_columns = ( - config["index_columns"] if "index_columns" in config else ["record_id"] + self.config["index_columns"] if "index_columns" in self.config else ["record_id"] ) # REDCap Reports Config - self.reports_configs = config["reports"] if "reports" in config else [] + self.reports_configs = self.config["reports"] if "reports" in self.config else [] # Report Merging self.post_transform_merge = ( - config["post_transform_merge"] - if "post_transform_merge" in config + self.config["post_transform_merge"] + if "post_transform_merge" in self.config else ([], []) ) # Post Merge Transforms self.post_merge_transforms = ( - config["post_merge_transforms"] if "post_merge_transforms" in config else [] + self.config["post_merge_transforms"] if "post_merge_transforms" in self.config else [] ) # Column Value Separator self.multivalue_separator = ( - config["multivalue_separator"] if "multivalue_separator" in config else "|" + 
self.config["multivalue_separator"] if "multivalue_separator" in self.config else "|" ) # CSV Float Format (Default: "%.2f") self.csv_float_format = ( - config["csv_float_format"] if "csv_float_format" in config else "%.2f" + self.config["csv_float_format"] if "csv_float_format" in self.config else "%.2f" ) self.missing_value_generic = ( - config["missing_value_generic"] - if "missing_value_generic" in config + self.config["missing_value_generic"] + if "missing_value_generic" in self.config else "Value Unavailable" ) # Logging Config self.logging_config = ( - config["logging_config"] - if "logging_config" in config + self.config["logging_config"] + if "logging_config" in self.config else { "encoding": "utf-8", "filename": "REDCapETL.log", @@ -74,7 +73,7 @@ def __init__(self, config: dict) -> None: # Configure Logging logging.basicConfig(**self.logging_config) - self.logger = logging.getLogger("RedcapTransform") + self.logger = logging.getLogger("RedcapTransform:Release") # # REDCap Parsing Variables @@ -95,9 +94,10 @@ def __init__(self, config: dict) -> None: } # General Parsing Variables + # Note: Polars handles nulls differently (null vs NaN). + # We map standard "empty" markers to the generic missing value. 
self.none_values = [ np.nan, - pd.NaT, None, "nan", "NaN", @@ -106,19 +106,29 @@ def __init__(self, config: dict) -> None: self.missing_value_generic, ] self.none_map = {key: self.missing_value_generic for key in self.none_values} - self.survey_instrument_map = { - "2": "Complete", - "1": "Unverified", - "0": "Incomplete", - "": self.missing_value_generic, - } self.logger.info(f"Initialized") # - # PyCap Initialization + # Setup Reports & Apply Transforms # + # Internal Defaults + # - Key Assumptions for Transform Functions + # – Only Update if REDCap API and/or PyCap Update + self._default_report_kwdargs = { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "|", + } + + self.project: Any = None + self.reports: Dict[str, Any] = {} + self.merged: pl.DataFrame = pl.DataFrame([]) + + def run (self): + # Initialize PyCap Objects self.logger.info(f"Retrieving REDCap project data") @@ -129,22 +139,9 @@ def __init__(self, config: dict) -> None: f"{self.redcap_metadata_config['filepath']}/{self.redcap_metadata_config['filename']}" ) - # - # Setup Reports & Apply Transforms - # - - # Internal Defaults - # - Key Assumptions for Transform Functions - # – Only Update if REDCap API and/or PyCap Update - self._default_report_kwdargs = { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - } # Get & Structure Report self.logger.info(f"Retrieving Stored REDCap reports") - self.reports = {} + for report_config in self.reports_configs: # Get Report report_key = report_config["key"] @@ -152,42 +149,47 @@ def __init__(self, config: dict) -> None: report_transforms = report_config["transforms"] # Load Release REDCap Reports - report = report_dataframe = self.get_stored_report( + df = self.get_stored_report( os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION") or "", os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER") or "", 
f"{report_config['filepath']}/{report_config['filename']}" ) - report.to_csv(f"~/Downloads/etl-redcap-export-release-{report_kwdargs['report_id']}") # Structure Reports self.reports[report_key] = { "id": report_kwdargs["report_id"], - "df": report_dataframe, + "df": df.rechunk(), "transforms": report_transforms, "transformed": None, - "annotation": self._get_redcap_type_metadata(pd.DataFrame(report)), + "annotation": self._get_redcap_type_metadata(df), } - # Apply Pre-Merge Report Transforms - self.logger.info(f"Applying REDCap report transforms") - for report_key, report_object in self.reports.items(): - self._apply_report_transforms(report_key) - - # Merge Reports - self.logger.info(f"Merging REDCap reports") - index_columns, merge_steps = self.post_transform_merge - self.merged = self._merge_reports(index_columns, merge_steps) - - # Apply Post-Merge Transforms - self.logger.info(f"Applying REDCap report post-merge transforms") - for transform, transform_kwdargs in self.post_merge_transforms: - self.merged = self.apply_transform( - self.merged, transform, transform_kwdargs - ) + try: - self.logger.info(f"REDCap transforms complete") + # Apply Pre-Merge Report Transforms + self.logger.info(f"Applying REDCap report transforms") + for report_key, report_object in self.reports.items(): + self._apply_report_transforms(report_key) - return + # Merge Reports + self.logger.info(f"Merging REDCap reports") + index_columns, merge_steps = self.post_transform_merge + self.merged = self._merge_reports(index_columns, merge_steps) + + # Apply Post-Merge Transforms + self.logger.info(f"Applying REDCap report post-merge transforms") + for transform, transform_kwdargs in self.post_merge_transforms: + self.merged = self.apply_transform( + self.merged, transform, transform_kwdargs + ) + + self.logger.info(f"REDCap transforms complete") + + except Exception as error: + self.logger.error(error) + self.logger.error("An error occurred during REDCap ETL. 
See above stacktrace.") + + return self # # Getters @@ -205,50 +207,65 @@ def get_stored_project_metadata(self, connection_string: str, container_name: st return json.loads(download_stream.readall()) - def get_stored_report(self, connection_string: str, container_name: str, blob_path: str) -> pd.DataFrame: + def get_stored_report(self, connection_string: str, container_name: str, blob_path: str) -> pl.DataFrame: + + self.logger.info(f"Downloading report blob: {blob_path}") + try: + blob_service_client = BlobServiceClient.from_connection_string(connection_string) + container_client = blob_service_client.get_container_client(container_name) + blob_client = container_client.get_blob_client(blob_path) + + # Get Blob as bytes stream + download_stream = blob_client.download_blob() + blob_bytes = download_stream.readall() + + # Use io.BytesIO to wrap the bytes content into a file-like object + # Polars can read directly from this buffer. + report_buffer = io.BytesIO(blob_bytes) + + # Read CSV directly into a Polars DataFrame + if not report_buffer: + df = pl.DataFrame([]) + else: + # Load DataFrame + df = pl.read_csv( + report_buffer, + separator="|", + infer_schema_length=0 + ) + # Convert all columns to Utf8 + df = df.select([pl.col(c).cast(pl.Utf8) for c in df.columns]) - # Connect to Azure Blog Storage - blob_service_client = BlobServiceClient.from_connection_string(connection_string) - container_client = blob_service_client.get_container_client(container_name) - blob_client = container_client.get_blob_client(blob_path) - # Get Blob - df = pd.read_csv(blob_client.download_blob(), dtype = str) - return df + self.logger.info(f"Successfully loaded report into Polars DataFrame with shape {df.shape}") + return df + + except Exception as e: + self.logger.error(f"Failed to retrieve or read Azure Blob report at {blob_path}: {e}") + # Return an empty Polars DataFrame on failure + return pl.DataFrame({}) + + + + # + # Getters + # def get_report_id(self, report_key: str) -> str: 
- """ - Returns a str instance of the REDCap report ID. - """ return self.reports[report_key]["id"] - def get_report_df(self, report_key: str) -> pd.DataFrame: - """ - Returns a pd.DataFrame instance containing the report. - """ + def get_report_df(self, report_key: str) -> pl.DataFrame: return self.reports[report_key]["df"] - def get_report_transformed_df(self, report_key: str) -> pd.DataFrame: - """ - Returns a pd.DataFrame instance containing the report - with normalization transforms applied. - """ + def get_report_transformed_df(self, report_key: str) -> pl.DataFrame: return self.reports[report_key]["transformed"] def get_report_transforms( self, report_key: str ) -> List[Tuple[str, Dict[str, Any]]]: - """ - Returns a list of transforms that will be applied to - the report - """ return self.reports[report_key]["transforms"] def get_report_annotations(self, report_key: str) -> List[Dict[str, Any]]: - """ - Returns a list of annotations generated from the - REDCap metadata API call. - """ return self.reports[report_key]["annotations"] # @@ -259,25 +276,46 @@ def _merge_reports( self, index_columns: List[str], merge_steps: List[Tuple[str, Dict[str, Any]]], - ) -> pd.DataFrame: - """ - Performs N - 1 merge transforms on N reports. 
- """ + ) -> pl.DataFrame: receiving_report_key, _ = merge_steps[0] - df_receiving_report = self.reports[receiving_report_key]["transformed"][ + df_receiving_report = self.reports[receiving_report_key]["transformed"].select( index_columns - ] + ) if len(merge_steps) > 0: - for providing_report_key, merge_kwdargs in merge_steps: + for merge_step in merge_steps: + providing_report_key, merge_kwdargs = merge_step df_providing_report = self.reports[providing_report_key]["transformed"] - df_receiving_report = df_receiving_report.merge( - df_providing_report, **merge_kwdargs + + # Ensure Reports Aren't None + if df_receiving_report is None or df_providing_report is None: + self.logger.warn(f"Skipping merge step: {merge_step}") + continue + + # Map Pandas merge args to Polars join args + how = merge_kwdargs.get("how", "inner") + + # Handle on/left_on/right_on + on = merge_kwdargs.get("on", None) + left_on = merge_kwdargs.get("left_on", None) + right_on = merge_kwdargs.get("right_on", None) + + if not on and not left_on: + on = index_columns + df_receiving_report, df_providing_report = df_receiving_report.rechunk(), df_providing_report.rechunk() + df_receiving_report = df_receiving_report.join( + df_providing_report, + on=on, + left_on=left_on, + right_on=right_on, + how=how, + suffix=merge_kwdargs.get("suffixes", ("_x", "_y"))[1] + if "suffixes" in merge_kwdargs else "_right" ) else: self.logger.warn( - f"Unable to Merge – No merge steps provided, returning receiving_report pd.DataFrame." + f"Unable to Merge – No merge steps provided, returning receiving_report pl.DataFrame." ) return df_receiving_report @@ -286,31 +324,27 @@ def _merge_reports( # Transform Applicator # - # Applies Declared Transforms to Reports def _apply_report_transforms(self, report_key: str) -> None: - """ - Interal method that applies the transforms to each - report as an idempotent transform stack. 
- """ report = self.reports[report_key] annotation = report["annotation"] - report["transformed"] = report["df"] + # Clone to avoid mutating original reference + report["transformed"] = report["df"].clone() for transform in report["transforms"]: transform_name, transform_kwdargs = transform transform_kwdargs = transform_kwdargs | {"annotation": annotation} report["transformed"] = self.apply_transform( report["transformed"], transform_name, transform_kwdargs ) - return def apply_transform( self, - df: pd.DataFrame, + df: pl.DataFrame, transform_name: str, transform_kwdargs: Dict[str, Any] = {}, - ) -> pd.DataFrame: - return getattr(self, f"_{transform_name}")(df, **transform_kwdargs) + ) -> pl.DataFrame: + df = getattr(self, f"_{transform_name}")(df, **transform_kwdargs) + return df # # Transforms - Columns @@ -322,18 +356,16 @@ def apply_transform( def _drop_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df = df.drop(columns=columns) + if columns: + df = df.drop(columns) return df - def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: - """ - Drop columns from pd.DataFrame. 
- """ + def drop_columns(self, df: pl.DataFrame, columns: List[str]) -> pl.DataFrame: return self._drop_columns(df=df, columns=columns) # @@ -342,21 +374,15 @@ def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: def _keep_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - columns = list( - set(df.columns) - - set(self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns)) - ) - df = df.drop(columns=columns) + ) -> pl.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns) + df = df.select(columns) return df - def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: - """ - Keep only selected columns in pd.DataFrame. - """ + def keep_columns(self, df: pl.DataFrame, columns: List[str]) -> pl.DataFrame: return self._keep_columns(df=df, columns=columns) # @@ -365,33 +391,24 @@ def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: def _append_column_suffix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], suffix: str = "", separator: str = "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df[columns] = df[columns].rename( - mapper=lambda name: f"{name}{separator}{suffix}" - ) + rename_map = {col: f"{col}{separator}{suffix}" for col in columns} + df = df.rename(rename_map) return df def append_column_suffix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], suffix: str = "", separator: str = "", - ) -> pd.DataFrame: - """ - Append a suffix to columns of pd.DataFrame. Note: If no - columns parameter is provided, the suffix is applied every - column. If no suffix is provided, the column names remain - unchanged. 
A separator argument allows for the expansion - of column names by one or more characters, e.g. "_" for - snakecase. - """ + ) -> pl.DataFrame: return self._append_column_suffix( df=df, columns=columns, suffix=suffix, separator=separator ) @@ -402,33 +419,24 @@ def append_column_suffix( def _prepend_column_prefix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], prefix: str = "", separator: str = "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df[columns] = df[columns].rename( - mapper=lambda name: f"{prefix}{separator}{name}" - ) + rename_map = {col: f"{prefix}{separator}{col}" for col in columns} + df = df.rename(rename_map) return df def prepend_column_prefix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], prefix: str = "", separator: str = "", - ) -> pd.DataFrame: - """ - Append a prefix to columns of pd.DataFrame. Note: If no - columns parameter is provided, the prefix is applied every - column. If no prefix is provided, the column names remain - unchanged. A separator argument allows for the expansion - of column names by one or more characters, e.g. "_" for - snakecase. 
- """ + ) -> pl.DataFrame: return self._prepend_column_prefix( df=df, columns=columns, prefix=prefix, separator=separator ) @@ -439,12 +447,12 @@ def prepend_column_prefix( def _remap_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - # Resolve Mappable Fields and Available Value Maps + ) -> pl.DataFrame: + # Resolve Mappable Fields columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) mappable_fields: List[Dict[str, Any]] @@ -459,52 +467,44 @@ def _remap_values_by_columns( if len(field["options"]) > 0 and field["name"] in columns ] + # Vectorized Re-mapping + expressions = [] + for mappable_field in mappable_fields: - column, value_map = mappable_field["name"], mappable_field["options"] - for i, value in enumerate(df[column]): - subvalues = [ - subvalue.strip() - for subvalue in str(value).split(",") - if len(subvalue) > 0 - ] - df.loc[i, column] = self.multivalue_separator.join( - [ - value_map[subvalue] - for subvalue in subvalues - if subvalue in value_map.keys() - ] + column_name = mappable_field["name"] + mapping_options = mappable_field["options"] + + # Ensure keys in mapping are strings for replacement + str_mapping = {str(k): str(v) for k, v in mapping_options.items()} + + # 1. Split string by comma (handling potential multivalue fields) + # 2. Replace values in the list using the mapping (default to original if not found) + # 3. 
Join back with the configured separator + expr = ( + pl.col(column_name) + .cast(pl.Utf8) # Ensure string for splitting + .str.split(",") + .list.eval( + pl.element() + .str.strip_chars() # Strip whitespace from CSV parsing "1, 2" -> "2" + .replace(str_mapping, default=pl.element()) ) + .list.join(self.multivalue_separator) + .alias(column_name) + ) + expressions.append(expr) + + if expressions: + df = df.with_columns(expressions) return df def remap_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, - ) -> pd.DataFrame: - """ - Remap values by column using a list of annotations. - Each annotation is a dictionary containing a the - following keys: "name", "type", and "options". Key - to this method are then "name" and "options" entries. - The value of the "name" corresponds to the - pd.DataFrame column name. The value of the"options" - entry is a value_map object generated from the - REDCapo metadata API request: - - annotation = { - "name": field["field_name"], - "type": field["field_type"], - "options": field["field_options"] - } - - If multiple values are found in the field, they will - be mapped with a separator. The default separator is - a pipe (i.e. "|"). - - Returns a transformed pd.DataFrame - """ + ) -> pl.DataFrame: return self._remap_values_by_columns( df=df, columns=columns, value_map=value_map ) @@ -515,31 +515,55 @@ def remap_values_by_columns( def _transform_values_by_column( self, - df: pd.DataFrame, + df: pl.DataFrame, column: str, new_column_name: str, transform: Callable, missing_value: Any, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - df[new_column_name] = df.loc[df[column] != missing_value, column].apply( - transform + ) -> pl.DataFrame: + # In Polars, using an arbitrary python callable (lambda) via map_elements + # is the equivalent of pandas apply. + + # FIX: The user's transform lambda (e.g., date functions) might return Integers/Floats. 
+ # Polars map_elements with return_dtype=pl.Utf8 strictly enforces string returns. + # We wrap the transform in a helper that forces string conversion before returning to Polars. + + def safe_string_transform(val): + # If the value coming in is our known missing value, return it immediately + if val == str(missing_value): + return str(missing_value) + try: + # Apply user transform + result = transform(val) + # Force cast to string to satisfy pl.Utf8 return type + return str(result) if result is not None else str(missing_value) + except Exception: + # If transformation fails (e.g. date parse error), return missing value + return str(missing_value) + + df = df.with_columns( + pl.when(pl.col(column) != str(missing_value)) + .then( + pl.col(column).map_elements(safe_string_transform, return_dtype=pl.Utf8) + ) + .otherwise(pl.lit(str(missing_value))) + .alias(new_column_name) ) - df[new_column_name] = df[new_column_name].fillna(missing_value) + + # Ensure no actual nulls slip through + df = df.with_columns(pl.col(new_column_name).fill_null(str(missing_value))) + return df def transform_values_by_column( self, - df: pd.DataFrame, + df: pl.DataFrame, column: str, new_column_name: str, transform: Callable, missing_value: Any, - ) -> pd.DataFrame: - """ - Replace 0-length values or values with keys in - self.none_map with self.missing_value_generic. 
- """ + ) -> pl.DataFrame: return self._transform_values_by_column( df=df, column=column, @@ -554,31 +578,44 @@ def transform_values_by_column( def _map_missing_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], missing_value: Any = None, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) missing_value = ( missing_value if missing_value is not None else self.missing_value_generic ) - for column in columns: - for i, value in enumerate(df[column]): - if (len(str(value)) == 0) or (value in self.none_map.keys()): - df.loc[i, column] = missing_value - else: - continue + + # Vectorized update + expressions = [] + none_keys = list(self.none_map.keys()) + + for col_name in columns: + # Check for null, empty string, or "nan"/"NaN" string matches + is_missing = ( + pl.col(col_name).is_null() | + (pl.col(col_name) == "") | + (pl.col(col_name).is_in([str(k) for k in none_keys])) + ) + + expr = ( + pl.when(is_missing) + .then(pl.lit(str(missing_value))) # Ensure literal is string + .otherwise(pl.col(col_name)) + .alias(col_name) + ) + expressions.append(expr) + + if expressions: + df = df.with_columns(expressions) return df def map_missing_values_by_columns( - self, df: pd.DataFrame, columns: List[str], missing_value: Any - ) -> pd.DataFrame: - """ - Replace 0-length values or values with keys in - self.none_map with self.missing_value_generic. 
- """ + self, df: pl.DataFrame, columns: List[str], missing_value: Any + ) -> pl.DataFrame: return self._map_missing_values_by_columns( df=df, columns=columns, missing_value=missing_value ) @@ -593,67 +630,92 @@ def map_missing_values_by_columns( def _drop_rows( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], - condition: Callable = lambda column: column == "", + condition: Callable[[str], pl.Expr] = lambda col_name: pl.col(col_name) == "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df = df[~df[columns].apply(lambda column: column.apply(condition)).any(axis=1)] + if not columns: + return df + expressions = [condition(col) for col in columns] + mask = pl.any_horizontal(expressions).fill_null(False) + df = df.filter(~mask) + return df def drop_rows( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], - condition: Callable = lambda column: column == "", - ) -> pd.DataFrame: - """ - Drop rows from pd.DataFrame. - """ - return self._drop_rows(df=df, columns=columns) + condition: Callable[[str], pl.Expr] = lambda col_name: pl.col(col_name) == "", + ) -> pl.DataFrame: + return self._drop_rows(df=df, columns=columns, condition=condition) # # Transforms - Aggregation # - # ... 
- # # Transforms - Aggregate Repeat Instruments by Index # def _aggregate_repeat_instrument_by_index( self, - df: pd.DataFrame, - aggregator: str = "max", + df: pl.DataFrame, + aggregator: Literal['min', 'max', 'first', 'last', 'sum', 'mean', 'median', 'len'] | pl.Expr | None = "max", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - new_columns = [column for column in df["redcap_repeat_instrument"].unique() if column is not np.nan] - pivot = pd.pivot_table( - df, - index=self.index_columns, - columns=["redcap_repeat_instrument"], + ) -> pl.DataFrame: + + # Check if repeat instrument exists + if "redcap_repeat_instrument" not in df.columns: + return df + + df = df.filter( + pl.col("redcap_repeat_instrument").is_not_null() & + pl.all_horizontal(pl.col(c).is_not_null() for c in self.index_columns) + ) + + # Create the pivoted dataframe + df = df.rechunk() # Avoid Polars/Rust race condition + pivot_df = df.pivot( values="redcap_repeat_instance", - aggfunc=aggregator, - fill_value=self.missing_value_generic, + index=self.index_columns, + on="redcap_repeat_instrument", + aggregate_function=aggregator ) - df = df.merge(pivot, how="outer", on=self.index_columns) - df = df.drop_duplicates(self.index_columns, keep="first") + + # The pivot might introduce nulls, fill with missing generic + pivot_df = pivot_df.fill_null(self.missing_value_generic) + + # Merge back to original (outer join) + df_unique = df.unique(subset=self.index_columns, keep="first") + df_unique.rechunk() + df = df_unique.join(pivot_df, on=self.index_columns, how="left") + + # Cast new columns (all columns in pivot_df except index) + new_columns = [c for c in pivot_df.columns if c not in self.index_columns] + + # Map python types to Polars types + pl_type = pl.Float64 if dtype is float else pl.Int64 if dtype is int else pl.Utf8 + if dtype is int: pl_type = pl.Int64 + for column in new_columns: - df[column] = df[column].astype(dtype) + df = df.with_columns( + 
pl.when(pl.col(column) == self.missing_value_generic) + .then(None) + .otherwise(pl.col(column)) + .cast(pl_type, strict=False) + .alias(column) + ) + return df def aggregate_repeat_instrument_by_index( - self, df: pd.DataFrame, aggregator: str = "max", dtype: Callable = float - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. - """ + self, df: pl.DataFrame, aggregator: Literal['min', 'max', 'first', 'last', 'sum', 'mean', 'median', 'len'] | pl.Expr | None = "max", dtype: Callable = float + ) -> pl.DataFrame: return self._aggregate_repeat_instrument_by_index( df=df, aggregator=aggregator, dtype=dtype ) @@ -664,88 +726,103 @@ def aggregate_repeat_instrument_by_index( def _new_column_from_binary_columns_positive_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", all_negative_value: str = "", default_value: str | None = "Value Unavailable", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: new_column_name = ( new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) ) - df[new_column_name] = "" - for column_name, column_value in column_name_map.items(): - df.loc[ - df[column_name] == "Yes", new_column_name - ] += f"{column_value}{self.multivalue_separator}" - for column_name, column_value in column_name_map.items(): - df.loc[ - (df[column_name] == default_value) & (df[new_column_name] == ""), - new_column_name, - ] = default_value - df.loc[df[new_column_name] == "", new_column_name] = all_negative_value - # Remove delimiter character if column ends with it - rgx = f"\\{self.multivalue_separator}$" - df[new_column_name] = df[new_column_name].str.replace(rgx, "", regex=True) - return df + # Build a list of expressions: If Col == Yes then "Label|" else "" + concat_exprs = [] + 
for col_name, label in column_name_map.items(): + concat_exprs.append( + pl.when(pl.col(col_name) == "Yes") + .then(pl.lit(f"{label}{self.multivalue_separator}")) + .otherwise(pl.lit("")) + ) + # Concatenate them all + full_str_col = pl.concat_str(concat_exprs) + + # Check for default value presence + any_default = pl.any_horizontal([ + pl.col(c) == default_value for c in column_name_map.keys() + ]) + + df = df.with_columns( + pl.when((full_str_col == "") & any_default) + .then(pl.lit(default_value)) + .when(full_str_col == "") + .then(pl.lit(all_negative_value)) + .otherwise( + # Remove trailing separator + full_str_col.str.strip_chars_end(self.multivalue_separator) + ) + .alias(new_column_name) + ) + + return df def new_column_from_binary_columns_positive_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", all_negative_value: str = "", default_value: str | None = "Value Unavailable", dtype: Callable = float, - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. 
- """ + ) -> pl.DataFrame: return self._new_column_from_binary_columns_positive_class( df=df, column_name_map=column_name_map, new_column_name=new_column_name, + all_negative_value=all_negative_value, default_value=default_value, dtype=dtype, ) def _new_column_from_binary_columns_negative_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, - ) -> pd.DataFrame: + ) -> pl.DataFrame: new_column_name = ( new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) ) - df[new_column_name] = df[list(column_name_map.keys())].idxmin(axis=1) + + target_cols = list(column_name_map.keys()) + idx_to_col = {i: name for i, name in enumerate(target_cols)} + + df = df.with_columns( + pl.concat_list([ + pl.col(c).cast(pl.Float64, strict=False) for c in target_cols + ]) + .list.arg_min() # Returns index of min value + .replace(idx_to_col) # Map index back to column name + .alias(new_column_name) + ) + return df def new_column_from_binary_columns_negative_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. - """ + ) -> pl.DataFrame: return self._new_column_from_binary_columns_negative_class( df=df, column_name_map=column_name_map, @@ -757,26 +834,25 @@ def new_column_from_binary_columns_negative_class( # Utilities # - # Transform Prelude - Get Applicable Transform Columns def _resolve_columns_with_dataframe( - self, df: pd.DataFrame, columns: List[str], default_columns: List[str] + self, df: pl.DataFrame, columns: List[str], default_columns: List[str] ) -> List[str]: """ Internal utility function. Uses set logic to ensure requested columns are available within the target - pd.DataFrame. 
+ pl.DataFrame. """ available_columns, requested_columns = set(df.columns), set(columns) resolved_columns = [] if len(requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – columns parameter has no values. Defaulting to all df.columns" + f"Unexpected Transform – columns parameter has no values. Defaulting to provided default_columns" ) resolved_columns = default_columns elif len(available_columns & requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – none of the requested columns were found in df.columns. Defaulting to all df.columns" + f"Unexpected Transform – none of the requested columns were found in df.columns. Defaulting to provided default_columns" ) resolved_columns = default_columns elif len(requested_columns - available_columns) > 0: @@ -789,15 +865,14 @@ def _resolve_columns_with_dataframe( return resolved_columns - # Extract REDCap Type Metadata - def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: + def _get_redcap_type_metadata(self, df: pl.DataFrame) -> List[Dict[str, Any]]: """ Extracts REDCap field name, type, and options (the - metadata) for each column in the target pd.DataFrame + metadata) for each column in the target pl.DataFrame """ # REDCap Internal Variable Metadata - metadata = [ + metadata: List[Dict[str, Any]] = [ {"name": "redcap_data_access_group", "type": "text", "options": {}}, {"name": "redcap_repeat_instrument", "type": "text", "options": {}}, {"name": "redcap_repeat_instance", "type": "number", "options": {}}, @@ -810,14 +885,16 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: skip_types = {"file", "calc", "descriptive", "notes"} # Get Column Metadata - columns = df.columns.tolist() + columns = df.columns for field in sorted(self.metadata, key=lambda f: f["field_name"]): if field["field_name"] in columns: field_type = field["field_type"] options: dict = {} if field_type in complex_types: rgx = self._field_rgx[field_type] + # Parse choices 
string: "1, Yes | 2, No" for option in field["select_choices_or_calculations"].split("|"): + if "," not in option: continue k, v = ( option.split(",")[0], (",".join(option.split(",")[1:])).strip(), @@ -872,12 +949,12 @@ def export_raw( for report_key, report_object in self.reports.items(): filename = f"{report_key}_raw{filetype}" filepath = os.path.join(self.cwd, path, filename) - transformed = report_object["df"] - transformed.to_csv( + transformed: pl.DataFrame = report_object["df"] + transformed.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 # Approx match to %.2f ) return self @@ -888,30 +965,29 @@ def export_transformed( for report_key, report_object in self.reports.items(): filename = f"{report_key}_transformed{filetype}" filepath = os.path.join(self.cwd, path, filename) - transformed = report_object["transformed"] - transformed.to_csv( + transformed: pl.DataFrame = report_object["transformed"] + transformed.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 ) return self # Export Merged Transforms def export_merged_transformed( - self, path: str = "", separator: str = "\t", filetype: str = ".tsv" + self, filepath: str = "transformed-merged_redcap-extract.tsv", separator: str = "\t" ) -> object: - filename = f"transformed-merged_redcap-extract{filetype}" - filepath = os.path.join(self.cwd, path, filename) - self.merged.to_csv( + filepath = os.path.join(self.cwd, filepath) + self.merged.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 ) return self if __name__ == "__main__": - pass + pass else: - pass + pass diff --git a/modules/etl/vtypes/compound.py 
b/modules/etl/vtypes/compound.py index 82ce4366..5104b638 100644 --- a/modules/etl/vtypes/compound.py +++ b/modules/etl/vtypes/compound.py @@ -8,8 +8,6 @@ DoubleContinuousTimeseries, ) from typing import Tuple, List, Dict, Callable, Any -import pandas as pd - class Compound(ComplexVType): def __init__(self) -> None: diff --git a/modules/etl/vtypes/mixed.py b/modules/etl/vtypes/mixed.py index fe24f477..f5220b60 100644 --- a/modules/etl/vtypes/mixed.py +++ b/modules/etl/vtypes/mixed.py @@ -9,8 +9,6 @@ DoubleContinuousTimeseries, ) from .compound import Compound -import pandas as pd - class Mixed(ComplexVType): def __init__(self) -> None: diff --git a/modules/etl/vtypes/timeseries.py b/modules/etl/vtypes/timeseries.py index e7dba483..1d54e778 100644 --- a/modules/etl/vtypes/timeseries.py +++ b/modules/etl/vtypes/timeseries.py @@ -1,5 +1,4 @@ from .vtype import SimpleVType -import pandas as pd from datetime import datetime @@ -12,7 +11,7 @@ def __init__(self) -> None: ("group", str), ("x", datetime), ], - pd._libs.tslibs.nattype.NaTType, + str, ) @@ -26,7 +25,7 @@ def __init__(self) -> None: ("x", str), ("y", int), ], - pd._libs.tslibs.nattype.NaTType, + str, ) @@ -40,7 +39,7 @@ def __init__(self) -> None: ("x", str), ("y", float), ], - pd._libs.tslibs.nattype.NaTType, + str, ) diff --git a/modules/etl/vtypes/vtype.py b/modules/etl/vtypes/vtype.py index b565f829..81ca6992 100644 --- a/modules/etl/vtypes/vtype.py +++ b/modules/etl/vtypes/vtype.py @@ -1,99 +1,88 @@ -from typing import Any, Callable, Union, List, Dict, Tuple -from datetime import datetime -import pandas as pd +from typing import Any, Callable, List, Dict, Tuple, Type, Union +import polars as pl +# A property on a SimpleVType: ("value", int), ("filterby", str), etc. 
+VTypeProp = Tuple[str, Callable[..., Any]] -class SimpleVType(object): +# A child vtype class included inside ComplexVType +VTypeClass = Type["BaseVType"] + +# ComplexVType may accept either real props (VTypeProp) +# or child vtype classes (VTypeClass) +PropsList = List[Union[VTypeProp, VTypeClass]] + + +class BaseVType: def __init__( self, name: str, - props: List[Tuple[str, Callable]], - missing_value: Callable, + props: PropsList, + missing_value: Callable[..., Any], ) -> None: self.name = name self.props = props self.missing_value = missing_value - # References self.validation_errors: List[str] = [] - def __str__(self): + def __str__(self) -> str: return f"{self.__dict__}" - def isvalid(self, df: pd.DataFrame, accessors: Dict[str, Dict[str, str]]) -> bool: - columns = df.columns - for pname, ptype in self.props: - if pname in accessors.keys(): - column = accessors[pname]["field"] - if column not in columns: - self.validation_errors.append( - f"VType {self.name.title()} pd.DataFrame argument (df) is missing column defined in accessors argument, {column}" - ) - return False - else: - continue - else: + def _validate_single_accessor( + self, + df_cols: set, + accessors: Dict[str, Dict[str, str]] + ) -> bool: + ok = True + vname = self.name.title() + + for item in self.props: + # ComplexVType entries may be classes, skip them + if not isinstance(item, tuple): + continue + + pname, _ = item + + field_info = accessors.get(pname) + if not field_info: self.validation_errors.append( - f"VType {self.name.title()} accessors argument is missing required property, {pname}" + f"VType {vname} accessors argument is missing required property, {pname}" ) - return False - return True + ok = False + continue + column = field_info["field"] + if column not in df_cols: + self.validation_errors.append( + f"VType {vname} pl.DataFrame argument (df) is missing column " + f"defined in accessors argument, {column}" + ) + ok = False -class ComplexVType(object): - def __init__( - self, - 
name: str, - props: List[Any], - missing_value: Callable, - ) -> None: - self.name = name - self.props = props - self.missing_value = missing_value - # References - self.validation_errors: List[str] = [] + return ok - def __str__(self): - return f"{self.__dict__}" - # def isvalid( - # self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] - # ) -> bool: - # """ - # Extends the VType.isvalid method to operate on a list - # of pd.DataFrames and accessors. - # """ - # valid = True - # for accessors in accessorsList: - # if not super(Compound, self).isvalid(df, accessors): - # self.validation_errors.append( - # f"VType {self.name.title()} has invalid accessors. See additional details above." - # ) - # valid = False - # else: - # continue - # return valid +class SimpleVType(BaseVType): + def isvalid( + self, + df: pl.DataFrame, + accessors: Dict[str, Dict[str, str]] + ) -> bool: + return self._validate_single_accessor(set(df.columns), accessors) + +class ComplexVType(BaseVType): def isvalid( - self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] + self, + df: pl.DataFrame, + accessors_list: List[Dict[str, Dict[str, str]]] ) -> bool: + df_cols = set(df.columns) valid = True - columns = df.columns - for accessors in accessorsList: - for pname, ptype in self.props: - if pname in accessors.keys(): - column = accessors[pname]["field"] - if column not in columns: - self.validation_errors.append( - f"VType {self.name.title()} pd.DataFrame argument (df) is missing column defined in accessors argument, {column}" - ) - valid = False - else: - continue - else: - self.validation_errors.append( - f"VType {self.name.title()} accessors argument is missing required property, {pname}" - ) - valid = False + + for accessors in accessors_list: + if not self._validate_single_accessor(df_cols, accessors): + valid = False + return valid diff --git a/modules/invitation.py b/modules/invitation.py new file mode 100644 index 00000000..fd792a2f --- /dev/null +++ 
b/modules/invitation.py @@ -0,0 +1,222 @@ +from flask import render_template +import config +from azure.communication.email import EmailClient + + +def azure_email_connection(html_content, subject: str, to: str): + connection_string = config.FAIRHUB_SMTP_CONNECTION_STRING + email_client = EmailClient.from_connection_string(connection_string) + message = { + "content": { + "subject": subject, + "html": html_content + }, + "recipients": { + "to": [ + { + "address": to, + "displayName": "Customer Name" + } + ] + }, + "senderAddress": config.FAIRHUB_SMTP_SENDER_EMAIL_ADDRESS + } + + email_client.begin_send(message) + # poller = email_client.begin_send(message) + # result = poller.result() + + +def send_invitation_study(to, token, study_name, role): + accept_url = f"{config.FAIRHUB_FRONTEND_URL}auth/signup?code={token}&email={to}" + html_content = render_template( + "accept_study_invitation.html", + token=token, + accept_url=accept_url, + study_name=study_name, + role=role, + to=to, + ) + subject, from_email, to = ( + f"You have been invited to {study_name} invitation", + "aydan.gasimova2@example.com", + to, + ) + + # msg = EmailMessage(subject, html_content, from_email, [to]) + # msg.content_subtype = "html" + # msg.send() + azure_email_connection(html_content, subject, to) + + +def send_access_contributors(to, study, first_name, last_name, role): + accept_url = f"{config.FAIRHUB_FRONTEND_URL}study/{study.id}/overview" + html_content = render_template( + "invite_contributors.html", + accept_url=accept_url, + first_name=first_name, + last_name=last_name, + study_name=study.title, + study_id=study.id, + role=role, + ) + subject, from_email, to = ( + f"You have been invited to {study.title} invitation", + "aydan.gasimova2@example.com", + to, + ) + # msg = EmailMessage(subject, html_content, from_email, [to]) + # msg.content_subtype = "html" + # msg.send() + azure_email_connection(html_content, subject, to) + + +def forgot_password(to, first_name, last_name, token): + 
reset_password_ = f"{config.FAIRHUB_FRONTEND_URL}auth/reset-password?token={token}" + + html_content = render_template( + "forgot_password.html", + forgot_password_=reset_password_, + email=to, + first_name=first_name, + last_name=last_name + ) + subject, from_email, to = ( + f" Password Change", + "aydan.gasimova2@gmail.com", + to, + ) + azure_email_connection(html_content, subject, to) + + +def reset_password(to, first_name, last_name): + reset_password_ = f"{config.FAIRHUB_FRONTEND_URL}/user/profile" + + html_content = render_template( + "reset_password.html", + reset_password_=reset_password_, + email=to, + first_name=first_name, + last_name=last_name + ) + subject, from_email, to = ( + f" Password Change", + "aydan.gasimova2@gmail.com", + to, + ) + azure_email_connection(html_content, subject, to) + + + +def send_email_verification(email_address, token): + verification_url = ( + f"{config.FAIRHUB_FRONTEND_URL}auth/verify-email?email={email_address}&token={token}" + ) + subject, from_email, to = ( + f"Verify email address", + "aydan.gasimova@example.com", + email_address, + ) + html_content = render_template( + "email_verification.html", + token=token, + verification_url=verification_url, + email=email_address, + ) + # msg = EmailMessage(subject, html_content, from_email, [email_address]) + # msg.content_subtype = "html" + # msg.send() + azure_email_connection(html_content, subject, to) + + +def signin_notification(user, device_ip): + user_profile_url = f"{config.FAIRHUB_FRONTEND_URL}studies" + subject, from_email, to = ( + f"Login notification", + "aydan.gasimova2@example.com", + user.email_address, + ) + html_content = render_template( + "device_notification.html", + user_profile_url=user_profile_url, + device_ip=device_ip, + ) + # msg = EmailMessage(subject, html_content, from_email, [user.email_address]) + # msg.content_subtype = "html" + # msg.send() + azure_email_connection(html_content, subject, to) + + +# def get_config(): +# if 
os.environ.get("FLASK_ENV") == "testing": +# config_module_name = "pytest_config" +# else: +# config_module_name = "config" +# +# config_module = importlib.import_module(config_module_name) +# +# if os.environ.get("FLASK_ENV") == "testing": +# # If testing, use the 'TestConfig' class for accessing 'secret' +# config = config_module.TestConfig +# else: +# config = config_module +# +# return config + + +# Get list of user ids that have previously authenticated on this device +# def get_device_user_list() -> list[str]: +# # FIX THE TYPE OF THE TOKEN["USERS"], IT WAS GETTING ERROR SINCE token returns a dict instead of list +# # Check if cookie exists +# if "token_device" not in request.cookies: +# return [] +# +# # Get value from cookie +# cookie = request.cookies.get("token_device") +# if not cookie: +# return [] +# +# token = {} +# config = get_config() +# try: +# token = jwt.decode(cookie, config.FAIRHUB_SECRET, algorithms=["HS256"]) +# except jwt.ExpiredSignatureError: +# return [] +# +# if "users" not in token: +# return [] +# +# return token["users"] +# +# +# def add_user_to_device_list(response: Response, user) -> None: +# users = get_device_user_list() +# if user.id not in users: +# users.append(user.id) +# +# config = get_config() +# expiration = datetime.datetime.now(timezone.utc) + datetime.timedelta(days=365) +# cookie = jwt.encode( +# { +# "users": users, +# "exp": expiration, +# }, +# config.FAIRHUB_SECRET, +# algorithm="HS256", +# ) +# +# response.set_cookie( +# "token_device", +# cookie, +# secure=True, +# httponly=True, +# samesite="None", +# expires=expiration, +# ) + +# +# def check_trusted_device() -> bool: +# users = get_device_user_list() +# for user in users: +# print("User known: " + user) +# return g.user.id in users diff --git a/poetry.lock b/poetry.lock index 7b826e9c..7844474f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,35 +1,207 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. 
+# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + +[[package]] +name = "aiohttp" +version = "3.13.2" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.13.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2372b15a5f62ed37789a6b383ff7344fc5b9f243999b0cd9b629d8bc5f5b4155"}, + {file = "aiohttp-3.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7f8659a48995edee7229522984bd1009c1213929c769c2daa80b40fe49a180c"}, + {file = "aiohttp-3.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:939ced4a7add92296b0ad38892ce62b98c619288a081170695c6babe4f50e636"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6315fb6977f1d0dd41a107c527fee2ed5ab0550b7d885bc15fee20ccb17891da"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6e7352512f763f760baaed2637055c49134fd1d35b37c2dedfac35bfe5cf8725"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e09a0a06348a2dd73e7213353c90d709502d9786219f69b731f6caa0efeb46f5"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a09a6d073fb5789456545bdee2474d14395792faa0527887f2f4ec1a486a59d3"}, + {file = 
"aiohttp-3.13.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b59d13c443f8e049d9e94099c7e412e34610f1f49be0f230ec656a10692a5802"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:20db2d67985d71ca033443a1ba2001c4b5693fe09b0e29f6d9358a99d4d62a8a"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:960c2fc686ba27b535f9fd2b52d87ecd7e4fd1cf877f6a5cba8afb5b4a8bd204"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6c00dbcf5f0d88796151e264a8eab23de2997c9303dd7c0bf622e23b24d3ce22"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fed38a5edb7945f4d1bcabe2fcd05db4f6ec7e0e82560088b754f7e08d93772d"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:b395bbca716c38bef3c764f187860e88c724b342c26275bc03e906142fc5964f"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:204ffff2426c25dfda401ba08da85f9c59525cdc42bda26660463dd1cbcfec6f"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:05c4dd3c48fb5f15db31f57eb35374cb0c09afdde532e7fb70a75aede0ed30f6"}, + {file = "aiohttp-3.13.2-cp310-cp310-win32.whl", hash = "sha256:e574a7d61cf10351d734bcddabbe15ede0eaa8a02070d85446875dc11189a251"}, + {file = "aiohttp-3.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:364f55663085d658b8462a1c3f17b2b84a5c2e1ba858e1b79bff7b2e24ad1514"}, + {file = "aiohttp-3.13.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4647d02df098f6434bafd7f32ad14942f05a9caa06c7016fdcc816f343997dd0"}, + {file = "aiohttp-3.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e3403f24bcb9c3b29113611c3c16a2a447c3953ecf86b79775e7be06f7ae7ccb"}, + {file = "aiohttp-3.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:43dff14e35aba17e3d6d5ba628858fb8cb51e30f44724a2d2f0c75be492c55e9"}, + {file = 
"aiohttp-3.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2a9ea08e8c58bb17655630198833109227dea914cd20be660f52215f6de5613"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53b07472f235eb80e826ad038c9d106c2f653584753f3ddab907c83f49eedead"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e736c93e9c274fce6419af4aac199984d866e55f8a4cec9114671d0ea9688780"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ff5e771f5dcbc81c64898c597a434f7682f2259e0cd666932a913d53d1341d1a"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3b6fb0c207cc661fa0bf8c66d8d9b657331ccc814f4719468af61034b478592"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:97a0895a8e840ab3520e2288db7cace3a1981300d48babeb50e7425609e2e0ab"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9e8f8afb552297aca127c90cb840e9a1d4bfd6a10d7d8f2d9176e1acc69bad30"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:ed2f9c7216e53c3df02264f25d824b079cc5914f9e2deba94155190ef648ee40"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:99c5280a329d5fa18ef30fd10c793a190d996567667908bef8a7f81f8202b948"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ca6ffef405fc9c09a746cb5d019c1672cd7f402542e379afc66b370833170cf"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:47f438b1a28e926c37632bff3c44df7d27c9b57aaf4e34b1def3c07111fdb782"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:9acda8604a57bb60544e4646a4615c1866ee6c04a8edef9b8ee6fd1d8fa2ddc8"}, + {file = "aiohttp-3.13.2-cp311-cp311-win32.whl", hash = "sha256:868e195e39b24aaa930b063c08bb0c17924899c16c672a28a65afded9c46c6ec"}, + {file = "aiohttp-3.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:7fd19df530c292542636c2a9a85854fab93474396a52f1695e799186bbd7f24c"}, + {file = "aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b"}, + {file = "aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc"}, + {file = "aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248"}, + {file = "aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e"}, + {file = "aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45"}, + {file = "aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be"}, + {file = "aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742"}, + {file = "aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a"}, + {file = 
"aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23"}, + {file = "aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254"}, + {file = "aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a"}, + {file = "aiohttp-3.13.2-cp314-cp314-macosx_10_13_universal2.whl", hash = 
"sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b"}, + {file = "aiohttp-3.13.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61"}, + {file = "aiohttp-3.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7f02042c1f009ffb70067326ef183a047425bb2ff3bc434ead4dd4a4a66a2b"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:93655083005d71cd6c072cdab54c886e6570ad2c4592139c3fb967bfc19e4694"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0db1e24b852f5f664cd728db140cf11ea0e82450471232a394b3d1a540b0f906"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b009194665bcd128e23eaddef362e745601afa4641930848af4c8559e88f18f9"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c038a8fdc8103cd51dbd986ecdce141473ffd9775a7a8057a6ed9c3653478011"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66bac29b95a00db411cd758fea0e4b9bdba6d549dfe333f9a945430f5f2cc5a6"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4ebf9cfc9ba24a74cf0718f04aac2a3bbe745902cc7c5ebc55c0f3b5777ef213"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a4b88ebe35ce54205c7074f7302bd08a4cb83256a3e0870c72d6f68a3aaf8e49"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:98c4fb90bb82b70a4ed79ca35f656f4281885be076f3f970ce315402b53099ae"}, + {file = 
"aiohttp-3.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a"}, + {file = "aiohttp-3.13.2-cp314-cp314-win32.whl", hash = "sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940"}, + {file = "aiohttp-3.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4"}, + {file = "aiohttp-3.13.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673"}, + {file = "aiohttp-3.13.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd"}, + {file = "aiohttp-3.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e96eb1a34396e9430c19d8338d2ec33015e4a87ef2b4449db94c22412e25ccdf"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:23fb0783bc1a33640036465019d3bba069942616a6a2353c6907d7fe1ccdaf4e"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1a9bea6244a1d05a4e57c295d69e159a5c50d8ef16aa390948ee873478d9a5"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a3d54e822688b56e9f6b5816fb3de3a3a64660efac64e4c2dc435230ad23bad"}, + {file = 
"aiohttp-3.13.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7a653d872afe9f33497215745da7a943d1dc15b728a9c8da1c3ac423af35178e"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:56d36e80d2003fa3fc0207fac644216d8532e9504a785ef9a8fd013f84a42c61"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:78cd586d8331fb8e241c2dd6b2f4061778cc69e150514b39a9e28dd050475661"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:20b10bbfbff766294fe99987f7bb3b74fdd2f1a2905f2562132641ad434dcf98"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9ec49dff7e2b3c85cdeaa412e9d438f0ecd71676fde61ec57027dd392f00c693"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c"}, + {file = "aiohttp-3.13.2-cp314-cp314t-win32.whl", hash = "sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734"}, + {file = "aiohttp-3.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f"}, + {file = "aiohttp-3.13.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7fbdf5ad6084f1940ce88933de34b62358d0f4a0b6ec097362dcd3e5a65a4989"}, + {file = "aiohttp-3.13.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7c3a50345635a02db61792c85bb86daffac05330f6473d524f1a4e3ef9d0046d"}, + {file = "aiohttp-3.13.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e87dff73f46e969af38ab3f7cb75316a7c944e2e574ff7c933bc01b10def7f5"}, + {file = 
"aiohttp-3.13.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2adebd4577724dcae085665f294cc57c8701ddd4d26140504db622b8d566d7aa"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e036a3a645fe92309ec34b918394bb377950cbb43039a97edae6c08db64b23e2"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:23ad365e30108c422d0b4428cf271156dd56790f6dd50d770b8e360e6c5ab2e6"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1f9b2c2d4b9d958b1f9ae0c984ec1dd6b6689e15c75045be8ccb4011426268ca"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a92cf4b9bea33e15ecbaa5c59921be0f23222608143d025c989924f7e3e0c07"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:070599407f4954021509193404c4ac53153525a19531051661440644728ba9a7"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:29562998ec66f988d49fb83c9b01694fa927186b781463f376c5845c121e4e0b"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4dd3db9d0f4ebca1d887d76f7cdbcd1116ac0d05a9221b9dad82c64a62578c4d"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d7bc4b7f9c4921eba72677cd9fedd2308f4a4ca3e12fab58935295ad9ea98700"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:dacd50501cd017f8cccb328da0c90823511d70d24a323196826d923aad865901"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8b2f1414f6a1e0683f212ec80e813f4abef94c739fd090b66c9adf9d2a05feac"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04c3971421576ed24c191f610052bcb2f059e395bc2489dd99e397f9bc466329"}, + {file 
= "aiohttp-3.13.2-cp39-cp39-win32.whl", hash = "sha256:9f377d0a924e5cc94dc620bc6366fc3e889586a7f18b748901cf016c916e2084"}, + {file = "aiohttp-3.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:9c705601e16c03466cb72011bd1af55d68fa65b045356d8f96c216e5f6db0fa5"}, + {file = "aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.5.0" +aiosignal = ">=1.4.0" +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "backports.zstd ; platform_python_implementation == \"CPython\" and python_version < \"3.14\"", "brotlicffi ; platform_python_implementation != \"CPython\""] + +[[package]] +name = "aiosignal" +version = "1.4.0" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"}, + {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" +typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""} [[package]] name = "alembic" -version = "1.13.1" +version = "1.17.2" description = "A database migration tool for SQLAlchemy." 
-category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, + {file = "alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6"}, + {file = "alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e"}, ] [package.dependencies] Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" +SQLAlchemy = ">=1.4.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.12" [package.extras] -tz = ["backports.zoneinfo"] +tz = ["tzdata"] [[package]] name = "aniso8601" -version = "9.0.1" +version = "10.0.1" description = "A library for parsing ISO 8601 strings." 
-category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, - {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, + {file = "aniso8601-10.0.1-py2.py3-none-any.whl", hash = "sha256:eb19717fd4e0db6de1aab06f12450ab92144246b257423fe020af5748c0cb89e"}, + {file = "aniso8601-10.0.1.tar.gz", hash = "sha256:25488f8663dd1528ae1f54f94ac1ea51ae25b4d531539b8bc707fed184d16845"}, ] [package.extras] @@ -37,34 +209,33 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "anyio" -version = "4.2.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "dev" +version = "4.11.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc"}, + {file = "anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", 
"coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +trio = ["trio (>=0.31.0)"] [[package]] name = "appnope" version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, @@ -72,32 +243,27 @@ files = [ [[package]] name = "argon2-cffi" -version = "23.1.0" +version = "25.1.0" description = "Argon2 for Python" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, + {file = "argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741"}, + {file = "argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1"}, ] [package.dependencies] argon2-cffi-bindings = "*" -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - [[package]] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "python_version >= \"3.14\"" files = [ {file = 
"argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, @@ -129,36 +295,76 @@ cffi = ">=1.0.1" dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] +[[package]] +name = "argon2-cffi-bindings" +version = "25.1.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.14\"" +files = [ + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:3d3f05610594151994ca9ccb3c771115bdb4daef161976a266f0dd8aa9996b8f"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8b8efee945193e667a396cbc7b4fb7d357297d6234d30a489905d96caabde56b"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3c6702abc36bf3ccba3f802b799505def420a1b7039862014a65db3205967f5a"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1c70058c6ab1e352304ac7e3b52554daadacd8d453c1752e547c76e9c99ac44"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2fd3bfbff3c5d74fef31a722f729bf93500910db650c925c2d6ef879a7e51cb"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4f9665de60b1b0e99bcd6be4f17d90339698ce954cfd8d9cf4f91c995165a92"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ba92837e4a9aa6a508c8d2d7883ed5a8f6c308c89a4790e1e447a220deb79a85"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win32.whl", hash = "sha256:84a461d4d84ae1295871329b346a97f68eade8c53b6ed9a7ca2d7467f3c8ff6f"}, + {file = 
"argon2_cffi_bindings-25.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b55aec3565b65f56455eebc9b9f34130440404f27fe21c3b375bf1ea4d8fbae6"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:87c33a52407e4c41f3b70a9c2d3f6056d88b10dad7695be708c5021673f55623"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:6dca33a9859abf613e22733131fc9194091c1fa7cb3e131c143056b4856aa47e"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21378b40e1b8d1655dd5310c84a40fc19a9aa5e6366e835ceb8576bf0fea716d"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d588dec224e2a83edbdc785a5e6f3c6cd736f46bfd4b441bbb5aa1f5085e584"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5acb4e41090d53f17ca1110c3427f0a130f944b896fc8c83973219c97f57b690"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:da0c79c23a63723aa5d782250fbf51b768abca630285262fb5144ba5ae01e520"}, + {file = "argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d"}, +] + +[package.dependencies] +cffi = {version = ">=1.0.1", markers = "python_version < \"3.14\""} + [[package]] name = "arrow" -version = "1.3.0" +version = "1.4.0" description = "Better dates & times for Python" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, + {file = "arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205"}, + {file = "arrow-1.4.0.tar.gz", hash = "sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7"}, ] [package.dependencies] python-dateutil = ">=2.7.0" -types-python-dateutil = ">=2.8.10" +tzdata = {version = "*", markers = "python_version >= \"3.9\""} [package.extras] doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser 
(>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2025.2)", "simplejson (==3.*)"] [[package]] name = "art" -version = "6.1" +version = "6.5" description = "ASCII Art Library For Python" -category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "art-6.1-py3-none-any.whl", hash = "sha256:159819c418001467f8d79616fa0814277deac97c8a363d1eb3e7c0a31526bfc3"}, - {file = "art-6.1.tar.gz", hash = "sha256:6ab3031e3b7710039e73497b0e750cadfe04d4c1279ce3a123500dbafb9e1b64"}, + {file = "art-6.5-py3-none-any.whl", hash = "sha256:70706408144c45c666caab690627d5c74aea7b6c7ce8cc968408ddeef8d84afd"}, + {file = "art-6.5.tar.gz", hash = "sha256:a98d77b42c278697ec6cf4b5bdcdfd997f6b2425332da078d4e31e31377d1844"}, ] [package.extras] @@ -168,9 +374,9 @@ dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." 
-category = "dev" optional = false python-versions = ">=3.7.2" +groups = ["dev"] files = [ {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, @@ -186,161 +392,213 @@ wrapt = [ [[package]] name = "asttokens" -version = "2.4.1" +version = "3.0.0" description = "Annotate AST trees with source code positions" -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, + {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, + {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, ] -[package.dependencies] -six = ">=1.12.0" - [package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +astroid = ["astroid (>=2,<4)"] +test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "async-lru" -version = "2.0.4" +version = "2.0.5" description = "Simple LRU cache for asyncio" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, + {file = "async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943"}, + 
{file = "async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb"}, ] [package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "async-timeout" -version = "4.0.3" +version = "5.0.1" description = "Timeout context manager for asyncio programs" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] +markers = "python_full_version < \"3.11.3\"" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] name = "attrs" -version = "23.2.0" +version = "25.4.0" description = "Classes Without Boilerplate" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, + {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, ] -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", 
"sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +[[package]] +name = "azure-communication-email" +version = "1.1.0" +description = "Microsoft Azure MyService Management Client Library for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "azure_communication_email-1.1.0-py3-none-any.whl", hash = "sha256:9212153f21cf7e68734c32ebfe8702b43398bd01df2dddb0ca52cd5a8bbd5024"}, + {file = "azure_communication_email-1.1.0.tar.gz", hash = "sha256:6a4af8281024327c3ab18a4996919069a99a69aad3a19c40f7852a6682493327"}, +] + +[package.dependencies] +azure-core = ">=1.30.0" +isodate = ">=0.6.1" +typing-extensions = ">=4.6.0" [[package]] name = "azure-core" -version = "1.30.1" +version = "1.36.0" description = "Microsoft Azure Core Library for Python" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "azure-core-1.30.1.tar.gz", hash = "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f"}, - {file = "azure_core-1.30.1-py3-none-any.whl", hash = "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"}, + {file = "azure_core-1.36.0-py3-none-any.whl", hash = "sha256:fee9923a3a753e94a259563429f3644aaf05c486d45b1215d098115102d91d3b"}, + {file = "azure_core-1.36.0.tar.gz", hash = "sha256:22e5605e6d0bf1d229726af56d9e92bc37b6e726b141a18be0b4d424131741b7"}, ] [package.dependencies] requests = ">=2.21.0" -six = ">=1.11.0" typing-extensions = ">=4.6.0" [package.extras] aio = ["aiohttp (>=3.0)"] +tracing = ["opentelemetry-api (>=1.26,<2.0)"] [[package]] name = "azure-storage-blob" -version = "12.19.1" +version = "12.27.1" description = "Microsoft Azure Blob Storage Client Library 
for Python" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "azure-storage-blob-12.19.1.tar.gz", hash = "sha256:13e16ba42fc54ac2c7e8f976062173a5c82b9ec0594728e134aac372965a11b0"}, - {file = "azure_storage_blob-12.19.1-py3-none-any.whl", hash = "sha256:c5530dc51c21c9564e4eb706cd499befca8819b10dd89716d3fc90d747556243"}, + {file = "azure_storage_blob-12.27.1-py3-none-any.whl", hash = "sha256:65d1e25a4628b7b6acd20ff7902d8da5b4fde8e46e19c8f6d213a3abc3ece272"}, + {file = "azure_storage_blob-12.27.1.tar.gz", hash = "sha256:a1596cc4daf5dac9be115fcb5db67245eae894cf40e4248243754261f7b674a6"}, ] [package.dependencies] -azure-core = ">=1.28.0,<2.0.0" +azure-core = ">=1.30.0" cryptography = ">=2.1.4" isodate = ">=0.6.1" -typing-extensions = ">=4.3.0" +typing-extensions = ">=4.6.0" + +[package.extras] +aio = ["azure-core[aio] (>=1.30.0)"] + +[[package]] +name = "azure-storage-file-datalake" +version = "12.22.0" +description = "Microsoft Azure File DataLake Storage Client Library for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "azure_storage_file_datalake-12.22.0-py3-none-any.whl", hash = "sha256:dba235d2fa21135205dbcbba884ea5f8a3aff800a8f89205a1a5a404843a1fc5"}, + {file = "azure_storage_file_datalake-12.22.0.tar.gz", hash = "sha256:9aed0d35f3327baeeb11b1950b140f97b356cfd368fc2cd105c32c820c49af77"}, +] + +[package.dependencies] +azure-core = ">=1.30.0" +azure-storage-blob = ">=12.27.0" +isodate = ">=0.6.1" +typing-extensions = ">=4.6.0" [package.extras] -aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] +aio = ["azure-core[aio] (>=1.30.0)"] [[package]] name = "babel" -version = "2.14.0" +version = "2.17.0" description = "Internationalization utilities" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = 
"sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "bcrypt" -version = "4.1.2" +version = "4.3.0" description = "Modern password hashing for your software and your servers" -category = "main" optional = false -python-versions = ">=3.7" -files = [ - {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, - {file = 
"bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, - {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, - {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, - {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, - {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = 
"sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, - {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, - {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, - {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, - {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, - {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, - {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"}, + {file = 
"bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"}, + {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"}, + {file = 
"bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"}, + {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"}, + {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"}, + {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"}, + {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"}, + {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"}, + {file = 
"bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"}, + {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"}, ] [package.extras] @@ -349,18 +607,19 @@ typecheck = ["mypy"] [[package]] name = "beautifulsoup4" -version = "4.12.3" +version = "4.14.2" description = "Screen-scraping library" -category = "dev" optional = false -python-versions = ">=3.6.0" +python-versions = ">=3.7.0" +groups = ["dev"] files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, + {file = "beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515"}, + {file = "beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e"}, ] [package.dependencies] soupsieve = ">1.2" +typing-extensions = ">=4.0.0" [package.extras] cchardet = ["cchardet"] @@ -373,9 +632,9 @@ lxml = ["lxml"] name = "black" version = "23.12.1" description = "The uncompromising code formatter." 
-category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, @@ -412,125 +671,140 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "bleach" -version = "6.1.0" +version = "6.3.0" description = "An easy safelist-based HTML-sanitizing tool." -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, + {file = "bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6"}, + {file = "bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22"}, ] [package.dependencies] -six = ">=1.9.0" +tinycss2 = {version = ">=1.1.0,<1.5", optional = true, markers = "extra == \"css\""} webencodings = "*" [package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] +css = ["tinycss2 (>=1.1.0,<1.5)"] [[package]] name = "blinker" -version = "1.7.0" +version = "1.9.0" description = "Fast, simple object-to-object and broadcast signaling" -category = "main" optional = false 
-python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, - {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, + {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, + {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, ] [[package]] name = "cachelib" -version = "0.9.0" +version = "0.13.0" description = "A collection of cache libraries in the same API interface." -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"}, - {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, + {file = "cachelib-0.13.0-py3-none-any.whl", hash = "sha256:8c8019e53b6302967d4e8329a504acf75e7bc46130291d30188a6e4e58162516"}, + {file = "cachelib-0.13.0.tar.gz", hash = "sha256:209d8996e3c57595bee274ff97116d1d73c4980b2fd9a34c7846cd07fd2e1a48"}, ] [[package]] name = "certifi" -version = "2024.2.2" +version = "2025.11.12" description = "Python package for providing Mozilla's CA Bundle." 
-category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +groups = ["main", "dev"] files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"}, + {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - 
{file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - 
{file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = 
"cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +groups = ["main", "dev"] +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = 
"cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", 
hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -538,114 +812,137 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", 
hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = 
"charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = 
"charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = 
"charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = 
"charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = 
"charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = 
"sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = 
"sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = 
"charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = 
"charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = 
"charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.3.0" description = "Composable command line interface toolkit" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" +groups = ["main", "dev"] files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"}, + {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"}, ] [package.dependencies] @@ -655,110 +952,148 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\""} [[package]] name = "comm" -version = "0.2.1" +version = "0.2.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, - {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, + {file = "comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417"}, + {file = "comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971"}, ] -[package.dependencies] -traitlets = ">=4" - [package.extras] test = ["pytest"] [[package]] name = "coverage" -version = "7.4.1" +version = "7.11.3" description = "Code coverage measurement for Python" -category = "dev" optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = 
"coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = 
"coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "coverage-7.11.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c986537abca9b064510f3fd104ba33e98d3036608c7f2f5537f869bc10e1ee5"}, + {file = 
"coverage-7.11.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:28c5251b3ab1d23e66f1130ca0c419747edfbcb4690de19467cd616861507af7"}, + {file = "coverage-7.11.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4f2bb4ee8dd40f9b2a80bb4adb2aecece9480ba1fa60d9382e8c8e0bd558e2eb"}, + {file = "coverage-7.11.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e5f4bfac975a2138215a38bda599ef00162e4143541cf7dd186da10a7f8e69f1"}, + {file = "coverage-7.11.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f4cbfff5cf01fa07464439a8510affc9df281535f41a1f5312fbd2b59b4ab5c"}, + {file = "coverage-7.11.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:31663572f20bf3406d7ac00d6981c7bbbcec302539d26b5ac596ca499664de31"}, + {file = "coverage-7.11.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9799bd6a910961cb666196b8583ed0ee125fa225c6fdee2cbf00232b861f29d2"}, + {file = "coverage-7.11.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:097acc18bedf2c6e3144eaf09b5f6034926c3c9bb9e10574ffd0942717232507"}, + {file = "coverage-7.11.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:6f033dec603eea88204589175782290a038b436105a8f3637a81c4359df27832"}, + {file = "coverage-7.11.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dd9ca2d44ed8018c90efb72f237a2a140325a4c3339971364d758e78b175f58e"}, + {file = "coverage-7.11.3-cp310-cp310-win32.whl", hash = "sha256:900580bc99c145e2561ea91a2d207e639171870d8a18756eb57db944a017d4bb"}, + {file = "coverage-7.11.3-cp310-cp310-win_amd64.whl", hash = "sha256:c8be5bfcdc7832011b2652db29ed7672ce9d353dd19bce5272ca33dbcf60aaa8"}, + {file = "coverage-7.11.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:200bb89fd2a8a07780eafcdff6463104dec459f3c838d980455cfa84f5e5e6e1"}, + {file = "coverage-7.11.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:8d264402fc179776d43e557e1ca4a7d953020d3ee95f7ec19cc2c9d769277f06"}, + {file = "coverage-7.11.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:385977d94fc155f8731c895accdfcc3dd0d9dd9ef90d102969df95d3c637ab80"}, + {file = "coverage-7.11.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0542ddf6107adbd2592f29da9f59f5d9cff7947b5bb4f734805085c327dcffaa"}, + {file = "coverage-7.11.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d60bf4d7f886989ddf80e121a7f4d140d9eac91f1d2385ce8eb6bda93d563297"}, + {file = "coverage-7.11.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0a3b6e32457535df0d41d2d895da46434706dd85dbaf53fbc0d3bd7d914b362"}, + {file = "coverage-7.11.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:876a3ee7fd2613eb79602e4cdb39deb6b28c186e76124c3f29e580099ec21a87"}, + {file = "coverage-7.11.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a730cd0824e8083989f304e97b3f884189efb48e2151e07f57e9e138ab104200"}, + {file = "coverage-7.11.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:b5cd111d3ab7390be0c07ad839235d5ad54d2ca497b5f5db86896098a77180a4"}, + {file = "coverage-7.11.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:074e6a5cd38e06671580b4d872c1a67955d4e69639e4b04e87fc03b494c1f060"}, + {file = "coverage-7.11.3-cp311-cp311-win32.whl", hash = "sha256:86d27d2dd7c7c5a44710565933c7dc9cd70e65ef97142e260d16d555667deef7"}, + {file = "coverage-7.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:ca90ef33a152205fb6f2f0c1f3e55c50df4ef049bb0940ebba666edd4cdebc55"}, + {file = "coverage-7.11.3-cp311-cp311-win_arm64.whl", hash = "sha256:56f909a40d68947ef726ce6a34eb38f0ed241ffbe55c5007c64e616663bcbafc"}, + {file = "coverage-7.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b771b59ac0dfb7f139f70c85b42717ef400a6790abb6475ebac1ecee8de782f"}, + {file = 
"coverage-7.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:603c4414125fc9ae9000f17912dcfd3d3eb677d4e360b85206539240c96ea76e"}, + {file = "coverage-7.11.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:77ffb3b7704eb7b9b3298a01fe4509cef70117a52d50bcba29cffc5f53dd326a"}, + {file = "coverage-7.11.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4d4ca49f5ba432b0755ebb0fc3a56be944a19a16bb33802264bbc7311622c0d1"}, + {file = "coverage-7.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:05fd3fb6edff0c98874d752013588836f458261e5eba587afe4c547bba544afd"}, + {file = "coverage-7.11.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0e920567f8c3a3ce68ae5a42cf7c2dc4bb6cc389f18bff2235dd8c03fa405de5"}, + {file = "coverage-7.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4bec8c7160688bd5a34e65c82984b25409563134d63285d8943d0599efbc448e"}, + {file = "coverage-7.11.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:adb9b7b42c802bd8cb3927de8c1c26368ce50c8fdaa83a9d8551384d77537044"}, + {file = "coverage-7.11.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:c8f563b245b4ddb591e99f28e3cd140b85f114b38b7f95b2e42542f0603eb7d7"}, + {file = "coverage-7.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e2a96fdc7643c9517a317553aca13b5cae9bad9a5f32f4654ce247ae4d321405"}, + {file = "coverage-7.11.3-cp312-cp312-win32.whl", hash = "sha256:e8feeb5e8705835f0622af0fe7ff8d5cb388948454647086494d6c41ec142c2e"}, + {file = "coverage-7.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:abb903ffe46bd319d99979cdba350ae7016759bb69f47882242f7b93f3356055"}, + {file = "coverage-7.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:1451464fd855d9bd000c19b71bb7dafea9ab815741fb0bd9e813d9b671462d6f"}, + {file = "coverage-7.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:84b892e968164b7a0498ddc5746cdf4e985700b902128421bb5cec1080a6ee36"}, + {file = "coverage-7.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f761dbcf45e9416ec4698e1a7649248005f0064ce3523a47402d1bff4af2779e"}, + {file = "coverage-7.11.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1410bac9e98afd9623f53876fae7d8a5db9f5a0ac1c9e7c5188463cb4b3212e2"}, + {file = "coverage-7.11.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:004cdcea3457c0ea3233622cd3464c1e32ebba9b41578421097402bee6461b63"}, + {file = "coverage-7.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f067ada2c333609b52835ca4d4868645d3b63ac04fb2b9a658c55bba7f667d3"}, + {file = "coverage-7.11.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:07bc7745c945a6d95676953e86ba7cebb9f11de7773951c387f4c07dc76d03f5"}, + {file = "coverage-7.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8bba7e4743e37484ae17d5c3b8eb1ce78b564cb91b7ace2e2182b25f0f764cb5"}, + {file = "coverage-7.11.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbffc22d80d86fbe456af9abb17f7a7766e7b2101f7edaacc3535501691563f7"}, + {file = "coverage-7.11.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:0dba4da36730e384669e05b765a2c49f39514dd3012fcc0398dd66fba8d746d5"}, + {file = "coverage-7.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ae12fe90b00b71a71b69f513773310782ce01d5f58d2ceb2b7c595ab9d222094"}, + {file = "coverage-7.11.3-cp313-cp313-win32.whl", hash = "sha256:12d821de7408292530b0d241468b698bce18dd12ecaf45316149f53877885f8c"}, + {file = "coverage-7.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:6bb599052a974bb6cedfa114f9778fedfad66854107cf81397ec87cb9b8fbcf2"}, + {file = "coverage-7.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:bb9d7efdb063903b3fdf77caec7b77c3066885068bdc0d44bc1b0c171033f944"}, + {file = 
"coverage-7.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:fb58da65e3339b3dbe266b607bb936efb983d86b00b03eb04c4ad5b442c58428"}, + {file = "coverage-7.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8d16bbe566e16a71d123cd66382c1315fcd520c7573652a8074a8fe281b38c6a"}, + {file = "coverage-7.11.3-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8258f10059b5ac837232c589a350a2df4a96406d6d5f2a09ec587cbdd539655"}, + {file = "coverage-7.11.3-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4c5627429f7fbff4f4131cfdd6abd530734ef7761116811a707b88b7e205afd7"}, + {file = "coverage-7.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:465695268414e149bab754c54b0c45c8ceda73dd4a5c3ba255500da13984b16d"}, + {file = "coverage-7.11.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4ebcddfcdfb4c614233cff6e9a3967a09484114a8b2e4f2c7a62dc83676ba13f"}, + {file = "coverage-7.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:13b2066303a1c1833c654d2af0455bb009b6e1727b3883c9964bc5c2f643c1d0"}, + {file = "coverage-7.11.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d8750dd20362a1b80e3cf84f58013d4672f89663aee457ea59336df50fab6739"}, + {file = "coverage-7.11.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ab6212e62ea0e1006531a2234e209607f360d98d18d532c2fa8e403c1afbdd71"}, + {file = "coverage-7.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a6b17c2b5e0b9bb7702449200f93e2d04cb04b1414c41424c08aa1e5d352da76"}, + {file = "coverage-7.11.3-cp313-cp313t-win32.whl", hash = "sha256:426559f105f644b69290ea414e154a0d320c3ad8a2bb75e62884731f69cf8e2c"}, + {file = "coverage-7.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:90a96fcd824564eae6137ec2563bd061d49a32944858d4bdbae5c00fb10e76ac"}, + {file = "coverage-7.11.3-cp313-cp313t-win_arm64.whl", hash = 
"sha256:1e33d0bebf895c7a0905fcfaff2b07ab900885fc78bba2a12291a2cfbab014cc"}, + {file = "coverage-7.11.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fdc5255eb4815babcdf236fa1a806ccb546724c8a9b129fd1ea4a5448a0bf07c"}, + {file = "coverage-7.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fe3425dc6021f906c6325d3c415e048e7cdb955505a94f1eb774dafc779ba203"}, + {file = "coverage-7.11.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4ca5f876bf41b24378ee67c41d688155f0e54cdc720de8ef9ad6544005899240"}, + {file = "coverage-7.11.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9061a3e3c92b27fd8036dafa26f25d95695b6aa2e4514ab16a254f297e664f83"}, + {file = "coverage-7.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:abcea3b5f0dc44e1d01c27090bc32ce6ffb7aa665f884f1890710454113ea902"}, + {file = "coverage-7.11.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:68c4eb92997dbaaf839ea13527be463178ac0ddd37a7ac636b8bc11a51af2428"}, + {file = "coverage-7.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:149eccc85d48c8f06547534068c41d69a1a35322deaa4d69ba1561e2e9127e75"}, + {file = "coverage-7.11.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:08c0bcf932e47795c49f0406054824b9d45671362dfc4269e0bc6e4bff010704"}, + {file = "coverage-7.11.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:39764c6167c82d68a2d8c97c33dba45ec0ad9172570860e12191416f4f8e6e1b"}, + {file = "coverage-7.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3224c7baf34e923ffc78cb45e793925539d640d42c96646db62dbd61bbcfa131"}, + {file = "coverage-7.11.3-cp314-cp314-win32.whl", hash = "sha256:c713c1c528284d636cd37723b0b4c35c11190da6f932794e145fc40f8210a14a"}, + {file = "coverage-7.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:c381a252317f63ca0179d2c7918e83b99a4ff3101e1b24849b999a00f9cd4f86"}, + {file = 
"coverage-7.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:3e33a968672be1394eded257ec10d4acbb9af2ae263ba05a99ff901bb863557e"}, + {file = "coverage-7.11.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f9c96a29c6d65bd36a91f5634fef800212dff69dacdb44345c4c9783943ab0df"}, + {file = "coverage-7.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2ec27a7a991d229213c8070d31e3ecf44d005d96a9edc30c78eaeafaa421c001"}, + {file = "coverage-7.11.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:72c8b494bd20ae1c58528b97c4a67d5cfeafcb3845c73542875ecd43924296de"}, + {file = "coverage-7.11.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:60ca149a446da255d56c2a7a813b51a80d9497a62250532598d249b3cdb1a926"}, + {file = "coverage-7.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb5069074db19a534de3859c43eec78e962d6d119f637c41c8e028c5ab3f59dd"}, + {file = "coverage-7.11.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac5d5329c9c942bbe6295f4251b135d860ed9f86acd912d418dce186de7c19ac"}, + {file = "coverage-7.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e22539b676fafba17f0a90ac725f029a309eb6e483f364c86dcadee060429d46"}, + {file = "coverage-7.11.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:2376e8a9c889016f25472c452389e98bc6e54a19570b107e27cde9d47f387b64"}, + {file = "coverage-7.11.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:4234914b8c67238a3c4af2bba648dc716aa029ca44d01f3d51536d44ac16854f"}, + {file = "coverage-7.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f0b4101e2b3c6c352ff1f70b3a6fcc7c17c1ab1a91ccb7a33013cb0782af9820"}, + {file = "coverage-7.11.3-cp314-cp314t-win32.whl", hash = "sha256:305716afb19133762e8cf62745c46c4853ad6f9eeba54a593e373289e24ea237"}, + {file = "coverage-7.11.3-cp314-cp314t-win_amd64.whl", hash = 
"sha256:9245bd392572b9f799261c4c9e7216bafc9405537d0f4ce3ad93afe081a12dc9"}, + {file = "coverage-7.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:9a1d577c20b4334e5e814c3d5fe07fa4a8c3ae42a601945e8d7940bab811d0bd"}, + {file = "coverage-7.11.3-py3-none-any.whl", hash = "sha256:351511ae28e2509c8d8cae5311577ea7dd511ab8e746ffc8814a0896c3d33fbe"}, + {file = "coverage-7.11.3.tar.gz", hash = "sha256:0f59387f5e6edbbffec2281affb71cdc85e0776c1745150a3ab9b6c1d016106b"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "coveragespace" -version = "6.0.2" +version = "6.1" description = "A place to track your code coverage metrics." -category = "dev" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" +groups = ["dev"] files = [ - {file = "coveragespace-6.0.2-py3-none-any.whl", hash = "sha256:e4900e6eac340d2ea8b6f831a75984392eb9090f9d811c94b7f4c9496214d9b5"}, - {file = "coveragespace-6.0.2.tar.gz", hash = "sha256:07ff131408bcd37cfe3b142092a289d949102d36221e40f066a557121be7cae7"}, + {file = "coveragespace-6.1-py3-none-any.whl", hash = "sha256:ca6ccd5eb32eb6ce5fe78de6c052353b9fbb378a886fde0838480defe33406a8"}, + {file = "coveragespace-6.1.tar.gz", hash = "sha256:049c0b7b629ad43d72692f0f99b9f8a97936ad596f7f27c1af61323fba90ebef"}, ] [package.dependencies] @@ -772,9 +1107,9 @@ requests = ">=2.28,<3.0" name = "cryptography" version = "3.4.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, @@ -810,55 +1145,63 @@ test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)" [[package]] name = "debugpy" -version = "1.8.1" +version = "1.8.17" description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741"}, - {file = "debugpy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e"}, - {file = "debugpy-1.8.1-cp310-cp310-win32.whl", hash = "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0"}, - {file = "debugpy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd"}, - {file = "debugpy-1.8.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb"}, - {file = "debugpy-1.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099"}, - {file = "debugpy-1.8.1-cp311-cp311-win32.whl", hash = "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146"}, - {file = "debugpy-1.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8"}, - {file = "debugpy-1.8.1-cp312-cp312-macosx_11_0_universal2.whl", hash = 
"sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539"}, - {file = "debugpy-1.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace"}, - {file = "debugpy-1.8.1-cp312-cp312-win32.whl", hash = "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0"}, - {file = "debugpy-1.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98"}, - {file = "debugpy-1.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39"}, - {file = "debugpy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7"}, - {file = "debugpy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9"}, - {file = "debugpy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234"}, - {file = "debugpy-1.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42"}, - {file = "debugpy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703"}, - {file = "debugpy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23"}, - {file = "debugpy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3"}, - {file = "debugpy-1.8.1-py2.py3-none-any.whl", hash = "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242"}, - {file = "debugpy-1.8.1.zip", hash = "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42"}, +groups = ["dev"] +files = [ + {file = 
"debugpy-1.8.17-cp310-cp310-macosx_15_0_x86_64.whl", hash = "sha256:c41d2ce8bbaddcc0009cc73f65318eedfa3dbc88a8298081deb05389f1ab5542"}, + {file = "debugpy-1.8.17-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:1440fd514e1b815edd5861ca394786f90eb24960eb26d6f7200994333b1d79e3"}, + {file = "debugpy-1.8.17-cp310-cp310-win32.whl", hash = "sha256:3a32c0af575749083d7492dc79f6ab69f21b2d2ad4cd977a958a07d5865316e4"}, + {file = "debugpy-1.8.17-cp310-cp310-win_amd64.whl", hash = "sha256:a3aad0537cf4d9c1996434be68c6c9a6d233ac6f76c2a482c7803295b4e4f99a"}, + {file = "debugpy-1.8.17-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:d3fce3f0e3de262a3b67e69916d001f3e767661c6e1ee42553009d445d1cd840"}, + {file = "debugpy-1.8.17-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:c6bdf134457ae0cac6fb68205776be635d31174eeac9541e1d0c062165c6461f"}, + {file = "debugpy-1.8.17-cp311-cp311-win32.whl", hash = "sha256:e79a195f9e059edfe5d8bf6f3749b2599452d3e9380484cd261f6b7cd2c7c4da"}, + {file = "debugpy-1.8.17-cp311-cp311-win_amd64.whl", hash = "sha256:b532282ad4eca958b1b2d7dbcb2b7218e02cb934165859b918e3b6ba7772d3f4"}, + {file = "debugpy-1.8.17-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:f14467edef672195c6f6b8e27ce5005313cb5d03c9239059bc7182b60c176e2d"}, + {file = "debugpy-1.8.17-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:24693179ef9dfa20dca8605905a42b392be56d410c333af82f1c5dff807a64cc"}, + {file = "debugpy-1.8.17-cp312-cp312-win32.whl", hash = "sha256:6a4e9dacf2cbb60d2514ff7b04b4534b0139facbf2abdffe0639ddb6088e59cf"}, + {file = "debugpy-1.8.17-cp312-cp312-win_amd64.whl", hash = "sha256:e8f8f61c518952fb15f74a302e068b48d9c4691768ade433e4adeea961993464"}, + {file = "debugpy-1.8.17-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:857c1dd5d70042502aef1c6d1c2801211f3ea7e56f75e9c335f434afb403e464"}, + {file = "debugpy-1.8.17-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:3bea3b0b12f3946e098cce9b43c3c46e317b567f79570c3f43f0b96d00788088"}, + {file 
= "debugpy-1.8.17-cp313-cp313-win32.whl", hash = "sha256:e34ee844c2f17b18556b5bbe59e1e2ff4e86a00282d2a46edab73fd7f18f4a83"}, + {file = "debugpy-1.8.17-cp313-cp313-win_amd64.whl", hash = "sha256:6c5cd6f009ad4fca8e33e5238210dc1e5f42db07d4b6ab21ac7ffa904a196420"}, + {file = "debugpy-1.8.17-cp314-cp314-macosx_15_0_universal2.whl", hash = "sha256:045290c010bcd2d82bc97aa2daf6837443cd52f6328592698809b4549babcee1"}, + {file = "debugpy-1.8.17-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:b69b6bd9dba6a03632534cdf67c760625760a215ae289f7489a452af1031fe1f"}, + {file = "debugpy-1.8.17-cp314-cp314-win32.whl", hash = "sha256:5c59b74aa5630f3a5194467100c3b3d1c77898f9ab27e3f7dc5d40fc2f122670"}, + {file = "debugpy-1.8.17-cp314-cp314-win_amd64.whl", hash = "sha256:893cba7bb0f55161de4365584b025f7064e1f88913551bcd23be3260b231429c"}, + {file = "debugpy-1.8.17-cp38-cp38-macosx_15_0_x86_64.whl", hash = "sha256:8deb4e31cd575c9f9370042876e078ca118117c1b5e1f22c32befcfbb6955f0c"}, + {file = "debugpy-1.8.17-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:b75868b675949a96ab51abc114c7163f40ff0d8f7d6d5fd63f8932fd38e9c6d7"}, + {file = "debugpy-1.8.17-cp38-cp38-win32.whl", hash = "sha256:17e456da14848d618662354e1dccfd5e5fb75deec3d1d48dc0aa0baacda55860"}, + {file = "debugpy-1.8.17-cp38-cp38-win_amd64.whl", hash = "sha256:e851beb536a427b5df8aa7d0c7835b29a13812f41e46292ff80b2ef77327355a"}, + {file = "debugpy-1.8.17-cp39-cp39-macosx_15_0_x86_64.whl", hash = "sha256:f2ac8055a0c4a09b30b931100996ba49ef334c6947e7ae365cdd870416d7513e"}, + {file = "debugpy-1.8.17-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:eaa85bce251feca8e4c87ce3b954aba84b8c645b90f0e6a515c00394a9f5c0e7"}, + {file = "debugpy-1.8.17-cp39-cp39-win32.whl", hash = "sha256:b13eea5587e44f27f6c48588b5ad56dcb74a4f3a5f89250443c94587f3eb2ea1"}, + {file = "debugpy-1.8.17-cp39-cp39-win_amd64.whl", hash = "sha256:bb1bbf92317e1f35afcf3ef0450219efb3afe00be79d8664b250ac0933b9015f"}, + {file = "debugpy-1.8.17-py2.py3-none-any.whl", hash 
= "sha256:60c7dca6571efe660ccb7a9508d73ca14b8796c4ed484c2002abba714226cfef"}, + {file = "debugpy-1.8.17.tar.gz", hash = "sha256:fd723b47a8c08892b1a16b2c6239a8b96637c62a59b94bb5dab4bac592a58a8e"}, ] [[package]] name = "decorator" -version = "5.1.1" +version = "5.2.1" description = "Decorators for Humans" -category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, ] [[package]] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -868,9 +1211,9 @@ files = [ name = "dicttoxml" version = "1.7.16" description = "Converts a Python dictionary or other native data type into a valid XML string." 
-category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "dicttoxml-1.7.16-py3-none-any.whl", hash = "sha256:8677671496d0d38e66c7179f82a7e9059f94887777955dc71b0ac602ee637c26"}, {file = "dicttoxml-1.7.16.tar.gz", hash = "sha256:6f36ce644881db5cd8940bee9b7cb3f3f6b7b327ba8a67d83d3e2caa0538bf9d"}, @@ -878,14 +1221,14 @@ files = [ [[package]] name = "dill" -version = "0.3.8" +version = "0.4.0" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049"}, + {file = "dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0"}, ] [package.extras] @@ -894,46 +1237,46 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "dnspython" -version = "2.5.0" +version = "2.8.0" description = "DNS toolkit" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "dnspython-2.5.0-py3-none-any.whl", hash = "sha256:6facdf76b73c742ccf2d07add296f178e629da60be23ce4b0a9c927b1e02c3a6"}, - {file = "dnspython-2.5.0.tar.gz", hash = "sha256:a0034815a59ba9ae888946be7ccca8f7c157b286f8455b379c692efb51022a15"}, + {file = "dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af"}, + {file = "dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=5.0.3)", "mypy (>=1.0.1)", "pylint (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", 
"sphinx (>=7.0.0)", "twine (>=4.0.0)", "wheel (>=0.41.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.25.1)"] -doq = ["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1)"] -trio = ["trio (>=0.14)"] -wmi = ["wmi (>=1.5.1)"] +dev = ["black (>=25.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.17.0)", "mypy (>=1.17)", "pylint (>=3)", "pytest (>=8.4)", "pytest-cov (>=6.2.0)", "quart-trio (>=0.12.0)", "sphinx (>=8.2.0)", "sphinx-rtd-theme (>=3.0.0)", "twine (>=6.1.0)", "wheel (>=0.45.0)"] +dnssec = ["cryptography (>=45)"] +doh = ["h2 (>=4.2.0)", "httpcore (>=1.0.0)", "httpx (>=0.28.0)"] +doq = ["aioquic (>=1.2.0)"] +idna = ["idna (>=3.10)"] +trio = ["trio (>=0.30)"] +wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""] [[package]] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, ] [[package]] name = "email-validator" -version = "2.1.0.post1" +version = "2.3.0" description = "A robust email address syntax and deliverability validation library." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "email_validator-2.1.0.post1-py3-none-any.whl", hash = "sha256:c973053efbeddfef924dc0bd93f6e77a1ea7ee0fce935aea7103c7a3d6d2d637"}, - {file = "email_validator-2.1.0.post1.tar.gz", hash = "sha256:a4b0bd1cf55f073b924258d19321b1f3aa74b4b5a71a42c305575dba920e1a44"}, + {file = "email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4"}, + {file = "email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426"}, ] [package.dependencies] @@ -942,41 +1285,45 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.3.0" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version == \"3.10\"" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + [package.extras] test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.0.1" +version = "2.2.1" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = 
"sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, + {file = "executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017"}, + {file = "executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4"}, ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] name = "faker" version = "18.13.0" description = "Faker is a Python package that generates fake data for you." -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "Faker-18.13.0-py3-none-any.whl", hash = "sha256:801d1a2d71f1fc54d332de2ab19de7452454309937233ea2f7485402882d67b3"}, {file = "Faker-18.13.0.tar.gz", hash = "sha256:84bcf92bb725dd7341336eea4685df9a364f16f2470c4d29c1d7e6c5fd5a457d"}, @@ -987,14 +1334,14 @@ python-dateutil = ">=2.4" [[package]] name = "fastjsonschema" -version = "2.19.1" +version = "2.21.2" description = "Fastest Python implementation of JSON schema" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ - {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, - {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, + {file = "fastjsonschema-2.21.2-py3-none-any.whl", hash = "sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463"}, + {file = "fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de"}, ] [package.extras] @@ 
-1004,9 +1351,9 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" +groups = ["dev"] files = [ {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, @@ -1021,9 +1368,9 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.3.3" description = "A simple framework for building complex web applications." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, @@ -1044,9 +1391,9 @@ dotenv = ["python-dotenv"] name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, @@ -1058,30 +1405,30 @@ Flask = "*" [[package]] name = "flask-caching" -version = "2.1.0" +version = "2.3.1" description = "Adds caching support to Flask applications." 
-category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Flask-Caching-2.1.0.tar.gz", hash = "sha256:b7500c145135836a952e3de3a80881d9654e327a29c852c9265607f5c449235c"}, - {file = "Flask_Caching-2.1.0-py3-none-any.whl", hash = "sha256:f02645a629a8c89800d96dc8f690a574a0d49dcd66c7536badc6d362ba46b716"}, + {file = "Flask_Caching-2.3.1-py3-none-any.whl", hash = "sha256:d3efcf600e5925ea5a2fcb810f13b341ae984f5b52c00e9d9070392f3ca10761"}, + {file = "flask_caching-2.3.1.tar.gz", hash = "sha256:65d7fd1b4eebf810f844de7de6258254b3248296ee429bdcb3f741bcbf7b98c9"}, ] [package.dependencies] -cachelib = ">=0.9.0,<0.10.0" +cachelib = ">=0.9.0" Flask = "*" [[package]] name = "flask-cors" -version = "4.0.0" +version = "4.0.2" description = "A Flask extension adding a decorator for CORS support" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "Flask-Cors-4.0.0.tar.gz", hash = "sha256:f268522fcb2f73e2ecdde1ef45e2fd5c71cc48fe03cffb4b441c6d1b40684eb0"}, - {file = "Flask_Cors-4.0.0-py2.py3-none-any.whl", hash = "sha256:bc3492bfd6368d27cfe79c7821df5a8a319e1a6d5eab277a3794be19bdc51783"}, + {file = "Flask_Cors-4.0.2-py2.py3-none-any.whl", hash = "sha256:38364faf1a7a5d0a55bd1d2e2f83ee9e359039182f5e6a029557e1f56d92c09a"}, + {file = "flask_cors-4.0.2.tar.gz", hash = "sha256:493b98e2d1e2f1a4720a7af25693ef2fe32fbafec09a2f72c59f3e475eda61d2"}, ] [package.dependencies] @@ -1091,9 +1438,9 @@ Flask = ">=0.9" name = "flask-mail" version = "0.9.1" description = "Flask extension for sending email" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, ] @@ -1102,16 +1449,36 @@ files = [ blinker = "*" Flask = "*" +[[package]] +name = "flask-mailman" +version = "1.1.1" +description = "Porting Django's email implementation to your Flask 
applications." +optional = false +python-versions = "<4.0,>=3.7" +groups = ["main"] +files = [ + {file = "flask_mailman-1.1.1-py3-none-any.whl", hash = "sha256:0a66ead606b2ec9e4371d727f82709c7a51270bc5306be57c9f4ce0ed29dbe57"}, + {file = "flask_mailman-1.1.1.tar.gz", hash = "sha256:3bc1ffffbd655ba9e468946a5f02e9cc772594fe1e98ace636c2f6717419eefa"}, +] + +[package.dependencies] +flask = ">=1.0" + +[package.extras] +dev = ["bump2version", "pip", "pre-commit", "toml", "tox", "twine", "virtualenv"] +doc = ["mkdocs", "mkdocs-autorefs", "mkdocs-include-markdown-plugin", "mkdocs-material", "mkdocs-material-extensions", "mkdocstrings"] +test = ["aiosmtpd (>=1.4.4.post2,<2.0.0)", "black", "flake8", "isort", "pytest", "pytest-cov"] + [[package]] name = "flask-restx" -version = "1.3.0" +version = "1.3.2" description = "Fully featured framework for fast, easy and documented API development with Flask" -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728"}, - {file = "flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691"}, + {file = "flask-restx-1.3.2.tar.gz", hash = "sha256:0ae13d77e7d7e4dce513970cfa9db45364aef210e99022de26d2b73eb4dbced5"}, + {file = "flask_restx-1.3.2-py2.py3-none-any.whl", hash = "sha256:6e035496e8223668044fc45bf769e526352fd648d9e159bd631d94fd645a687b"}, ] [package.dependencies] @@ -1119,21 +1486,21 @@ aniso8601 = ">=0.82" Flask = ">=0.8,<2.0.0 || >2.0.0" importlib-resources = "*" jsonschema = "*" -pytz = "*" +referencing = "*" werkzeug = "!=2.0.0" [package.extras] -dev = ["Faker (==2.0.0)", "black", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", 
"tox", "twine (==3.8.0)", "tzlocal"] +dev = ["Faker (==2.0.0)", "backports.zoneinfo ; python_version < \"3.9\"", "black", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "tox", "twine (==3.8.0)"] doc = ["Sphinx (==5.3.0)", "alabaster (==0.7.12)", "sphinx-issues (==3.0.1)"] -test = ["Faker (==2.0.0)", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "twine (==3.8.0)", "tzlocal"] +test = ["Faker (==2.0.0)", "backports.zoneinfo ; python_version < \"3.9\"", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "twine (==3.8.0)"] [[package]] name = "flask-sqlalchemy" version = "3.1.1" description = "Add SQLAlchemy support to your Flask application." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, @@ -1147,9 +1514,9 @@ sqlalchemy = ">=2.0.16" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -category = "dev" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +groups = ["dev"] files = [ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, @@ -1157,26 +1524,166 @@ files = [ [[package]] name = "freezegun" -version = "1.4.0" +version = "1.5.5" description = "Let your Python tests travel through time" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, - {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, + {file = "freezegun-1.5.5-py3-none-any.whl", hash = "sha256:cd557f4a75cf074e84bc374249b9dd491eaeacd61376b9eb3c423282211619d2"}, + {file = "freezegun-1.5.5.tar.gz", hash = "sha256:ac7742a6cc6c25a2c35e9292dfd554b897b517d2dec26891a2e8debf205cb94a"}, ] [package.dependencies] python-dateutil = ">=2.7" +[[package]] +name = "frozenlist" +version = "1.8.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = 
"frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011"}, + {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565"}, + {file = "frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7"}, + {file = "frozenlist-1.8.0-cp310-cp310-win32.whl", hash = "sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a"}, + {file = "frozenlist-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6"}, + {file = "frozenlist-1.8.0-cp310-cp310-win_arm64.whl", hash = "sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967"}, + {file = "frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25"}, + {file = "frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b"}, + {file = "frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa"}, + {file = "frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf"}, + {file = "frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746"}, + {file = "frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a"}, + {file = 
"frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed"}, + {file = "frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496"}, + {file = "frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231"}, + {file = "frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8"}, + {file = 
"frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7"}, + {file = "frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806"}, + {file = "frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0"}, + {file = "frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e"}, + {file = 
"frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8b7138e5cd0647e4523d6685b0eac5d4be9a184ae9634492f25c6eb38c12a47"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a6483e309ca809f1efd154b4d37dc6d9f61037d6c6a81c2dc7a15cb22c8c5dca"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b9290cf81e95e93fdf90548ce9d3c1211cf574b8e3f4b3b7cb0537cf2227068"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:59a6a5876ca59d1b63af8cd5e7ffffb024c3dc1e9cf9301b21a2e76286505c95"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc4126390929823e2d2d9dc79ab4046ed74680360fc5f38b585c12c66cdf459"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:332db6b2563333c5671fecacd085141b5800cb866be16d5e3eb15a2086476675"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ff15928d62a0b80bb875655c39bf517938c7d589554cbd2669be42d97c2cb61"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7bf6cdf8e07c8151fba6fe85735441240ec7f619f935a5205953d58009aef8c6"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:48e6d3f4ec5c7273dfe83ff27c91083c6c9065af655dc2684d2c200c94308bb5"}, + {file = 
"frozenlist-1.8.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:1a7607e17ad33361677adcd1443edf6f5da0ce5e5377b798fba20fae194825f3"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3a935c3a4e89c733303a2d5a7c257ea44af3a56c8202df486b7f5de40f37e1"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:940d4a017dbfed9daf46a3b086e1d2167e7012ee297fef9e1c545c4d022f5178"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b9be22a69a014bc47e78072d0ecae716f5eb56c15238acca0f43d6eb8e4a5bda"}, + {file = "frozenlist-1.8.0-cp39-cp39-win32.whl", hash = "sha256:1aa77cb5697069af47472e39612976ed05343ff2e84a3dcf15437b232cbfd087"}, + {file = "frozenlist-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:7398c222d1d405e796970320036b1b563892b65809d9e5261487bb2c7f7b5c6a"}, + {file = "frozenlist-1.8.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f3b365f31c6cd4af24545ca0a244a53688cad8834e32f56831c4923b50a103"}, + {file = "frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d"}, + {file = "frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad"}, +] + [[package]] name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
-category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, @@ -1190,256 +1697,278 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.2.4" description = "Lightweight in-process concurrent programming" -category = "main" optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = 
"sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_machine == \"aarch64\" or platform_machine == 
\"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" +files = [ + {file = "greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8"}, + {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"}, + {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = 
"sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5"}, + {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"}, + {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"}, + {file = 
"greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d"}, + {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"}, + {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = 
"sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929"}, + {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"}, + {file = "greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"}, + {file = 
"greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681"}, + {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"}, + {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:18d9260df2b5fbf41ae5139e1be4e796d99655f023a636cd0e11e6406cca7d58"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:671df96c1f23c4a0d4077a325483c1503c96a1b7d9db26592ae770daa41233d4"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16458c245a38991aa19676900d48bd1a6f2ce3e16595051a4db9d012154e8433"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be"}, + {file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"}, + {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"}, + {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"}, ] [package.extras] docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] +test = ["objgraph", "psutil", "setuptools"] [[package]] name = "growthbook" -version = "1.0.0" +version = "1.4.7" description = "Powerful Feature flagging and A/B testing for Python apps" -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "growthbook-1.0.0-py2.py3-none-any.whl", hash = "sha256:919acf8b543bd0f7696626006d2bc2aeb818bfa7b63953e6fb6b597cd2b46a43"}, - {file = "growthbook-1.0.0.tar.gz", hash = "sha256:465b9dd370a3a6dbad75b12558646d6c51e7926f311b7ad74fb3dfc76e1eb4ca"}, + {file = "growthbook-1.4.7-py2.py3-none-any.whl", hash = "sha256:a9f2d0ee1fa48c3bcd3075f2ff94e238713cc972333585a87566907aa30aa322"}, + {file = "growthbook-1.4.7.tar.gz", hash = "sha256:2dab2b2b8aecabf4bbbfa8acf37804eeea8c55cd419d09c5e00648b2b728be13"}, ] [package.dependencies] +aiohttp = ">=3.6.0" cryptography = "*" -typing-extensions = "*" +typing_extensions = "*" urllib3 = "*" [[package]] name = "h11" -version = "0.14.0" +version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] [[package]] name = "httpcore" -version = "1.0.3" +version = "1.0.9" description = "A minimal low-level HTTP client." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, - {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, ] [package.dependencies] certifi = "*" -h11 = ">=0.13,<0.15" +h11 = ">=0.16" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] -trio = ["trio (>=0.22.0,<0.24.0)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" -version = "0.26.0" +version = "0.28.1" description = "The next generation HTTP client." 
-category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, - {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] anyio = "*" certifi = "*" -httpcore = ">=1.0.0,<2.0.0" +httpcore = "==1.*" idna = "*" -sniffio = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.6" +version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] 
name = "importlib-metadata" -version = "7.0.1" +version = "8.7.0" description = "Read metadata from Python packages" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "importlib-resources" -version = "6.1.1" +version = "6.5.2" description = "Read resources from Python packages" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ 
- {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"}, - {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, + {file = "importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec"}, + {file = "importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" -version = "2.0.0" +version = "2.3.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, ] 
[[package]] name = "ipykernel" -version = "6.29.2" +version = "7.1.0" description = "IPython Kernel for Jupyter" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "ipykernel-6.29.2-py3-none-any.whl", hash = "sha256:50384f5c577a260a1d53f1f59a828c7266d321c9b7d00d345693783f66616055"}, - {file = "ipykernel-6.29.2.tar.gz", hash = "sha256:3bade28004e3ff624ed57974948116670604ac5f676d12339693f3142176d3f0"}, + {file = "ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c"}, + {file = "ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db"}, ] [package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} +appnope = {version = ">=0.1.2", markers = "platform_system == \"Darwin\""} comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-client = ">=8.0.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" +nest-asyncio = ">=1.4" +packaging = ">=22" +psutil = ">=5.7" +pyzmq = ">=25" +tornado = ">=6.2" traitlets = ">=5.4.0" [package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +cov = ["coverage[toml]", "matplotlib", "pytest-cov", "trio"] +docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx (<8.2.0)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (==0.23.4)", "pytest-cov", "pytest-timeout"] +test = 
["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0,<9)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" -version = "8.21.0" +version = "8.37.0" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] +markers = "python_version == \"3.10\"" files = [ - {file = "ipython-8.21.0-py3-none-any.whl", hash = "sha256:1050a3ab8473488d7eee163796b02e511d0735cf43a04ba2a8348bd0f2eaf8a5"}, - {file = "ipython-8.21.0.tar.gz", hash = "sha256:48fbc236fbe0e138b88773fa0437751f14c3645fb483f1d4c5dee58b37e5ce73"}, + {file = "ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2"}, + {file = "ipython-8.37.0.tar.gz", hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216"}, ] [package.dependencies] @@ -1448,69 +1977,118 @@ decorator = "*" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -prompt-toolkit = ">=3.0.41,<3.1.0" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" +stack_data = "*" +traitlets = ">=5.13.0" +typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] 
black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] kernel = ["ipykernel"] +matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath", "trio"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "ipython" +version = "9.7.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.11" +groups = ["dev"] +markers = "python_version >= \"3.11\"" +files = [ + {file = "ipython-9.7.0-py3-none-any.whl", hash = "sha256:bce8ac85eb9521adc94e1845b4c03d88365fd6ac2f4908ec4ed1eb1b0a065f9f"}, + {file = "ipython-9.7.0.tar.gz", hash = "sha256:5f6de88c905a566c6a9d6c400a8fed54a638e1f7543d17aae2551133216b1e4e"}, +] + +[package.dependencies] +colorama = {version = ">=0.4.4", markers = "sys_platform == \"win32\""} +decorator = ">=4.3.2" +ipython-pygments-lexers = ">=1.0.0" +jedi = ">=0.18.1" +matplotlib-inline = ">=0.1.5" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.11.0" +stack_data = 
">=0.6.0" +traitlets = ">=5.13.0" +typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} + +[package.extras] +all = ["ipython[doc,matplotlib,test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[matplotlib,test]", "setuptools (>=70.0)", "sphinx (>=8.0)", "sphinx-rtd-theme (>=0.1.8)", "sphinx_toml (==0.0.4)", "typing_extensions"] +matplotlib = ["matplotlib (>3.9)"] +test = ["packaging (>=20.1.0)", "pytest (>=7.0.0)", "pytest-asyncio (>=1.0.0)", "setuptools (>=61.2)", "testpath (>=0.2)"] +test-extra = ["curio", "ipykernel (>6.30)", "ipython[matplotlib]", "ipython[test]", "jupyter_ai", "nbclient", "nbformat", "numpy (>=1.27)", "pandas (>2.1)", "trio (>=0.1.0)"] + +[[package]] +name = "ipython-pygments-lexers" +version = "1.1.1" +description = "Defines a variety of Pygments lexers for highlighting IPython code." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version >= \"3.11\"" +files = [ + {file = "ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c"}, + {file = "ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81"}, +] + +[package.dependencies] +pygments = "*" [[package]] name = "ipywidgets" -version = "8.1.2" +version = "8.1.8" description = "Jupyter interactive widgets" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "ipywidgets-8.1.2-py3-none-any.whl", hash = "sha256:bbe43850d79fb5e906b14801d6c01402857996864d1e5b6fa62dd2ee35559f60"}, - {file = "ipywidgets-8.1.2.tar.gz", hash = "sha256:d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9"}, + {file = "ipywidgets-8.1.8-py3-none-any.whl", hash = "sha256:ecaca67aed704a338f88f67b1181b58f821ab5dc89c1f0f5ef99db43c1c2921e"}, + {file = "ipywidgets-8.1.8.tar.gz", hash = 
"sha256:61f969306b95f85fba6b6986b7fe45d73124d1d9e3023a8068710d47a22ea668"}, ] [package.dependencies] comm = ">=0.1.3" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.10,<3.1.0" +jupyterlab_widgets = ">=3.0.15,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.10,<4.1.0" +widgetsnbextension = ">=4.0.14,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] [[package]] name = "isodate" -version = "0.6.1" +version = "0.7.2" description = "An ISO 8601 date/time/duration parser and formatter" -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, ] -[package.dependencies] -six = "*" - [[package]] name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, @@ -1523,9 +2101,9 @@ arrow = ">=0.15.0" name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." 
-category = "dev" optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -1536,46 +2114,46 @@ colors = ["colorama (>=0.4.6)"] [[package]] name = "itsdangerous" -version = "2.1.2" +version = "2.2.0" description = "Safely pass data to untrusted environments and back." -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, ] [[package]] name = "jedi" -version = "0.19.1" +version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." 
-category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, ] [package.dependencies] -parso = ">=0.8.3,<0.9.0" +parso = ">=0.8.4,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.6" description = "A very fast and expressive template engine." 
-category = "main" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -1586,41 +2164,41 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "json5" -version = "0.9.14" +version = "0.12.1" description = "A Python implementation of the JSON5 data format." -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8.0" +groups = ["dev"] files = [ - {file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f"}, - {file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02"}, + {file = "json5-0.12.1-py3-none-any.whl", hash = "sha256:d9c9b3bc34a5f54d43c35e11ef7cb87d8bdd098c6ace87117a7b7e83e705c1d5"}, + {file = "json5-0.12.1.tar.gz", hash = "sha256:b2743e77b3242f8d03c143dd975a6ec7c52e2f2afe76ed934e53503dd4ad4990"}, ] [package.extras] -dev = ["hypothesis"] +dev = ["build (==1.2.2.post1)", "coverage (==7.5.4) ; python_version < \"3.9\"", "coverage (==7.8.0) ; python_version >= \"3.9\"", "mypy (==1.14.1) ; python_version < \"3.9\"", "mypy (==1.15.0) ; python_version >= \"3.9\"", "pip (==25.0.1)", "pylint (==3.2.7) ; python_version < \"3.9\"", "pylint (==3.3.6) ; python_version >= \"3.9\"", "ruff (==0.11.2)", "twine (==6.1.0)", "uv (==0.6.11)"] [[package]] name = "jsonpointer" -version = "2.4" +version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "dev" optional = false 
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, ] [[package]] name = "jsonschema" -version = "4.21.1" +version = "4.25.1" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, - {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, + {file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"}, + {file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"}, ] [package.dependencies] @@ -1633,24 +2211,25 @@ jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +rfc3987-syntax = {version = ">=1.1.0", optional = true, markers = "extra == \"format-nongpl\""} rpds-py = ">=0.7.1" uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=1.11", optional = true, markers 
= "extra == \"format-nongpl\""} +webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" -version = "2023.12.1" +version = "2025.9.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, + {file = "jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe"}, + {file = "jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d"}, ] [package.dependencies] @@ -1658,39 +2237,38 @@ referencing = ">=0.31.0" [[package]] name = "jupyter" -version = "1.0.0" +version = "1.1.1" description = "Jupyter metapackage. Install all the Jupyter components in one go." 
-category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ - {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, - {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, - {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, + {file = "jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83"}, + {file = "jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a"}, ] [package.dependencies] ipykernel = "*" ipywidgets = "*" jupyter-console = "*" +jupyterlab = "*" nbconvert = "*" notebook = "*" -qtconsole = "*" [[package]] name = "jupyter-client" -version = "8.6.0" +version = "8.6.3" description = "Jupyter protocol implementation and client libraries" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, - {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, + {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, + {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, ] [package.dependencies] -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1698,15 +2276,15 @@ traitlets = ">=5.3" [package.extras] docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = 
["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; sys_platform == \"win32\"", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-console" version = "6.6.3" description = "Jupyter terminal console" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, @@ -1716,7 +2294,7 @@ files = [ ipykernel = ">=6.14" ipython = "*" jupyter-client = ">=7.0.0" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" prompt-toolkit = ">=3.0.30" pygments = "*" pyzmq = ">=17" @@ -1727,39 +2305,39 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.7.1" +version = "5.9.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
-category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, - {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, + {file = "jupyter_core-5.9.1-py3-none-any.whl", hash = "sha256:ebf87fdc6073d142e114c72c9e29a9d7ca03fad818c5d300ce2adc1fb0743407"}, + {file = "jupyter_core-5.9.1.tar.gz", hash = "sha256:4d09aaff303b9566c3ce657f580bd089ff5c91f5f89cf7d8846c3cdf465b5508"}, ] [package.dependencies] platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} traitlets = ">=5.3" [package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<9)", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" -version = "0.9.0" +version = "0.12.0" description = "Jupyter Event System library" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "jupyter_events-0.9.0-py3-none-any.whl", hash = "sha256:d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf"}, - {file = "jupyter_events-0.9.0.tar.gz", hash = "sha256:81ad2e4bc710881ec274d31c6c50669d71bbaa5dd9d01e600b56faa85700d399"}, + {file = "jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb"}, + {file = "jupyter_events-0.12.0.tar.gz", hash = 
"sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b"}, ] [package.dependencies] jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +packaging = "*" python-json-logger = ">=2.0.4" pyyaml = ">=5.3" referencing = "*" @@ -1769,71 +2347,71 @@ traitlets = ">=5.3" [package.extras] cli = ["click", "rich"] -docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8)", "sphinxcontrib-spelling"] test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] [[package]] name = "jupyter-lsp" -version = "2.2.2" +version = "2.3.0" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, - {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, + {file = "jupyter_lsp-2.3.0-py3-none-any.whl", hash = "sha256:e914a3cb2addf48b1c7710914771aaf1819d46b2e5a79b0f917b5478ec93f34f"}, + {file = "jupyter_lsp-2.3.0.tar.gz", hash = "sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245"}, ] [package.dependencies] -jupyter-server = ">=1.1.2" +jupyter_server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.12.5" +version = "2.17.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
-category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, - {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, + {file = "jupyter_server-2.17.0-py3-none-any.whl", hash = "sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f"}, + {file = "jupyter_server-2.17.0.tar.gz", hash = "sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5"}, ] [package.dependencies] anyio = ">=3.1.0" -argon2-cffi = "*" -jinja2 = "*" +argon2-cffi = ">=21.1" +jinja2 = ">=3.0.3" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" -jupyter-events = ">=0.9.0" -jupyter-server-terminals = "*" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-events = ">=0.11.0" +jupyter-server-terminals = ">=0.4.4" nbconvert = ">=6.4.4" nbformat = ">=5.3.0" -overrides = "*" -packaging = "*" -prometheus-client = "*" -pywinpty = {version = "*", markers = "os_name == \"nt\""} +overrides = {version = ">=5.0", markers = "python_version < \"3.12\""} +packaging = ">=22.0" +prometheus-client = ">=0.9" +pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} pyzmq = ">=24" send2trash = ">=1.8.2" terminado = ">=0.8.3" tornado = ">=6.2.0" traitlets = ">=5.6.0" -websocket-client = "*" +websocket-client = ">=1.7" [package.extras] -docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +docs 
= ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] [[package]] name = "jupyter-server-terminals" -version = "0.5.2" +version = "0.5.3" description = "A Jupyter Server Extension Providing Terminals." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "jupyter_server_terminals-0.5.2-py3-none-any.whl", hash = "sha256:1b80c12765da979513c42c90215481bbc39bd8ae7c0350b4f85bc3eb58d0fa80"}, - {file = "jupyter_server_terminals-0.5.2.tar.gz", hash = "sha256:396b5ccc0881e550bf0ee7012c6ef1b53edbde69e67cab1d56e89711b46052e8"}, + {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, + {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, ] [package.dependencies] @@ -1846,44 +2424,46 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.1.1" +version = "4.4.10" description = "JupyterLab computational environment" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "jupyterlab-4.1.1-py3-none-any.whl", hash = "sha256:fa3e8c18b804eac04e51ceebd9dd3dd396e08106816f0d09cc426799d7087632"}, - {file = "jupyterlab-4.1.1.tar.gz", hash = "sha256:8acc9f561729d8f32c14c294c397917cddfeeb13a5d46f811979b71b4911a9fd"}, + {file = "jupyterlab-4.4.10-py3-none-any.whl", hash = "sha256:65939ab4c8dcd0c42185c2d0d1a9d60b254dc8c46fc4fdb286b63c51e9358e07"}, + 
{file = "jupyterlab-4.4.10.tar.gz", hash = "sha256:521c017508af4e1d6d9d8a9d90f47a11c61197ad63b2178342489de42540a615"}, ] [package.dependencies] async-lru = ">=1.0.0" -httpx = ">=0.25.0" -ipykernel = "*" +httpx = ">=0.25.0,<1" +ipykernel = ">=6.5.0,<6.30.0 || >6.30.0" jinja2 = ">=3.0.3" jupyter-core = "*" jupyter-lsp = ">=2.0.0" jupyter-server = ">=2.4.0,<3" -jupyterlab-server = ">=2.19.0,<3" +jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2" packaging = "*" -tomli = {version = "*", markers = "python_version < \"3.11\""} +setuptools = ">=41.1.0" +tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.2.0)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] -docs-screenshots = ["altair (==5.2.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.1)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post6)", "matplotlib (==3.8.2)", "nbconvert (>=7.0.0)", "pandas (==2.2.0)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.11.4)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<8.2.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.5.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.5)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.3.post1)", "matplotlib (==3.10.0)", "nbconvert (>=7.0.0)", "pandas (==2.2.3)", "scipy (==1.15.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] 
+upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"] [[package]] name = "jupyterlab-pygments" version = "0.3.0" description = "Pygments theme using JupyterLab CSS variables" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, @@ -1891,14 +2471,14 @@ files = [ [[package]] name = "jupyterlab-server" -version = "2.25.3" +version = "2.28.0" description = "A set of server components for JupyterLab and JupyterLab like applications." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "jupyterlab_server-2.25.3-py3-none-any.whl", hash = "sha256:c48862519fded9b418c71645d85a49b2f0ec50d032ba8316738e9276046088c1"}, - {file = "jupyterlab_server-2.25.3.tar.gz", hash = "sha256:846f125a8a19656611df5b03e5912c8393cea6900859baa64fa515eb64a8dc40"}, + {file = "jupyterlab_server-2.28.0-py3-none-any.whl", hash = "sha256:e4355b148fdcf34d312bbbc80f22467d6d20460e8b8736bf235577dd18506968"}, + {file = "jupyterlab_server-2.28.0.tar.gz", hash = "sha256:35baa81898b15f93573e2deca50d11ac0ae407ebb688299d3a5213265033712c"}, ] [package.dependencies] @@ -1913,77 +2493,102 @@ requests = ">=2.31" [package.extras] docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", 
"sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] [[package]] name = "jupyterlab-widgets" -version = "3.0.10" +version = "3.0.16" description = "Jupyter interactive widgets for JupyterLab" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "jupyterlab_widgets-3.0.10-py3-none-any.whl", hash = "sha256:dd61f3ae7a5a7f80299e14585ce6cf3d6925a96c9103c978eda293197730cb64"}, - {file = "jupyterlab_widgets-3.0.10.tar.gz", hash = "sha256:04f2ac04976727e4f9d0fa91cdc2f1ab860f965e504c29dbd6a65c882c9d04c0"}, + {file = "jupyterlab_widgets-3.0.16-py3-none-any.whl", hash = "sha256:45fa36d9c6422cf2559198e4db481aa243c7a32d9926b500781c830c80f7ecf8"}, + {file = "jupyterlab_widgets-3.0.16.tar.gz", hash = "sha256:423da05071d55cf27a9e602216d35a3a65a3e41cdf9c5d3b643b814ce38c19e0"}, ] [[package]] -name = "lazy-object-proxy" -version = "1.10.0" -description = "A fast and thorough lazy object proxy." 
-category = "dev" +name = "lark" +version = "1.3.1" +description = "a modern parsing library" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash 
= "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, - {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, + {file = "lark-1.3.1-py3-none-any.whl", hash = "sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12"}, + {file = "lark-1.3.1.tar.gz", hash = "sha256:b426a7a6d6d53189d318f2b6236ab5d6429eaf09259f1ca33eb716eed10d2905"}, +] + +[package.extras] +atomic-cache = ["atomicwrites"] +interegular = ["interegular (>=0.3.1,<0.4.0)"] +nearley = ["js2py"] +regex = ["regex"] + +[[package]] +name = "lazy-object-proxy" +version = "1.12.0" +description = "A fast and thorough lazy object proxy." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "lazy_object_proxy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61d5e3310a4aa5792c2b599a7a78ccf8687292c8eb09cf187cca8f09cf6a7519"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1ca33565f698ac1aece152a10f432415d1a2aa9a42dfe23e5ba2bc255ab91f6"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01c7819a410f7c255b20799b65d36b414379a30c6f1684c7bd7eb6777338c1b"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:029d2b355076710505c9545aef5ab3f750d89779310e26ddf2b7b23f6ea03cd8"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc6e3614eca88b1c8a625fc0a47d0d745e7c3255b21dac0e30b3037c5e3deeb8"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:be5fe974e39ceb0d6c9db0663c0464669cf866b2851c73971409b9566e880eab"}, + {file = 
"lazy_object_proxy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1cf69cd1a6c7fe2dbcc3edaa017cf010f4192e53796538cc7d5e1fedbfa4bcff"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:efff4375a8c52f55a145dc8487a2108c2140f0bec4151ab4e1843e52eb9987ad"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1192e8c2f1031a6ff453ee40213afa01ba765b3dc861302cd91dbdb2e2660b00"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3605b632e82a1cbc32a1e5034278a64db555b3496e0795723ee697006b980508"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a61095f5d9d1a743e1e20ec6d6db6c2ca511961777257ebd9b288951b23b44fa"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:997b1d6e10ecc6fb6fe0f2c959791ae59599f41da61d652f6c903d1ee58b7370"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ee0d6027b760a11cc18281e702c0309dd92da458a74b4c15025d7fc490deede"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4ab2c584e3cc8be0dfca422e05ad30a9abe3555ce63e9ab7a559f62f8dbc6ff9"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:14e348185adbd03ec17d051e169ec45686dcd840a3779c9d4c10aabe2ca6e1c0"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4fcbe74fb85df8ba7825fa05eddca764138da752904b378f0ae5ab33a36c308"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:563d2ec8e4d4b68ee7848c5ab4d6057a6d703cb7963b342968bb8758dda33a23"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:53c7fd99eb156bbb82cbc5d5188891d8fdd805ba6c1e3b92b90092da2a837073"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:86fd61cb2ba249b9f436d789d1356deae69ad3231dc3c0f17293ac535162672e"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:81d1852fb30fab81696f93db1b1e55a5d1ff7940838191062f5f56987d5fcc3e"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be9045646d83f6c2664c1330904b245ae2371b5c57a3195e4028aedc9f999655"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:67f07ab742f1adfb3966c40f630baaa7902be4222a17941f3d85fd1dae5565ff"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:75ba769017b944fcacbf6a80c18b2761a1795b03f8899acdad1f1c39db4409be"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:7b22c2bbfb155706b928ac4d74c1a63ac8552a55ba7fff4445155523ea4067e1"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4a79b909aa16bde8ae606f06e6bbc9d3219d2e57fb3e0076e17879072b742c65"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:338ab2f132276203e404951205fe80c3fd59429b3a724e7b662b2eb539bb1be9"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c40b3c9faee2e32bfce0df4ae63f4e73529766893258eca78548bac801c8f66"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:717484c309df78cedf48396e420fa57fc8a2b1f06ea889df7248fdd156e58847"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a6b7ea5ea1ffe15059eb44bcbcb258f97bcb40e139b88152c40d07b1a1dfc9ac"}, + {file = 
"lazy_object_proxy-1.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:08c465fb5cd23527512f9bd7b4c7ba6cec33e28aad36fbbe46bf7b858f9f3f7f"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c9defba70ab943f1df98a656247966d7729da2fe9c2d5d85346464bf320820a3"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6763941dbf97eea6b90f5b06eb4da9418cc088fce0e3883f5816090f9afcde4a"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fdc70d81235fc586b9e3d1aeef7d1553259b62ecaae9db2167a5d2550dcc391a"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0a83c6f7a6b2bfc11ef3ed67f8cbe99f8ff500b05655d8e7df9aab993a6abc95"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:256262384ebd2a77b023ad02fbcc9326282bcfd16484d5531154b02bc304f4c5"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7601ec171c7e8584f8ff3f4e440aa2eebf93e854f04639263875b8c2971f819f"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae575ad9b674d0029fc077c5231b3bc6b433a3d1a62a8c363df96974b5534728"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31020c84005d3daa4cc0fa5a310af2066efe6b0d82aeebf9ab199292652ff036"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800f32b00a47c27446a2b767df7538e6c66a3488632c402b4fb2224f9794f3c0"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:15400b18893f345857b9e18b9bd87bd06aba84af6ed086187add70aeaa3f93f1"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:3d3964fbd326578bcdfffd017ef101b6fb0484f34e731fe060ba9b8816498c36"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:424a8ab6695400845c39f13c685050eab69fa0bbac5790b201cd27375e5e41d7"}, + {file = "lazy_object_proxy-1.12.0-pp39.pp310.pp311.graalpy311-none-any.whl", hash = "sha256:c3b2e0af1f7f77c4263759c4824316ce458fabe0fceadcd24ef8ca08b2d1e402"}, + {file = "lazy_object_proxy-1.12.0.tar.gz", hash = "sha256:1f5a462d92fd0cfb82f1fab28b51bfb209fabbe6aabf7f0d51472c0c124c0c61"}, ] [[package]] name = "mako" -version = "1.3.2" +version = "1.3.10" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Mako-1.3.2-py3-none-any.whl", hash = "sha256:32a99d70754dfce237019d17ffe4a282d2d3351b9c476e90d8a60e63f133b80c"}, - {file = "Mako-1.3.2.tar.gz", hash = "sha256:2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"}, + {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, + {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, ] [package.dependencies] @@ -1998,9 +2603,9 @@ testing = ["pytest"] name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." -category = "main" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, @@ -2011,96 +2616,128 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.3" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = 
"markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = 
"markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = 
"markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = 
"markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = 
"sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = 
"markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, + {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, ] [[package]] name = "matplotlib-inline" 
-version = "0.1.6" +version = "0.2.1" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, + {file = "matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76"}, + {file = "matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe"}, ] [package.dependencies] traitlets = "*" +[package.extras] +test = ["flake8", "nbdime", "nbval", "notebook", "pytest"] + [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -2110,9 +2747,9 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, @@ -2120,35 +2757,38 @@ files = [ [[package]] name = "minilog" -version = "2.3" +version = "2.3.1" description = "Minimalistic wrapper for Python logging." 
-category = "main" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" +groups = ["main", "dev"] files = [ - {file = "minilog-2.3-py3-none-any.whl", hash = "sha256:e42dc2def1da424e90d6664279c128dde94adc5840557b27857957ed23ee09b6"}, - {file = "minilog-2.3.tar.gz", hash = "sha256:ebdf354f1dd86a2e8a824cdde4b7b50cdbc24b99a5465bc4d1806bd1e030bc92"}, + {file = "minilog-2.3.1-py3-none-any.whl", hash = "sha256:1a679fefe6140ce1d59c3246adc991f9eb480169e5a6c54d2be9023ee459dc30"}, + {file = "minilog-2.3.1.tar.gz", hash = "sha256:4b602572c3bcdd2d8f00d879f635c0de9e632d5d0307e131c91074be8acf444e"}, ] [[package]] name = "mistune" -version = "3.0.2" +version = "3.1.4" description = "A sane and fast Markdown parser with useful plugins and renderers" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, + {file = "mistune-3.1.4-py3-none-any.whl", hash = "sha256:93691da911e5d9d2e23bc54472892aff676df27a75274962ff9edc210364266d"}, + {file = "mistune-3.1.4.tar.gz", hash = "sha256:b5a7f801d389f724ec702840c11d8fc48f2b33519102fc7ee739e8177b672164"}, ] +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.11\""} + [[package]] name = "mkdocs" version = "1.3.1" description = "Project documentation with Markdown." 
-category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mkdocs-1.3.1-py3-none-any.whl", hash = "sha256:fda92466393127d2da830bc6edc3a625a14b436316d1caf347690648e774c4f0"}, {file = "mkdocs-1.3.1.tar.gz", hash = "sha256:a41a2ff25ce3bbacc953f9844ba07d106233cd76c88bac1f59cb1564ac0d87ed"}, @@ -2169,104 +2809,276 @@ watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] +[[package]] +name = "multidict" +version = "6.7.0" +description = "multidict implementation" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349"}, + {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e"}, + {file = "multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62"}, + {file = 
"multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36"}, + {file = "multidict-6.7.0-cp310-cp310-win32.whl", hash = "sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85"}, + {file = "multidict-6.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7"}, + {file = "multidict-6.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6"}, + {file = 
"multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34"}, + {file = "multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff"}, + {file = "multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81"}, + {file = "multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8"}, + {file = "multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4"}, + {file = "multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b"}, + {file = "multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288"}, + {file = "multidict-6.7.0-cp313-cp313-win32.whl", hash = 
"sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17"}, + {file = "multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390"}, + {file = "multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c"}, + {file = 
"multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6"}, + {file = "multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d"}, + {file = "multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6"}, + {file = "multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128"}, + {file = 
"multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f"}, + {file = "multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885"}, + {file = 
"multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c"}, + {file = "multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0"}, + {file = "multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13"}, + {file = "multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd"}, + {file = "multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:363eb68a0a59bd2303216d2346e6c441ba10d36d1f9969fcb6f1ba700de7bb5c"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d874eb056410ca05fed180b6642e680373688efafc7f077b2a2f61811e873a40"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b55d5497b51afdfde55925e04a022f1de14d4f4f25cdfd4f5d9b0aa96166851"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f8e5c0031b90ca9ce555e2e8fd5c3b02a25f14989cbc310701823832c99eb687"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:9cf41880c991716f3c7cec48e2f19ae4045fc9db5fc9cff27347ada24d710bb5"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8cfc12a8630a29d601f48d47787bd7eb730e475e83edb5d6c5084317463373eb"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3996b50c3237c4aec17459217c1e7bbdead9a22a0fcd3c365564fbd16439dde6"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7f5170993a0dd3ab871c74f45c0a21a4e2c37a2f2b01b5f722a2ad9c6650469e"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ec81878ddf0e98817def1e77d4f50dae5ef5b0e4fe796fae3bd674304172416e"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9281bf5b34f59afbc6b1e477a372e9526b66ca446f4bf62592839c195a718b32"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:68af405971779d8b37198726f2b6fe3955db846fee42db7a4286fc542203934c"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ba3ef510467abb0667421a286dc906e30eb08569365f5cdb131d7aff7c2dd84"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b61189b29081a20c7e4e0b49b44d5d44bb0dc92be3c6d06a11cc043f81bf9329"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fb287618b9c7aa3bf8d825f02d9201b2f13078a5ed3b293c8f4d953917d84d5e"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:521f33e377ff64b96c4c556b81c55d0cfffb96a11c194fd0c3f1e56f3d8dd5a4"}, + {file = "multidict-6.7.0-cp39-cp39-win32.whl", hash = "sha256:ce8fdc2dca699f8dbf055a61d73eaa10482569ad20ee3c36ef9641f69afa8c91"}, + {file = "multidict-6.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:7e73299c99939f089dd9b2120a04a516b95cdf8c1cd2b18c53ebf0de80b1f18f"}, + {file = 
"multidict-6.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:6bdce131e14b04fd34a809b6380dbfd826065c3e2fe8a50dbae659fa0c390546"}, + {file = "multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3"}, + {file = "multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + [[package]] name = "mypy" -version = "1.8.0" +version = "1.18.2" description = "Optional static typing for Python" -category = "dev" optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - 
{file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c"}, + {file = "mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e"}, + {file = "mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b"}, + {file = "mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66"}, + {file = "mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428"}, + {file = "mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed"}, + {file = "mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f"}, + {file = "mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341"}, + {file = "mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d"}, + {file = "mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86"}, + {file = "mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37"}, + {file = "mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8"}, + {file = "mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34"}, + {file = "mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764"}, + {file = "mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893"}, + {file = "mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914"}, + {file = "mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8"}, + {file = "mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074"}, + {file = "mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc"}, + {file = "mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e"}, + {file = "mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986"}, + {file = 
"mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d"}, + {file = "mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba"}, + {file = "mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544"}, + {file = "mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce"}, + {file = "mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d"}, + {file = "mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c"}, + {file = "mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb"}, + {file = "mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075"}, + {file = "mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf"}, + {file = "mypy-1.18.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25a9c8fb67b00599f839cf472713f54249a62efd53a54b565eb61956a7e3296b"}, + {file = "mypy-1.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2b9c7e284ee20e7598d6f42e13ca40b4928e6957ed6813d1ab6348aa3f47133"}, + {file = "mypy-1.18.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6985ed057513e344e43a26cc1cd815c7a94602fb6a3130a34798625bc2f07b6"}, + {file = "mypy-1.18.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:22f27105f1525ec024b5c630c0b9f36d5c1cc4d447d61fe51ff4bd60633f47ac"}, + {file = "mypy-1.18.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:030c52d0ea8144e721e49b1f68391e39553d7451f0c3f8a7565b59e19fcb608b"}, + {file = "mypy-1.18.2-cp39-cp39-win_amd64.whl", hash = "sha256:aa5e07ac1a60a253445797e42b8b2963c9675563a94f11291ab40718b016a7a0"}, + {file = "mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e"}, + {file = "mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" +mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "1.0.0" +version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] [[package]] name = "nbclient" -version = "0.9.0" +version = "0.10.2" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
-category = "dev" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" +groups = ["dev"] files = [ - {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"}, - {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"}, + {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, + {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, ] [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" nbformat = ">=5.1" traitlets = ">=5.4" [package.extras] dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] +docs = ["autodoc-traits", "flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "mock", "moto", "myst-parser", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling", "testpath", "xmltodict"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] name = "nbconvert" -version = "7.16.0" -description = "Converting Jupyter Notebooks" -category = "dev" +version = "7.16.6" +description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. 
nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "nbconvert-7.16.0-py3-none-any.whl", hash = "sha256:ad3dc865ea6e2768d31b7eb6c7ab3be014927216a5ece3ef276748dd809054c7"}, - {file = "nbconvert-7.16.0.tar.gz", hash = "sha256:813e6553796362489ae572e39ba1bff978536192fb518e10826b0e8cadf03ec8"}, + {file = "nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b"}, + {file = "nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582"}, ] [package.dependencies] beautifulsoup4 = "*" -bleach = "!=5.0.0" +bleach = {version = "!=5.0.0", extras = ["css"]} defusedxml = "*" jinja2 = ">=3.0" jupyter-core = ">=4.7" @@ -2278,34 +3090,33 @@ nbformat = ">=5.7" packaging = "*" pandocfilters = ">=1.4.1" pygments = ">=2.4.1" -tinycss2 = "*" traitlets = ">=5.1" [package.extras] -all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["nbconvert[qtpng]"] +qtpdf = ["pyqtwebengine (>=5.15)"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] webpdf = ["playwright"] [[package]] name = "nbformat" -version = "5.9.2" +version = "5.10.4" description = "The Jupyter Notebook format" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = 
"nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"}, - {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"}, + {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, + {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, ] [package.dependencies] -fastjsonschema = "*" +fastjsonschema = ">=2.15" jsonschema = ">=2.6" -jupyter-core = "*" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" traitlets = ">=5.1" [package.extras] @@ -2316,9 +3127,9 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.6.0" description = "Patch asyncio to allow nested event loops" -category = "dev" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -2326,35 +3137,35 @@ files = [ [[package]] name = "notebook" -version = "7.1.0" +version = "7.4.7" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "notebook-7.1.0-py3-none-any.whl", hash = "sha256:a8fa4ccb5e5fe220f29d9900337efd7752bc6f2efe004d6f320db01f7743adc9"}, - {file = "notebook-7.1.0.tar.gz", hash = "sha256:99caf01ff166b1cc86355c9b37c1ba9bf566c1d7fc4ab57bb6f8f24e36c4260e"}, + {file = "notebook-7.4.7-py3-none-any.whl", hash = "sha256:362b7c95527f7dd3c4c84d410b782872fd9c734fb2524c11dd92758527b6eda6"}, + {file = "notebook-7.4.7.tar.gz", hash = "sha256:3f0a04027dfcee8a876de48fba13ab77ec8c12f72f848a222ed7f5081b9e342a"}, ] [package.dependencies] 
jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.1.1,<4.2" -jupyterlab-server = ">=2.22.1,<3" +jupyterlab = ">=4.4.9,<4.5" +jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2,<0.3" tornado = ">=6.2.0" [package.extras] dev = ["hatch", "pre-commit"] docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] +test = ["importlib-resources (>=5.0) ; python_version < \"3.10\"", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] [[package]] name = "notebook-shim" version = "0.2.4" description = "A shim layer for notebook traits and config" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, @@ -2370,9 +3181,9 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" name = "numpy" version = "1.26.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -2416,9 +3227,10 @@ files = [ name = "overrides" version = "7.7.0" description = "A 
decorator to automatically detect mismatch when overriding a method." -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "python_version < \"3.12\"" files = [ {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, @@ -2426,96 +3238,23 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "25.0" description = "Core utilities for Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pandas" -version = "2.2.0" -description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, - {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, - {file = 
"pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, - {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, - {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, - {file = 
"pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, - {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, - {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, - {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = 
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", 
"psycopg2 (>=2.9.6)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] [[package]] name = "pandocfilters" version = "1.5.1" description = "Utilities for writing pandoc filters in python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, @@ -2523,14 +3262,14 @@ files = [ [[package]] name = "paramiko" -version = "3.4.0" +version = "3.5.1" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, - {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, + {file = "paramiko-3.5.1-py3-none-any.whl", hash = "sha256:43b9a0501fc2b5e70680388d9346cf252cfb7d00b0667c39e80eb43a408b8f61"}, + {file = "paramiko-3.5.1.tar.gz", hash = "sha256:b2c665bc45b2b215bd7d7f039901b14b067da00f3a11e6640995fd58f2664822"}, ] [package.dependencies] @@ -2539,33 +3278,33 @@ cryptography = ">=3.3" pynacl = ">=1.5" [package.extras] -all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] -gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +all = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] +gssapi = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "pyasn1 (>=0.1.7)", "pywin32 
(>=2.1.8) ; platform_system == \"Windows\""] invoke = ["invoke (>=2.0)"] [[package]] name = "parso" -version = "0.8.3" +version = "0.8.5" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, + {file = "parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887"}, + {file = "parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a"}, ] [package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] [[package]] name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, @@ -2575,9 +3314,9 @@ files = [ name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -2587,9 +3326,10 @@ files = [ name = "pexpect" version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." -category = "dev" optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -2600,43 +3340,44 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" +version = "4.5.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3"}, + {file = "platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +docs = ["furo (>=2025.9.25)", "proselint (>=0.14)", "sphinx (>=8.2.3)", "sphinx-autodoc-typehints (>=3.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.4.2)", "pytest-cov (>=7)", "pytest-mock (>=3.15.1)"] +type = ["mypy (>=1.18.2)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] 
name = "poethepoet" version = "0.20.0" description = "A task runner that works well with poetry." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "poethepoet-0.20.0-py3-none-any.whl", hash = "sha256:cb37be15f3895ccc65ddf188c2e3d8fb79e26cc9d469a6098cb1c6f994659f6f"}, {file = "poethepoet-0.20.0.tar.gz", hash = "sha256:ca5a2a955f52dfb0a53fad3c989ef0b69ce3d5ec0f6bfa9b1da1f9e32d262e20"}, @@ -2649,16 +3390,77 @@ tomli = ">=1.2.2" [package.extras] poetry-plugin = ["poetry (>=1.0,<2.0)"] +[[package]] +name = "polars" +version = "1.35.2" +description = "Blazingly fast DataFrame library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "polars-1.35.2-py3-none-any.whl", hash = "sha256:5e8057c8289ac148c793478323b726faea933d9776bd6b8a554b0ab7c03db87e"}, + {file = "polars-1.35.2.tar.gz", hash = "sha256:ae458b05ca6e7ca2c089342c70793f92f1103c502dc1b14b56f0a04f2cc1d205"}, +] + +[package.dependencies] +polars-runtime-32 = "1.35.2" + +[package.extras] +adbc = ["adbc-driver-manager[dbapi]", "adbc-driver-sqlite[dbapi]"] +all = ["polars[async,cloudpickle,database,deltalake,excel,fsspec,graph,iceberg,numpy,pandas,plot,pyarrow,pydantic,style,timezone]"] +async = ["gevent"] +calamine = ["fastexcel (>=0.9)"] +cloudpickle = ["cloudpickle"] +connectorx = ["connectorx (>=0.3.2)"] +database = ["polars[adbc,connectorx,sqlalchemy]"] +deltalake = ["deltalake (>=1.0.0)"] +excel = ["polars[calamine,openpyxl,xlsx2csv,xlsxwriter]"] +fsspec = ["fsspec"] +gpu = ["cudf-polars-cu12"] +graph = ["matplotlib"] +iceberg = ["pyiceberg (>=0.7.1)"] +numpy = ["numpy (>=1.16.0)"] +openpyxl = ["openpyxl (>=3.0.0)"] +pandas = ["pandas", "polars[pyarrow]"] +plot = ["altair (>=5.4.0)"] +polars-cloud = ["polars_cloud (>=0.0.1a1)"] +pyarrow = ["pyarrow (>=7.0.0)"] +pydantic = ["pydantic"] +rt64 = ["polars-runtime-64 (==1.35.2)"] +rtcompat = ["polars-runtime-compat (==1.35.2)"] +sqlalchemy = ["polars[pandas]", "sqlalchemy"] +style = 
["great-tables (>=0.8.0)"] +timezone = ["tzdata ; platform_system == \"Windows\""] +xlsx2csv = ["xlsx2csv (>=0.8.0)"] +xlsxwriter = ["xlsxwriter"] + +[[package]] +name = "polars-runtime-32" +version = "1.35.2" +description = "Blazingly fast DataFrame library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "polars_runtime_32-1.35.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:e465d12a29e8df06ea78947e50bd361cdf77535cd904fd562666a8a9374e7e3a"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:ef2b029b78f64fb53f126654c0bfa654045c7546bd0de3009d08bd52d660e8cc"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dda0994b5dff7f456bb2f4bbd22be9a9e5c5e28670e23fedb13601ec99a46d"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:3b9006902fc51b768ff747c0f74bd4ce04005ee8aeb290ce9c07ce1cbe1b58a9"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-win_amd64.whl", hash = "sha256:ddc015fac39735592e2e7c834c02193ba4d257bb4c8c7478b9ebe440b0756b84"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-win_arm64.whl", hash = "sha256:6861145aa321a44eda7cc6694fb7751cb7aa0f21026df51b5faa52e64f9dc39b"}, + {file = "polars_runtime_32-1.35.2.tar.gz", hash = "sha256:6e6e35733ec52abe54b7d30d245e6586b027d433315d20edfb4a5d162c79fe90"}, +] + [[package]] name = "prometheus-client" -version = "0.20.0" +version = "0.23.1" description = "Python client for the Prometheus monitoring system." 
-category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, + {file = "prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99"}, + {file = "prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce"}, ] [package.extras] @@ -2666,76 +3468,209 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.43" +version = "3.0.52" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955"}, + {file = "prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855"}, ] [package.dependencies] wcwidth = "*" +[[package]] +name = "propcache" +version = "0.4.1" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c"}, + {file = "propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb"}, + {file = "propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37"}, + {file = "propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f"}, + {file = "propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1"}, + {file = "propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6"}, + {file = "propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75"}, + {file = "propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8"}, + {file = "propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db"}, + {file = "propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af"}, + {file = 
"propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66"}, + {file = "propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81"}, + {file = "propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e"}, + {file = "propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835"}, + {file = 
"propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1"}, + {file = "propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717"}, + {file = "propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37"}, + {file = "propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a"}, + {file = 
"propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144"}, + {file = 
"propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f"}, + {file = "propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153"}, + {file = "propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", 
hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455"}, + {file = "propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85"}, + {file = "propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1"}, + {file = "propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88"}, 
+ {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183"}, + {file = "propcache-0.4.1-cp39-cp39-win32.whl", hash = "sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19"}, + {file = "propcache-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f"}, + {file = "propcache-0.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938"}, + {file = "propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237"}, + {file = "propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d"}, +] + [[package]] name = "psutil" -version = "5.9.8" -description = "Cross-platform lib for process and system monitoring in Python." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = 
"sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +version = "7.1.3" +description = "Cross-platform lib for process and system monitoring." +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc"}, + {file = "psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0"}, + {file = "psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7"}, + {file = "psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251"}, + {file = "psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa"}, + {file = "psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee"}, + {file = "psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353"}, + {file = "psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b"}, + {file = "psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9"}, + {file = "psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f"}, + {file = "psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7"}, + {file = "psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264"}, + {file = "psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab"}, + {file = "psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880"}, + {file = "psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3"}, + {file = "psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b"}, + {file = "psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd"}, + {file = "psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1"}, + {file = "psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74"}, ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +dev = ["abi3audit", "black", "check-manifest", "colorama ; os_name == \"nt\"", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pyreadline ; os_name == \"nt\"", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-xdist", "pywin32 ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", 
"requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "validate-pyproject[all]", "virtualenv", "vulture", "wheel", "wheel ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "wmi ; os_name == \"nt\" and platform_python_implementation != \"PyPy\""] +test = ["pytest", "pytest-instafail", "pytest-subtests", "pytest-xdist", "pywin32 ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "setuptools", "wheel ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "wmi ; os_name == \"nt\" and platform_python_implementation != \"PyPy\""] [[package]] name = "psycopg2" -version = "2.9.9" +version = "2.9.11" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, - {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, - {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, - {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, - {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, - {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, - {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", 
hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, - {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, - {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, + {file = "psycopg2-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:103e857f46bb76908768ead4e2d0ba1d1a130e7b8ed77d3ae91e8b33481813e8"}, + {file = "psycopg2-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:210daed32e18f35e3140a1ebe059ac29209dd96468f2f7559aa59f75ee82a5cb"}, + {file = "psycopg2-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:e03e4a6dbe87ff81540b434f2e5dc2bddad10296db5eea7bdc995bf5f4162938"}, + {file = "psycopg2-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:8dc379166b5b7d5ea66dcebf433011dfc51a7bb8a5fc12367fa05668e5fc53c8"}, + {file = "psycopg2-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:f10a48acba5fe6e312b891f290b4d2ca595fc9a06850fe53320beac353575578"}, + {file = "psycopg2-2.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:6ecddcf573777536bddfefaea8079ce959287798c8f5804bee6933635d538924"}, + {file = "psycopg2-2.9.11.tar.gz", hash = "sha256:964d31caf728e217c697ff77ea69c2ba0865fa41ec20bb00f0977e62fdcc52e3"}, ] [[package]] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] +markers = "os_name != \"nt\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -2743,14 +3678,14 @@ files = [ [[package]] name = "pure-eval" -version = "0.2.2" +version = "0.2.3" description = "Safely evaluate AST nodes without side effects" -category = 
"dev" optional = false python-versions = "*" +groups = ["dev"] files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, ] [package.extras] @@ -2758,14 +3693,14 @@ tests = ["pytest"] [[package]] name = "pycap" -version = "2.6.0" +version = "2.7.0" description = "PyCap: Python interface to REDCap" -category = "main" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.10" +groups = ["main"] files = [ - {file = "pycap-2.6.0-py3-none-any.whl", hash = "sha256:404a7ba299fa57f0fcadd9f4b6df123e593deda1dcb12b341f39b416b6e83d6b"}, - {file = "pycap-2.6.0.tar.gz", hash = "sha256:68d7403bf573b03ae24cb252fb1e5f73fe365b6c9d54c46199014edaffcc8f94"}, + {file = "pycap-2.7.0-py3-none-any.whl", hash = "sha256:f7e1342b842b6c2af55e30afc54a7e49d6fa7ba446b3c614ec7d87f90ff3e58d"}, + {file = "pycap-2.7.0.tar.gz", hash = "sha256:02f7ad47cc3d729b126d34850039fc942683a8061a348abc5105b344a1823f44"}, ] [package.dependencies] @@ -2773,15 +3708,15 @@ requests = ">=2.20,<3.0" semantic-version = ">=2.8.5,<3.0.0" [package.extras] -data-science = ["pandas (>=1.3.4,<2.0.0)"] +data-science = ["pandas (>=2.0.0,<3.0.0)"] [[package]] name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, {file = "pycodestyle-2.11.1.tar.gz", hash = 
"sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, @@ -2789,23 +3724,23 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.23" description = "C parser in Python" -category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, ] [[package]] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, @@ -2815,15 +3750,15 @@ files = [ snowballstemmer = ">=2.2.0" [package.extras] -toml = ["tomli (>=1.2.3)"] +toml = ["tomli (>=1.2.3) ; python_version < \"3.11\""] [[package]] name = "pyfairdatatools" version = "0.1.3" description = "Tools for AI-READI" -category = "main" optional = false python-versions = ">=3.8,<4.0" +groups = ["main"] files = [ {file = "pyfairdatatools-0.1.3-py3-none-any.whl", hash = "sha256:1ee1cc6241dc3387c6299dd4308f0f956967be2d3afa4a5d4f074ea01eb76d76"}, {file = "pyfairdatatools-0.1.3.tar.gz", hash = "sha256:0b80da09f8e02d94cf717254c21fc50cf3404b81372272f27019571db9aeb047"}, @@ -2844,9 +3779,9 @@ validators = ">=0.20.0,<0.21.0" name = "pyflakes" version = "3.1.0" 
description = "passive checker of Python programs" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, @@ -2854,45 +3789,44 @@ files = [ [[package]] name = "pygments" -version = "2.17.2" +version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, ] [package.extras] -plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.10.1" description = "JSON Web Token implementation in Python" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = 
"sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" version = "2.17.7" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" +groups = ["dev"] files = [ {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, @@ -2919,9 +3853,9 @@ testutils = ["gitpython (>3)"] name = "pymdown-extensions" version = "10.4" description = "Extension pack for Python Markdown." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pymdown_extensions-10.4-py3-none-any.whl", hash = "sha256:cfc28d6a09d19448bcbf8eee3ce098c7d17ff99f7bd3069db4819af181212037"}, {file = "pymdown_extensions-10.4.tar.gz", hash = "sha256:bc46f11749ecd4d6b71cf62396104b4a200bad3498cb0f5dad1b8502fe461a35"}, @@ -2938,9 +3872,10 @@ extra = ["pygments (>=2.12)"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.14\"" files = [ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, @@ -2961,13 +3896,58 @@ cffi = ">=1.4.1" docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] +[[package]] +name = "pynacl" +version = "1.6.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.14\" or platform_python_implementation == \"PyPy\"" +files = [ + {file = "pynacl-1.6.0-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:f46386c24a65383a9081d68e9c2de909b1834ec74ff3013271f1bca9c2d233eb"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:dea103a1afcbc333bc0e992e64233d360d393d1e63d0bc88554f572365664348"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:04f20784083014e265ad58c1b2dd562c3e35864b5394a14ab54f5d150ee9e53e"}, + {file = 
"pynacl-1.6.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbcc4452a1eb10cd5217318c822fde4be279c9de8567f78bad24c773c21254f8"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51fed9fe1bec9e7ff9af31cd0abba179d0e984a2960c77e8e5292c7e9b7f7b5d"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:10d755cf2a455d8c0f8c767a43d68f24d163b8fe93ccfaabfa7bafd26be58d73"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:536703b8f90e911294831a7fbcd0c062b837f3ccaa923d92a6254e11178aaf42"}, + {file = "pynacl-1.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6b08eab48c9669d515a344fb0ef27e2cbde847721e34bba94a343baa0f33f1f4"}, + {file = "pynacl-1.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5789f016e08e5606803161ba24de01b5a345d24590a80323379fc4408832d290"}, + {file = "pynacl-1.6.0-cp314-cp314t-win32.whl", hash = "sha256:4853c154dc16ea12f8f3ee4b7e763331876316cc3a9f06aeedf39bcdca8f9995"}, + {file = "pynacl-1.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:347dcddce0b4d83ed3f32fd00379c83c425abee5a9d2cd0a2c84871334eaff64"}, + {file = "pynacl-1.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2d6cd56ce4998cb66a6c112fda7b1fdce5266c9f05044fa72972613bef376d15"}, + {file = "pynacl-1.6.0-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:f4b3824920e206b4f52abd7de621ea7a44fd3cb5c8daceb7c3612345dfc54f2e"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:16dd347cdc8ae0b0f6187a2608c0af1c8b7ecbbe6b4a06bff8253c192f696990"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16c60daceee88d04f8d41d0a4004a7ed8d9a5126b997efd2933e08e93a3bd850"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25720bad35dfac34a2bcdd61d9e08d6bfc6041bebc7751d9c9f2446cf1e77d64"}, + {file = 
"pynacl-1.6.0-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8bfaa0a28a1ab718bad6239979a5a57a8d1506d0caf2fba17e524dbb409441cf"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ef214b90556bb46a485b7da8258e59204c244b1b5b576fb71848819b468c44a7"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:49c336dd80ea54780bcff6a03ee1a476be1612423010472e60af83452aa0f442"}, + {file = "pynacl-1.6.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f3482abf0f9815e7246d461fab597aa179b7524628a4bc36f86a7dc418d2608d"}, + {file = "pynacl-1.6.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:140373378e34a1f6977e573033d1dd1de88d2a5d90ec6958c9485b2fd9f3eb90"}, + {file = "pynacl-1.6.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6b393bc5e5a0eb86bb85b533deb2d2c815666665f840a09e0aa3362bb6088736"}, + {file = "pynacl-1.6.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a25cfede801f01e54179b8ff9514bd7b5944da560b7040939732d1804d25419"}, + {file = "pynacl-1.6.0-cp38-abi3-win32.whl", hash = "sha256:dcdeb41c22ff3c66eef5e63049abf7639e0db4edee57ba70531fc1b6b133185d"}, + {file = "pynacl-1.6.0-cp38-abi3-win_amd64.whl", hash = "sha256:cf831615cc16ba324240de79d925eacae8265b7691412ac6b24221db157f6bd1"}, + {file = "pynacl-1.6.0-cp38-abi3-win_arm64.whl", hash = "sha256:84709cea8f888e618c21ed9a0efdb1a59cc63141c403db8bf56c469b71ad56f2"}, + {file = "pynacl-1.6.0.tar.gz", hash = "sha256:cb36deafe6e2bce3b286e5d1f3e1c246e0ccdb8808ddb4550bb2792f2df298f2"}, +] + +[package.dependencies] +cffi = {version = ">=1.4.1", markers = "platform_python_implementation != \"PyPy\" and python_version < \"3.14\""} + +[package.extras] +docs = ["sphinx (<7)", "sphinx_rtd_theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] + [[package]] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false 
python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -2988,9 +3968,9 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, @@ -3005,14 +3985,14 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-describe" -version = "2.2.0" +version = "2.2.1" description = "Describe-style plugin for pytest" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "pytest-describe-2.2.0.tar.gz", hash = "sha256:39bb05eb90f2497d9ca342ef9a0b7fa5bada7e58505aec33f66d661d631955b7"}, - {file = "pytest_describe-2.2.0-py3-none-any.whl", hash = "sha256:bd9e2c73acb4b9522a8400823d98f5b6a081667d3bfd7243a8598336896b544d"}, + {file = "pytest_describe-2.2.1-py3-none-any.whl", hash = "sha256:b7ea0c995599cb6db050b928d5650afdced3629b98cbb71c091fbf9ae5443ed0"}, + {file = "pytest_describe-2.2.1.tar.gz", hash = "sha256:eb59307333508d700082a4a8aadba0f874f02c73602f7113071b0b41bfe183b7"}, ] [package.dependencies] @@ -3022,9 +4002,9 @@ pytest = ">=4.6,<9" name = "pytest-expecter" version = "3.0" description = "Better testing with expecter and pytest." 
-category = "dev" optional = false python-versions = ">=3.8,<4.0" +groups = ["dev"] files = [ {file = "pytest-expecter-3.0.tar.gz", hash = "sha256:be8f3e9f823af6d6713e3f552ed47560061a2fd243a78952180f5df61a2b76a4"}, {file = "pytest_expecter-3.0-py3-none-any.whl", hash = "sha256:98fe65ecc1ddb7ca29084dc68ec07983dbbdb20b566fd14140b0b5f4b7c84cc8"}, @@ -3034,9 +4014,9 @@ files = [ name = "pytest-random" version = "0.02" description = "py.test plugin to randomize tests" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "pytest-random-0.02.tar.gz", hash = "sha256:92f25db8c5d9ffc20d90b51997b914372d6955cb9cf1f6ead45b90514fc0eddd"}, ] @@ -3046,14 +4026,14 @@ pytest = ">=2.2.3" [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -3061,14 +4041,14 @@ six = ">=1.5" [[package]] name = "python-dotenv" -version = "1.0.1" +version = "1.2.1" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - 
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, + {file = "python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61"}, + {file = "python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6"}, ] [package.extras] @@ -3076,138 +4056,132 @@ cli = ["click (>=5.0)"] [[package]] name = "python-json-logger" -version = "2.0.7" -description = "A python library adding a json log formatter" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -category = "main" +version = "4.0.0" +description = "JSON Log Formatter for the Python Logging Package" optional = false -python-versions = "*" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2"}, + {file = "python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f"}, ] -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = 
"sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] +[package.extras] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", 
"mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] [[package]] name = "pywinpty" -version = "2.0.12" +version = "3.0.2" description = "Pseudo terminal support for Windows from Python." -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] +markers = "os_name == \"nt\"" files = [ - {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, - {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, - {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, - {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, - {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, - {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, + {file = "pywinpty-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:65db57fd3387d71e8372b6a54269cbcd0f6dfa6d4616a29e0af749ec19f5c558"}, + {file = "pywinpty-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:327790d70e4c841ebd9d0f295a780177149aeb405bca44c7115a3de5c2054b23"}, + {file = "pywinpty-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:99fdd9b455f0ad6419aba6731a7a0d2f88ced83c3c94a80ff9533d95fa8d8a9e"}, + {file = "pywinpty-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:18f78b81e4cfee6aabe7ea8688441d30247b73e52cd9657138015c5f4ee13a51"}, + {file = "pywinpty-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:663383ecfab7fc382cc97ea5c4f7f0bb32c2f889259855df6ea34e5df42d305b"}, + 
{file = "pywinpty-3.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:28297cecc37bee9f24d8889e47231972d6e9e84f7b668909de54f36ca785029a"}, + {file = "pywinpty-3.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:34b55ae9a1b671fe3eae071d86618110538e8eaad18fcb1531c0830b91a82767"}, + {file = "pywinpty-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:3962daf801bc38dd4de872108c424b5338c9a46c6efca5761854cd66370a9022"}, + {file = "pywinpty-3.0.2.tar.gz", hash = "sha256:1505cc4cb248af42cb6285a65c9c2086ee9e7e574078ee60933d5d7fa86fb004"}, ] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.3" description = "YAML parser and emitter for Python" -category = "main" optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = 
"sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = 
"pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash 
= "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = 
"pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] [[package]] name = "pyyaml-env-tag" -version = "0.1" -description = "A 
custom YAML tag for referencing environment variables in YAML files. " -category = "dev" +version = "1.1" +description = "A custom YAML tag for referencing environment variables in YAML files." optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, - {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, + {file = "pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04"}, + {file = "pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff"}, ] [package.dependencies] @@ -3215,204 +4189,161 @@ pyyaml = "*" [[package]] name = "pyzmq" -version = "25.1.2" +version = "27.1.0" description = "Python bindings for 0MQ" -category = "dev" optional = false -python-versions = ">=3.6" -files = [ - {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, - {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, - {file = 
"pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, - {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, - {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, - {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, - {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, - {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, - {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, - {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, - {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, - {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, - {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, - {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, - {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, - {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, - {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, - {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, - {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, - {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, - {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, - {file = 
"pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, - {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, - {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, - {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, - {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, - {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, - {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, - {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, - {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, - {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, - {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, - {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, - {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, - {file = 
"pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, - {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, - {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, - {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, - {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, - {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, - {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, - {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, - {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, - {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, - {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, - {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, - {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, - {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = 
"sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, - {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, - {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, - {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, - {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, - {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, - {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, - {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, - {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, - {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, - {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, - {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, - {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pyzmq-27.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:508e23ec9bc44c0005c4946ea013d9317ae00ac67778bd47519fdf5a0e930ff4"}, + {file = "pyzmq-27.1.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:507b6f430bdcf0ee48c0d30e734ea89ce5567fd7b8a0f0044a369c176aa44556"}, + {file = "pyzmq-27.1.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf7b38f9fd7b81cb6d9391b2946382c8237fd814075c6aa9c3b746d53076023b"}, + {file = "pyzmq-27.1.0-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03ff0b279b40d687691a6217c12242ee71f0fba28bf8626ff50e3ef0f4410e1e"}, 
+ {file = "pyzmq-27.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:677e744fee605753eac48198b15a2124016c009a11056f93807000ab11ce6526"}, + {file = "pyzmq-27.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd2fec2b13137416a1c5648b7009499bcc8fea78154cd888855fa32514f3dad1"}, + {file = "pyzmq-27.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:08e90bb4b57603b84eab1d0ca05b3bbb10f60c1839dc471fc1c9e1507bef3386"}, + {file = "pyzmq-27.1.0-cp310-cp310-win32.whl", hash = "sha256:a5b42d7a0658b515319148875fcb782bbf118dd41c671b62dae33666c2213bda"}, + {file = "pyzmq-27.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0bb87227430ee3aefcc0ade2088100e528d5d3298a0a715a64f3d04c60ba02f"}, + {file = "pyzmq-27.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:9a916f76c2ab8d045b19f2286851a38e9ac94ea91faf65bd64735924522a8b32"}, + {file = "pyzmq-27.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86"}, + {file = "pyzmq-27.1.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581"}, + {file = "pyzmq-27.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f"}, + {file = "pyzmq-27.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e"}, + {file = "pyzmq-27.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e"}, + {file = "pyzmq-27.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2"}, + {file = "pyzmq-27.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394"}, + {file = "pyzmq-27.1.0-cp311-cp311-win32.whl", hash = 
"sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f"}, + {file = "pyzmq-27.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97"}, + {file = "pyzmq-27.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07"}, + {file = "pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc"}, + {file = "pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113"}, + {file = "pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233"}, + {file = "pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31"}, + {file = "pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28"}, + {file = "pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856"}, + {file = "pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496"}, + {file = "pyzmq-27.1.0-cp312-abi3-win32.whl", hash = "sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd"}, + {file = "pyzmq-27.1.0-cp312-abi3-win_amd64.whl", hash = "sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf"}, + {file = "pyzmq-27.1.0-cp312-abi3-win_arm64.whl", hash = "sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f"}, + {file = "pyzmq-27.1.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5"}, + {file = 
"pyzmq-27.1.0-cp313-cp313-android_24_x86_64.whl", hash = "sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6"}, + {file = "pyzmq-27.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7"}, + {file = "pyzmq-27.1.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05"}, + {file = "pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9"}, + {file = "pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128"}, + {file = "pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39"}, + {file = "pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97"}, + {file = "pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db"}, + {file = "pyzmq-27.1.0-cp313-cp313t-win32.whl", hash = "sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c"}, + {file = "pyzmq-27.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2"}, + {file = "pyzmq-27.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e"}, + {file = "pyzmq-27.1.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:ce980af330231615756acd5154f29813d553ea555485ae712c491cd483df6b7a"}, + {file = "pyzmq-27.1.0-cp314-cp314t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1779be8c549e54a1c38f805e56d2a2e5c009d26de10921d7d51cfd1c8d4632ea"}, + {file = 
"pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7200bb0f03345515df50d99d3db206a0a6bee1955fbb8c453c76f5bf0e08fb96"}, + {file = "pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01c0e07d558b06a60773744ea6251f769cd79a41a97d11b8bf4ab8f034b0424d"}, + {file = "pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:80d834abee71f65253c91540445d37c4c561e293ba6e741b992f20a105d69146"}, + {file = "pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:544b4e3b7198dde4a62b8ff6685e9802a9a1ebf47e77478a5eb88eca2a82f2fd"}, + {file = "pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cedc4c68178e59a4046f97eca31b148ddcf51e88677de1ef4e78cf06c5376c9a"}, + {file = "pyzmq-27.1.0-cp314-cp314t-win32.whl", hash = "sha256:1f0b2a577fd770aa6f053211a55d1c47901f4d537389a034c690291485e5fe92"}, + {file = "pyzmq-27.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:19c9468ae0437f8074af379e986c5d3d7d7bfe033506af442e8c879732bedbe0"}, + {file = "pyzmq-27.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dc5dbf68a7857b59473f7df42650c621d7e8923fb03fa74a526890f4d33cc4d7"}, + {file = "pyzmq-27.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:18339186c0ed0ce5835f2656cdfb32203125917711af64da64dbaa3d949e5a1b"}, + {file = "pyzmq-27.1.0-cp38-cp38-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:753d56fba8f70962cd8295fb3edb40b9b16deaa882dd2b5a3a2039f9ff7625aa"}, + {file = "pyzmq-27.1.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b721c05d932e5ad9ff9344f708c96b9e1a485418c6618d765fca95d4daacfbef"}, + {file = "pyzmq-27.1.0-cp38-cp38-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be883ff3d722e6085ee3f4afc057a50f7f2e0c72d289fd54df5706b4e3d3a50"}, + {file = "pyzmq-27.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b2e592db3a93128daf567de9650a2f3859017b3f7a66bc4ed6e4779d6034976f"}, + {file = 
"pyzmq-27.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad68808a61cbfbbae7ba26d6233f2a4aa3b221de379ce9ee468aa7a83b9c36b0"}, + {file = "pyzmq-27.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e2687c2d230e8d8584fbea433c24382edfeda0c60627aca3446aa5e58d5d1831"}, + {file = "pyzmq-27.1.0-cp38-cp38-win32.whl", hash = "sha256:a1aa0ee920fb3825d6c825ae3f6c508403b905b698b6460408ebd5bb04bbb312"}, + {file = "pyzmq-27.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:df7cd397ece96cf20a76fae705d40efbab217d217897a5053267cd88a700c266"}, + {file = "pyzmq-27.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:96c71c32fff75957db6ae33cd961439f386505c6e6b377370af9b24a1ef9eafb"}, + {file = "pyzmq-27.1.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:49d3980544447f6bd2968b6ac913ab963a49dcaa2d4a2990041f16057b04c429"}, + {file = "pyzmq-27.1.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:849ca054d81aa1c175c49484afaaa5db0622092b5eccb2055f9f3bb8f703782d"}, + {file = "pyzmq-27.1.0-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3970778e74cb7f85934d2b926b9900e92bfe597e62267d7499acc39c9c28e345"}, + {file = "pyzmq-27.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:da96ecdcf7d3919c3be2de91a8c513c186f6762aa6cf7c01087ed74fad7f0968"}, + {file = "pyzmq-27.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9541c444cfe1b1c0156c5c86ece2bb926c7079a18e7b47b0b1b3b1b875e5d098"}, + {file = "pyzmq-27.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e30a74a39b93e2e1591b58eb1acef4902be27c957a8720b0e368f579b82dc22f"}, + {file = "pyzmq-27.1.0-cp39-cp39-win32.whl", hash = "sha256:b1267823d72d1e40701dcba7edc45fd17f71be1285557b7fe668887150a14b78"}, + {file = "pyzmq-27.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0c996ded912812a2fcd7ab6574f4ad3edc27cb6510349431e4930d4196ade7db"}, + {file = "pyzmq-27.1.0-cp39-cp39-win_arm64.whl", hash = 
"sha256:346e9ba4198177a07e7706050f35d733e08c1c1f8ceacd5eb6389d653579ffbc"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c17e03cbc9312bee223864f1a2b13a99522e0dc9f7c5df0177cd45210ac286e6"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f328d01128373cb6763823b2b4e7f73bdf767834268c565151eacb3b7a392f90"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c1790386614232e1b3a40a958454bdd42c6d1811837b15ddbb052a032a43f62"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:448f9cb54eb0cee4732b46584f2710c8bc178b0e5371d9e4fc8125201e413a74"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:05b12f2d32112bf8c95ef2e74ec4f1d4beb01f8b5e703b38537f8849f92cb9ba"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355"}, + {file = "pyzmq-27.1.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:50081a4e98472ba9f5a02850014b4c9b629da6710f8f14f3b15897c666a28f1b"}, + {file = "pyzmq-27.1.0-pp38-pypy38_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = 
"sha256:510869f9df36ab97f89f4cff9d002a89ac554c7ac9cadd87d444aa4cf66abd27"}, + {file = "pyzmq-27.1.0-pp38-pypy38_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1f8426a01b1c4098a750973c37131cf585f61c7911d735f729935a0c701b68d3"}, + {file = "pyzmq-27.1.0-pp38-pypy38_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726b6a502f2e34c6d2ada5e702929586d3ac948a4dbbb7fed9854ec8c0466027"}, + {file = "pyzmq-27.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:bd67e7c8f4654bef471c0b1ca6614af0b5202a790723a58b79d9584dc8022a78"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:722ea791aa233ac0a819fc2c475e1292c76930b31f1d828cb61073e2fe5e208f"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:01f9437501886d3a1dd4b02ef59fb8cc384fa718ce066d52f175ee49dd5b7ed8"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4a19387a3dddcc762bfd2f570d14e2395b2c9701329b266f83dd87a2b3cbd381"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c618fbcd069e3a29dcd221739cacde52edcc681f041907867e0f5cc7e85f172"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff8d114d14ac671d88c89b9224c63d6c4e5a613fe8acd5594ce53d752a3aafe9"}, + {file = "pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540"}, ] [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} -[[package]] -name = "qtconsole" -version = "5.5.1" -description = "Jupyter Qt console" -category = "dev" -optional = false -python-versions = ">= 3.8" -files = [ - {file = "qtconsole-5.5.1-py3-none-any.whl", hash = "sha256:8c75fa3e9b4ed884880ff7cea90a1b67451219279ec33deaee1d59e3df1a5d2b"}, - {file = "qtconsole-5.5.1.tar.gz", hash = "sha256:a0e806c6951db9490628e4df80caec9669b65149c7ba40f9bf033c025a5b56bc"}, -] - 
-[package.dependencies] -ipykernel = ">=4.1" -jupyter-client = ">=4.1" -jupyter-core = "*" -packaging = "*" -pygments = "*" -pyzmq = ">=17.1" -qtpy = ">=2.4.0" -traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" - -[package.extras] -doc = ["Sphinx (>=1.3)"] -test = ["flaky", "pytest", "pytest-qt"] - -[[package]] -name = "qtpy" -version = "2.4.1" -description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "QtPy-2.4.1-py3-none-any.whl", hash = "sha256:1c1d8c4fa2c884ae742b069151b0abe15b3f70491f3972698c683b8e38de839b"}, - {file = "QtPy-2.4.1.tar.gz", hash = "sha256:a5a15ffd519550a1361bdc56ffc07fda56a6af7292f17c7b395d4083af632987"}, -] - -[package.dependencies] -packaging = "*" - -[package.extras] -test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] - [[package]] name = "redis" -version = "5.0.1" +version = "5.3.1" description = "Python client for Redis database and key-value store" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, - {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, + {file = "redis-5.3.1-py3-none-any.whl", hash = "sha256:dc1909bd24669cc31b5f67a039700b16ec30571096c5f1f0d9d2324bff31af97"}, + {file = "redis-5.3.1.tar.gz", hash = "sha256:ca49577a531ea64039b5a36db3d6cd1a0c7a60c34124d46924a45b956e8cf14c"}, ] [package.dependencies] -async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} +PyJWT = ">=2.9.0" [package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] +hiredis 
= ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] [[package]] name = "referencing" -version = "0.33.0" +version = "0.37.0" description = "JSON Referencing + Python" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main", "dev"] files = [ - {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, - {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, + {file = "referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231"}, + {file = "referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8"}, ] [package.dependencies] attrs = ">=22.2.0" rpds-py = ">=0.7.0" +typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} [[package]] name = "requests" -version = "2.31.0" +version = "2.32.5" description = "Python HTTP for Humans." 
-category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -3424,9 +4355,9 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, @@ -3439,274 +4370,337 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, ] +[[package]] +name = "rfc3987-syntax" +version = "1.1.0" +description = "Helper functions to syntactically validate strings according to 
RFC 3987." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "rfc3987_syntax-1.1.0-py3-none-any.whl", hash = "sha256:6c3d97604e4c5ce9f714898e05401a0445a641cfa276432b0a648c80856f6a3f"}, + {file = "rfc3987_syntax-1.1.0.tar.gz", hash = "sha256:717a62cbf33cffdd16dfa3a497d81ce48a660ea691b1ddd7be710c22f00b4a0d"}, +] + +[package.dependencies] +lark = ">=1.2.2" + +[package.extras] +testing = ["pytest (>=8.3.5)"] + [[package]] name = "rpds-py" -version = "0.18.0" +version = "0.28.0" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, - {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, - {file = 
"rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, - {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, - {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, - {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, - {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, - {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, - {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, - {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, - {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, - {file = 
"rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, - {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, - {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, - {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, - {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, - 
{file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, - {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, - {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, - {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, - {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, - {file 
= "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, - {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, - {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, - {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, +python-versions = ">=3.10" +groups = ["main", "dev"] +files = [ + {file = "rpds_py-0.28.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7b6013db815417eeb56b2d9d7324e64fcd4fa289caeee6e7a78b2e11fc9b438a"}, + {file = "rpds_py-0.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a4c6b05c685c0c03f80dabaeb73e74218c49deea965ca63f76a752807397207"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4794c6c3fbe8f9ac87699b131a1f26e7b4abcf6d828da46a3a52648c7930eba"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e8456b6ee5527112ff2354dd9087b030e3429e43a74f480d4a5ca79d269fd85"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:beb880a9ca0a117415f241f66d56025c02037f7c4efc6fe59b5b8454f1eaa50d"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6897bebb118c44b38c9cb62a178e09f1593c949391b9a1a6fe777ccab5934ee7"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b553dd06e875249fd43efd727785efb57a53180e0fde321468222eabbeaafa"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:f0b2044fdddeea5b05df832e50d2a06fe61023acb44d76978e1b060206a8a476"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:05cf1e74900e8da73fa08cc76c74a03345e5a3e37691d07cfe2092d7d8e27b04"}, + {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:efd489fec7c311dae25e94fe7eeda4b3d06be71c68f2cf2e8ef990ffcd2cd7e8"}, + {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ada7754a10faacd4f26067e62de52d6af93b6d9542f0df73c57b9771eb3ba9c4"}, + {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c2a34fd26588949e1e7977cfcbb17a9a42c948c100cab890c6d8d823f0586457"}, + {file = "rpds_py-0.28.0-cp310-cp310-win32.whl", hash = "sha256:f9174471d6920cbc5e82a7822de8dfd4dcea86eb828b04fc8c6519a77b0ee51e"}, + {file = "rpds_py-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:6e32dd207e2c4f8475257a3540ab8a93eff997abfa0a3fdb287cae0d6cd874b8"}, + {file = "rpds_py-0.28.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:03065002fd2e287725d95fbc69688e0c6daf6c6314ba38bdbaa3895418e09296"}, + {file = "rpds_py-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28ea02215f262b6d078daec0b45344c89e161eab9526b0d898221d96fdda5f27"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25dbade8fbf30bcc551cb352376c0ad64b067e4fc56f90e22ba70c3ce205988c"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c03002f54cc855860bfdc3442928ffdca9081e73b5b382ed0b9e8efe6e5e205"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9699fa7990368b22032baf2b2dce1f634388e4ffc03dfefaaac79f4695edc95"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9b06fe1a75e05e0713f06ea0c89ecb6452210fd60e2f1b6ddc1067b990e08d9"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9f83e7b326a3f9ec3ef84cda98fb0a74c7159f33e692032233046e7fd15da2"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_31_riscv64.whl", 
hash = "sha256:0d3259ea9ad8743a75a43eb7819324cdab393263c91be86e2d1901ee65c314e0"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a7548b345f66f6695943b4ef6afe33ccd3f1b638bd9afd0f730dd255c249c9e"}, + {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9a40040aa388b037eb39416710fbcce9443498d2eaab0b9b45ae988b53f5c67"}, + {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f60c7ea34e78c199acd0d3cda37a99be2c861dd2b8cf67399784f70c9f8e57d"}, + {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1571ae4292649100d743b26d5f9c63503bb1fedf538a8f29a98dce2d5ba6b4e6"}, + {file = "rpds_py-0.28.0-cp311-cp311-win32.whl", hash = "sha256:5cfa9af45e7c1140af7321fa0bef25b386ee9faa8928c80dc3a5360971a29e8c"}, + {file = "rpds_py-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd8d86b5d29d1b74100982424ba53e56033dc47720a6de9ba0259cf81d7cecaa"}, + {file = "rpds_py-0.28.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e27d3a5709cc2b3e013bf93679a849213c79ae0573f9b894b284b55e729e120"}, + {file = "rpds_py-0.28.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6b4f28583a4f247ff60cd7bdda83db8c3f5b05a7a82ff20dd4b078571747708f"}, + {file = "rpds_py-0.28.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d678e91b610c29c4b3d52a2c148b641df2b4676ffe47c59f6388d58b99cdc424"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e819e0e37a44a78e1383bf1970076e2ccc4dc8c2bbaa2f9bd1dc987e9afff628"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5ee514e0f0523db5d3fb171f397c54875dbbd69760a414dccf9d4d7ad628b5bd"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3fa06d27fdcee47f07a39e02862da0100cb4982508f5ead53ec533cd5fe55e"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:46959ef2e64f9e4a41fc89aa20dbca2b85531f9a72c21099a3360f35d10b0d5a"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8455933b4bcd6e83fde3fefc987a023389c4b13f9a58c8d23e4b3f6d13f78c84"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:ad50614a02c8c2962feebe6012b52f9802deec4263946cddea37aaf28dd25a66"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5deca01b271492553fdb6c7fd974659dce736a15bae5dad7ab8b93555bceb28"}, + {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:735f8495a13159ce6a0d533f01e8674cec0c57038c920495f87dcb20b3ddb48a"}, + {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:961ca621ff10d198bbe6ba4957decca61aa2a0c56695384c1d6b79bf61436df5"}, + {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2374e16cc9131022e7d9a8f8d65d261d9ba55048c78f3b6e017971a4f5e6353c"}, + {file = "rpds_py-0.28.0-cp312-cp312-win32.whl", hash = "sha256:d15431e334fba488b081d47f30f091e5d03c18527c325386091f31718952fe08"}, + {file = "rpds_py-0.28.0-cp312-cp312-win_amd64.whl", hash = "sha256:a410542d61fc54710f750d3764380b53bf09e8c4edbf2f9141a82aa774a04f7c"}, + {file = "rpds_py-0.28.0-cp312-cp312-win_arm64.whl", hash = "sha256:1f0cfd1c69e2d14f8c892b893997fa9a60d890a0c8a603e88dca4955f26d1edd"}, + {file = "rpds_py-0.28.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e9e184408a0297086f880556b6168fa927d677716f83d3472ea333b42171ee3b"}, + {file = "rpds_py-0.28.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:edd267266a9b0448f33dc465a97cfc5d467594b600fe28e7fa2f36450e03053a"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85beb8b3f45e4e32f6802fb6cd6b17f615ef6c6a52f265371fb916fae02814aa"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d2412be8d00a1b895f8ad827cc2116455196e20ed994bb704bf138fe91a42724"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf128350d384b777da0e68796afdcebc2e9f63f0e9f242217754e647f6d32491"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2036d09b363aa36695d1cc1a97b36865597f4478470b0697b5ee9403f4fe399"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8e1e9be4fa6305a16be628959188e4fd5cd6f1b0e724d63c6d8b2a8adf74ea6"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0a403460c9dd91a7f23fc3188de6d8977f1d9603a351d5db6cf20aaea95b538d"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d7366b6553cdc805abcc512b849a519167db8f5e5c3472010cd1228b224265cb"}, + {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b43c6a3726efd50f18d8120ec0551241c38785b68952d240c45ea553912ac41"}, + {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0cb7203c7bc69d7c1585ebb33a2e6074492d2fc21ad28a7b9d40457ac2a51ab7"}, + {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7a52a5169c664dfb495882adc75c304ae1d50df552fbd68e100fdc719dee4ff9"}, + {file = "rpds_py-0.28.0-cp313-cp313-win32.whl", hash = "sha256:2e42456917b6687215b3e606ab46aa6bca040c77af7df9a08a6dcfe8a4d10ca5"}, + {file = "rpds_py-0.28.0-cp313-cp313-win_amd64.whl", hash = "sha256:e0a0311caedc8069d68fc2bf4c9019b58a2d5ce3cd7cb656c845f1615b577e1e"}, + {file = "rpds_py-0.28.0-cp313-cp313-win_arm64.whl", hash = "sha256:04c1b207ab8b581108801528d59ad80aa83bb170b35b0ddffb29c20e411acdc1"}, + {file = "rpds_py-0.28.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f296ea3054e11fc58ad42e850e8b75c62d9a93a9f981ad04b2e5ae7d2186ff9c"}, + {file = "rpds_py-0.28.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:5a7306c19b19005ad98468fcefeb7100b19c79fc23a5f24a12e06d91181193fa"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5d9b86aa501fed9862a443c5c3116f6ead8bc9296185f369277c42542bd646b"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e5bbc701eff140ba0e872691d573b3d5d30059ea26e5785acba9132d10c8c31d"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5690671cd672a45aa8616d7374fdf334a1b9c04a0cac3c854b1136e92374fe"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f1d92ecea4fa12f978a367c32a5375a1982834649cdb96539dcdc12e609ab1a"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d252db6b1a78d0a3928b6190156042d54c93660ce4d98290d7b16b5296fb7cc"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d61b355c3275acb825f8777d6c4505f42b5007e357af500939d4a35b19177259"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:acbe5e8b1026c0c580d0321c8aae4b0a1e1676861d48d6e8c6586625055b606a"}, + {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8aa23b6f0fc59b85b4c7d89ba2965af274346f738e8d9fc2455763602e62fd5f"}, + {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7b14b0c680286958817c22d76fcbca4800ddacef6f678f3a7c79a1fe7067fe37"}, + {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bcf1d210dfee61a6c86551d67ee1031899c0fdbae88b2d44a569995d43797712"}, + {file = "rpds_py-0.28.0-cp313-cp313t-win32.whl", hash = "sha256:3aa4dc0fdab4a7029ac63959a3ccf4ed605fee048ba67ce89ca3168da34a1342"}, + {file = "rpds_py-0.28.0-cp313-cp313t-win_amd64.whl", hash = "sha256:7b7d9d83c942855e4fdcfa75d4f96f6b9e272d42fffcb72cd4bb2577db2e2907"}, + {file = 
"rpds_py-0.28.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:dcdcb890b3ada98a03f9f2bb108489cdc7580176cb73b4f2d789e9a1dac1d472"}, + {file = "rpds_py-0.28.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f274f56a926ba2dc02976ca5b11c32855cbd5925534e57cfe1fda64e04d1add2"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fe0438ac4a29a520ea94c8c7f1754cdd8feb1bc490dfda1bfd990072363d527"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a358a32dd3ae50e933347889b6af9a1bdf207ba5d1a3f34e1a38cd3540e6733"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e80848a71c78aa328fefaba9c244d588a342c8e03bda518447b624ea64d1ff56"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f586db2e209d54fe177e58e0bc4946bea5fb0102f150b1b2f13de03e1f0976f8"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae8ee156d6b586e4292491e885d41483136ab994e719a13458055bec14cf370"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:a805e9b3973f7e27f7cab63a6b4f61d90f2e5557cff73b6e97cd5b8540276d3d"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5d3fd16b6dc89c73a4da0b4ac8b12a7ecc75b2864b95c9e5afed8003cb50a728"}, + {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6796079e5d24fdaba6d49bda28e2c47347e89834678f2bc2c1b4fc1489c0fb01"}, + {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:76500820c2af232435cbe215e3324c75b950a027134e044423f59f5b9a1ba515"}, + {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bbdc5640900a7dbf9dd707fe6388972f5bbd883633eb68b76591044cfe346f7e"}, + {file = "rpds_py-0.28.0-cp314-cp314-win32.whl", hash = 
"sha256:adc8aa88486857d2b35d75f0640b949759f79dc105f50aa2c27816b2e0dd749f"}, + {file = "rpds_py-0.28.0-cp314-cp314-win_amd64.whl", hash = "sha256:66e6fa8e075b58946e76a78e69e1a124a21d9a48a5b4766d15ba5b06869d1fa1"}, + {file = "rpds_py-0.28.0-cp314-cp314-win_arm64.whl", hash = "sha256:a6fe887c2c5c59413353b7c0caff25d0e566623501ccfff88957fa438a69377d"}, + {file = "rpds_py-0.28.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7a69df082db13c7070f7b8b1f155fa9e687f1d6aefb7b0e3f7231653b79a067b"}, + {file = "rpds_py-0.28.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b1cde22f2c30ebb049a9e74c5374994157b9b70a16147d332f89c99c5960737a"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5338742f6ba7a51012ea470bd4dc600a8c713c0c72adaa0977a1b1f4327d6592"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1460ebde1bcf6d496d80b191d854adedcc619f84ff17dc1c6d550f58c9efbba"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3eb248f2feba84c692579257a043a7699e28a77d86c77b032c1d9fbb3f0219c"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3bbba5def70b16cd1c1d7255666aad3b290fbf8d0fe7f9f91abafb73611a91"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3114f4db69ac5a1f32e7e4d1cbbe7c8f9cf8217f78e6e002cedf2d54c2a548ed"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:4b0cb8a906b1a0196b863d460c0222fb8ad0f34041568da5620f9799b83ccf0b"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf681ac76a60b667106141e11a92a3330890257e6f559ca995fbb5265160b56e"}, + {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1e8ee6413cfc677ce8898d9cde18cc3a60fc2ba756b0dec5b71eb6eb21c49fa1"}, + {file = 
"rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b3072b16904d0b5572a15eb9d31c1954e0d3227a585fc1351aa9878729099d6c"}, + {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b670c30fd87a6aec281c3c9896d3bae4b205fd75d79d06dc87c2503717e46092"}, + {file = "rpds_py-0.28.0-cp314-cp314t-win32.whl", hash = "sha256:8014045a15b4d2b3476f0a287fcc93d4f823472d7d1308d47884ecac9e612be3"}, + {file = "rpds_py-0.28.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7a4e59c90d9c27c561eb3160323634a9ff50b04e4f7820600a2beb0ac90db578"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f5e7101145427087e493b9c9b959da68d357c28c562792300dd21a095118ed16"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:31eb671150b9c62409a888850aaa8e6533635704fe2b78335f9aaf7ff81eec4d"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b55c1f64482f7d8bd39942f376bfdf2f6aec637ee8c805b5041e14eeb771db"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24743a7b372e9a76171f6b69c01aedf927e8ac3e16c474d9fe20d552a8cb45c7"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:389c29045ee8bbb1627ea190b4976a310a295559eaf9f1464a1a6f2bf84dde78"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23690b5827e643150cf7b49569679ec13fe9a610a15949ed48b85eb7f98f34ec"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f0c9266c26580e7243ad0d72fc3e01d6b33866cfab5084a6da7576bcf1c4f72"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4c6c4db5d73d179746951486df97fd25e92396be07fc29ee8ff9a8f5afbdfb27"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash 
= "sha256:a3b695a8fa799dd2cfdb4804b37096c5f6dba1ac7f48a7fbf6d0485bcd060316"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6aa1bfce3f83baf00d9c5fcdbba93a3ab79958b4c7d7d1f55e7fe68c20e63912"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b0f9dceb221792b3ee6acb5438eb1f02b0cb2c247796a72b016dcc92c6de829"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5d0145edba8abd3db0ab22b5300c99dc152f5c9021fab861be0f0544dc3cbc5f"}, + {file = "rpds_py-0.28.0.tar.gz", hash = "sha256:abd4df20485a0983e2ca334a216249b6186d6e3c1627e106651943dbdb791aea"}, ] [[package]] name = "semantic-version" version = "2.10.0" description = "A library implementing the 'SemVer' scheme." -category = "main" optional = false python-versions = ">=2.7" +groups = ["main"] files = [ {file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"}, {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] [package.extras] -dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1)", "coverage", "flake8", "nose2", "readme-renderer (<25.0)", "tox", "wheel", "zest.releaser[recommended]"] +dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1) ; python_version == \"3.4\"", "coverage", "flake8", "nose2", "readme-renderer (<25.0) ; python_version == \"3.4\"", "tox", "wheel", "zest.releaser[recommended]"] doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "send2trash" -version = "1.8.2" +version = "1.8.3" description = "Send file to trash natively under Mac OS X, Windows and Linux" -category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["dev"] files = [ - {file = "Send2Trash-1.8.2-py3-none-any.whl", hash = 
"sha256:a384719d99c07ce1eefd6905d2decb6f8b7ed054025bb0e618919f945de4f679"}, - {file = "Send2Trash-1.8.2.tar.gz", hash = "sha256:c132d59fa44b9ca2b1699af5c86f57ce9f4c5eb56629d5d55fbb7a35f84e2312"}, + {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, + {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, ] [package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] +nativelib = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\"", "pywin32 ; sys_platform == \"win32\""] +objc = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\""] +win32 = ["pywin32 ; sys_platform == \"win32\""] + +[[package]] +name = "setuptools" +version = "80.9.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", 
"sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = 
"sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" +version = "3.0.1" +description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" +groups = ["dev"] files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, + {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, + {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, ] [[package]] name = "soupsieve" -version = "2.5" +version = "2.8" description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c"}, + {file = "soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f"}, ] [[package]] name = "sqlalchemy" -version = "2.0.27" +version = "2.0.44" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = 
"sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, - {file = 
"SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, - {file = 
"SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, - {file = 
"SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, - {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, - {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.44-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:471733aabb2e4848d609141a9e9d56a427c0a038f4abf65dd19d7a21fd563632"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48bf7d383a35e668b984c805470518b635d48b95a3c57cb03f37eaa3551b5f9f"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf4bb6b3d6228fcf3a71b50231199fb94d2dd2611b66d33be0578ea3e6c2726"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:e998cf7c29473bd077704cea3577d23123094311f59bdc4af551923b168332b1"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ebac3f0b5732014a126b43c2b7567f2f0e0afea7d9119a3378bde46d3dcad88e"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-win32.whl", hash = "sha256:3255d821ee91bdf824795e936642bbf43a4c7cedf5d1aed8d24524e66843aa74"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-win_amd64.whl", hash = "sha256:78e6c137ba35476adb5432103ae1534f2f5295605201d946a4198a0dea4b38e7"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c77f3080674fc529b1bd99489378c7f63fcb4ba7f8322b79732e0258f0ea3ce"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26ef74ba842d61635b0152763d057c8d48215d5be9bb8b7604116a059e9985"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a172b31785e2f00780eccab00bc240ccdbfdb8345f1e6063175b3ff12ad1b0"}, + {file = 
"sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9480c0740aabd8cb29c329b422fb65358049840b34aba0adf63162371d2a96e"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17835885016b9e4d0135720160db3095dc78c583e7b902b6be799fb21035e749"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cbe4f85f50c656d753890f39468fcd8190c5f08282caf19219f684225bfd5fd2"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-win32.whl", hash = "sha256:2fcc4901a86ed81dc76703f3b93ff881e08761c63263c46991081fd7f034b165"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-win_amd64.whl", hash = "sha256:9919e77403a483ab81e3423151e8ffc9dd992c20d2603bf17e4a8161111e55f5"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5"}, + {file = 
"sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ff486e183d151e51b1d694c7aa1695747599bb00b9f5f604092b54b74c64a8e1"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b1af8392eb27b372ddb783b317dea0f650241cea5bd29199b22235299ca2e45"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b61188657e3a2b9ac4e8f04d6cf8e51046e28175f79464c67f2fd35bceb0976"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b87e7b91a5d5973dda5f00cd61ef72ad75a1db73a386b62877d4875a8840959c"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:15f3326f7f0b2bfe406ee562e17f43f36e16167af99c4c0df61db668de20002d"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e77faf6ff919aa8cd63f1c4e561cac1d9a454a191bb864d5dd5e545935e5a40"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-win32.whl", hash = "sha256:ee51625c2d51f8baadf2829fae817ad0b66b140573939dd69284d2ba3553ae73"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-win_amd64.whl", hash = "sha256:c1c80faaee1a6c3428cecf40d16a2365bcf56c424c92c2b6f0f9ad204b899e9e"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2fc44e5965ea46909a416fff0af48a219faefd5773ab79e5f8a5fcd5d62b2667"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dc8b3850d2a601ca2320d081874033684e246d28e1c5e89db0864077cfc8f5a9"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d733dec0614bb8f4bcb7c8af88172b974f685a31dc3a65cca0527e3120de5606"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22be14009339b8bc16d6b9dc8780bacaba3402aa7581658e246114abbd2236e3"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:357bade0e46064f88f2c3a99808233e67b0051cdddf82992379559322dfeb183"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4848395d932e93c1595e59a8672aa7400e8922c39bb9b0668ed99ac6fa867822"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-win32.whl", hash = "sha256:2f19644f27c76f07e10603580a47278abb2a70311136a7f8fd27dc2e096b9013"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-win_amd64.whl", hash = "sha256:1df4763760d1de0dfc8192cc96d8aa293eb1a44f8f7a5fbe74caf1b551905c5e"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7027414f2b88992877573ab780c19ecb54d3a536bef3397933573d6b5068be4"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:3fe166c7d00912e8c10d3a9a0ce105569a31a3d0db1a6e82c4e0f4bf16d5eca9"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3caef1ff89b1caefc28f0368b3bde21a7e3e630c2eddac16abd9e47bd27cc36a"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc2856d24afa44295735e72f3c75d6ee7fdd4336d8d3a8f3d44de7aa6b766df2"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:11bac86b0deada30b6b5f93382712ff0e911fe8d31cb9bf46e6b149ae175eff0"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4d18cd0e9a0f37c9f4088e50e3839fcb69a380a0ec957408e0b57cff08ee0a26"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-win32.whl", hash = "sha256:9e9018544ab07614d591a26c1bd4293ddf40752cc435caf69196740516af7100"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-win_amd64.whl", hash = "sha256:8e0e4e66fd80f277a8c3de016a81a554e76ccf6b8d881ee0b53200305a8433f6"}, + {file = "sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05"}, + {file = "sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = ">=1", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = 
["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] @@ -3716,7 +4710,7 @@ mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] @@ -3729,9 +4723,9 @@ sqlcipher = ["sqlcipher3_binary"] name = "sqlalchemy-json" version = "0.7.0" description = "JSON type with nested change tracking for SQLAlchemy" -category = "main" optional = false python-versions = ">= 3.6" +groups = ["main"] files = [ {file = "sqlalchemy-json-0.7.0.tar.gz", hash = "sha256:620d0b26f648f21a8fa9127df66f55f83a5ab4ae010e5397a5c6989a08238561"}, {file = "sqlalchemy_json-0.7.0-py3-none-any.whl", hash = "sha256:27881d662ca18363a4ac28175cc47ea2a6f2bef997ae1159c151026b741818e6"}, @@ -3747,9 +4741,9 @@ dev = ["pytest"] name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", 
hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -3765,14 +4759,14 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "terminado" -version = "0.18.0" +version = "0.18.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "terminado-0.18.0-py3-none-any.whl", hash = "sha256:87b0d96642d0fe5f5abd7783857b9cab167f221a39ff98e3b9619a788a3c0f2e"}, - {file = "terminado-0.18.0.tar.gz", hash = "sha256:1ea08a89b835dd1b8c0c900d92848147cef2537243361b2e3f4dc15df9b6fded"}, + {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, + {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, ] [package.dependencies] @@ -3787,14 +4781,14 @@ typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] [[package]] name = "tinycss2" -version = "1.2.1" +version = "1.4.0" description = "A tiny CSS parser" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, - {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, + {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, + {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, ] [package.dependencies] @@ -3802,88 +4796,130 @@ webencodings = ">=0.4" [package.extras] doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] +test = ["pytest", "ruff"] [[package]] name = "tomli" -version = "2.0.1" +version = 
"2.3.0" description = "A lil' TOML parser" -category = "dev" optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = 
"tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = 
"sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = 
"tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, +] +markers = {main = "python_version == \"3.10\""} [[package]] name = "tomlkit" -version = "0.12.3" +version = "0.13.3" description = "Style preserving TOML library" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, - {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, + {file = "tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0"}, + {file = "tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1"}, ] [[package]] name = "tornado" -version = "6.4" +version = "6.5.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" optional = false -python-versions = ">= 3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, - {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, - {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, - {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, - {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, + {file = "tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6"}, + {file = "tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef"}, + {file = "tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e"}, + {file = "tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882"}, + {file = "tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108"}, + {file = "tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c"}, + {file = "tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4"}, + {file = "tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04"}, + {file = "tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0"}, + {file = "tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f"}, + {file = "tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af"}, + {file = "tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0"}, ] [[package]] name = "traitlets" -version = "5.14.1" +version = "5.14.3" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, - {file = "traitlets-5.14.1.tar.gz", hash = 
"sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "types-python-dateutil" -version = "2.8.19.20240106" +version = "2.9.0.20251108" description = "Typing stubs for python-dateutil" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, - {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, + {file = "types_python_dateutil-2.9.0.20251108-py3-none-any.whl", hash = "sha256:a4a537f0ea7126f8ccc2763eec9aa31ac8609e3c8e530eb2ddc5ee234b3cd764"}, + {file = "types_python_dateutil-2.9.0.20251108.tar.gz", hash = "sha256:d8a6687e197f2fa71779ce36176c666841f811368710ab8d274b876424ebfcaa"}, ] [[package]] name = "types-requests" version = "2.31.0.6" description = "Typing stubs for requests" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, @@ -3896,9 +4932,9 @@ types-urllib3 = "*" 
name = "types-urllib3" version = "1.26.25.14" description = "Typing stubs for urllib3" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, @@ -3906,47 +4942,47 @@ files = [ [[package]] name = "types-waitress" -version = "2.1.4.20240106" +version = "2.1.4.20240421" description = "Typing stubs for waitress" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "types-waitress-2.1.4.20240106.tar.gz", hash = "sha256:65a7240a0771032b2aa073d09f63020aa594c7d84e05b6fefe354ef6f2c47fc2"}, - {file = "types_waitress-2.1.4.20240106-py3-none-any.whl", hash = "sha256:0a608efb7769cff76affa2c9173e5081be95b5dc137677e43fbd826bbf333fe4"}, + {file = "types-waitress-2.1.4.20240421.tar.gz", hash = "sha256:3f961b452865979ba6a09dd3ea79bcce1cfee685a01aad03766c4f9d564651c6"}, + {file = "types_waitress-2.1.4.20240421-py3-none-any.whl", hash = "sha256:0c2d39265e096add609f4d8085f1bf1721e0a91a602a1f0a9187f3f8f3a2a328"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + 
{file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] [[package]] name = "tzdata" -version = "2024.1" +version = "2025.2" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" +groups = ["dev"] files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] [[package]] name = "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, @@ -3957,28 +4993,28 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "1.26.18" +version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main", "dev"] files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." 
-category = "main" optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "validators-0.20.0.tar.gz", hash = "sha256:24148ce4e64100a2d5e267233e23e7afeb55316b47d30faae7eb6e7292bc226a"}, ] @@ -3993,9 +5029,9 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "waitress" version = "2.1.2" description = "Waitress WSGI server" -category = "main" optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "waitress-2.1.2-py3-none-any.whl", hash = "sha256:7500c9625927c8ec60f54377d590f67b30c8e70ef4b8894214ac6e4cad233d2a"}, {file = "waitress-2.1.2.tar.gz", hash = "sha256:780a4082c5fbc0fde6a2fcfe5e26e6efc1e8f425730863c04085769781f51eba"}, @@ -4007,41 +5043,42 @@ testing = ["coverage (>=5.0)", "pytest", "pytest-cover"] [[package]] name = "watchdog" -version = "4.0.0" +version = "6.0.0" description = "Filesystem events monitoring" -category = "dev" optional = false -python-versions = ">=3.8" -files = [ - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, - {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, - {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, - {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, - {file = 
"watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, - {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, - {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, - {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, - {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, 
+ {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, + {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, + {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, + {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, + {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, ] [package.extras] @@ -4049,39 +5086,35 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.2.14" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" +groups = 
["dev"] files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, + {file = "wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1"}, + {file = "wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605"}, ] [[package]] name = "webcolors" -version = "1.13" +version = "25.10.0" description = "A library for working with the color formats defined by HTML and CSS." -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, - {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, + {file = "webcolors-25.10.0-py3-none-any.whl", hash = "sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d"}, + {file = "webcolors-25.10.0.tar.gz", hash = "sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf"}, ] -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["pytest", "pytest-cov"] - [[package]] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -4089,31 +5122,31 @@ files = [ [[package]] name = "websocket-client" 
-version = "1.7.0" +version = "1.9.0" description = "WebSocket client for Python with low level API options" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, - {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, + {file = "websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef"}, + {file = "websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98"}, ] [package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx_rtd_theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] -test = ["websockets"] +test = ["pytest", "websockets"] [[package]] name = "werkzeug" -version = "3.0.1" +version = "3.1.3" description = "The comprehensive WSGI web application library." 
-category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, - {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, ] [package.dependencies] @@ -4124,113 +5157,273 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "widgetsnbextension" -version = "4.0.10" +version = "4.0.15" description = "Jupyter interactive widgets for Jupyter Notebook" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "widgetsnbextension-4.0.10-py3-none-any.whl", hash = "sha256:d37c3724ec32d8c48400a435ecfa7d3e259995201fbefa37163124a9fcb393cc"}, - {file = "widgetsnbextension-4.0.10.tar.gz", hash = "sha256:64196c5ff3b9a9183a8e699a4227fb0b7002f252c814098e66c4d1cd0644688f"}, + {file = "widgetsnbextension-4.0.15-py3-none-any.whl", hash = "sha256:8156704e4346a571d9ce73b84bee86a29906c9abfd7223b7228a28899ccf3366"}, + {file = "widgetsnbextension-4.0.15.tar.gz", hash = "sha256:de8610639996f1567952d763a5a41af8af37f2575a41f9852a38f947eb82a3b9"}, ] [[package]] name = "wrapt" -version = "1.16.0" +version = "1.17.3" description = "Module for decorators, wrappers and monkey patching." 
-category = "dev" optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = 
"wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = 
"wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04"}, + {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2"}, + {file = "wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c"}, + {file = "wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775"}, + {file = "wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd"}, 
+ {file = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05"}, + {file = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418"}, + {file = "wrapt-1.17.3-cp310-cp310-win32.whl", hash = "sha256:a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390"}, + {file = "wrapt-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6"}, + {file = "wrapt-1.17.3-cp310-cp310-win_arm64.whl", hash = "sha256:af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18"}, + {file = "wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7"}, + {file = "wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85"}, + {file = "wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f"}, + {file = "wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311"}, + {file = "wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1"}, + {file = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5"}, + {file = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2"}, + {file = "wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89"}, + {file = "wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77"}, + {file = "wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a"}, + {file = "wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0"}, + {file = "wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba"}, + {file = "wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd"}, + {file = "wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828"}, + {file = "wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9"}, + {file = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396"}, + {file = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc"}, + {file = "wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe"}, + {file = "wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c"}, + {file = "wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6"}, + {file = "wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0"}, + {file = "wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77"}, + {file = "wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7"}, + {file = "wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277"}, + {file = "wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d"}, + {file = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa"}, + {file = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050"}, + {file = "wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8"}, + {file = "wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb"}, + {file = "wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16"}, + {file = "wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39"}, + {file = "wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235"}, + {file = "wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c"}, + {file = "wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b"}, + {file = 
"wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa"}, + {file = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7"}, + {file = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4"}, + {file = "wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10"}, + {file = "wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6"}, + {file = "wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58"}, + {file = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a"}, + {file = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067"}, + {file = "wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454"}, + {file = "wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e"}, + {file = "wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f"}, + {file = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056"}, + {file = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804"}, + 
{file = "wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977"}, + {file = "wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116"}, + {file = "wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6"}, + {file = "wrapt-1.17.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:70d86fa5197b8947a2fa70260b48e400bf2ccacdcab97bb7de47e3d1e6312225"}, + {file = "wrapt-1.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:df7d30371a2accfe4013e90445f6388c570f103d61019b6b7c57e0265250072a"}, + {file = "wrapt-1.17.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:caea3e9c79d5f0d2c6d9ab96111601797ea5da8e6d0723f77eabb0d4068d2b2f"}, + {file = "wrapt-1.17.3-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:758895b01d546812d1f42204bd443b8c433c44d090248bf22689df673ccafe00"}, + {file = "wrapt-1.17.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02b551d101f31694fc785e58e0720ef7d9a10c4e62c1c9358ce6f63f23e30a56"}, + {file = "wrapt-1.17.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:656873859b3b50eeebe6db8b1455e99d90c26ab058db8e427046dbc35c3140a5"}, + {file = "wrapt-1.17.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a9a2203361a6e6404f80b99234fe7fb37d1fc73487b5a78dc1aa5b97201e0f22"}, + {file = "wrapt-1.17.3-cp38-cp38-win32.whl", hash = "sha256:55cbbc356c2842f39bcc553cf695932e8b30e30e797f961860afb308e6b1bb7c"}, + {file = "wrapt-1.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:ad85e269fe54d506b240d2d7b9f5f2057c2aa9a2ea5b32c66f8902f768117ed2"}, + {file = "wrapt-1.17.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:30ce38e66630599e1193798285706903110d4f057aab3168a34b7fdc85569afc"}, + {file = "wrapt-1.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:65d1d00fbfb3ea5f20add88bbc0f815150dbbde3b026e6c24759466c8b5a9ef9"}, + {file = "wrapt-1.17.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7c06742645f914f26c7f1fa47b8bc4c91d222f76ee20116c43d5ef0912bba2d"}, + {file = "wrapt-1.17.3-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7e18f01b0c3e4a07fe6dfdb00e29049ba17eadbc5e7609a2a3a4af83ab7d710a"}, + {file = "wrapt-1.17.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f5f51a6466667a5a356e6381d362d259125b57f059103dd9fdc8c0cf1d14139"}, + {file = "wrapt-1.17.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:59923aa12d0157f6b82d686c3fd8e1166fa8cdfb3e17b42ce3b6147ff81528df"}, + {file = "wrapt-1.17.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:46acc57b331e0b3bcb3e1ca3b421d65637915cfcd65eb783cb2f78a511193f9b"}, + {file = "wrapt-1.17.3-cp39-cp39-win32.whl", hash = "sha256:3e62d15d3cfa26e3d0788094de7b64efa75f3a53875cdbccdf78547aed547a81"}, + {file = "wrapt-1.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:1f23fa283f51c890eda8e34e4937079114c74b4c81d2b2f1f1d94948f5cc3d7f"}, + {file = "wrapt-1.17.3-cp39-cp39-win_arm64.whl", hash = "sha256:24c2ed34dc222ed754247a2702b1e1e89fdbaa4016f324b4b8f1a802d4ffe87f"}, + {file = "wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22"}, + {file = "wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0"}, +] + +[[package]] +name = "yarl" +version = "1.22.0" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e"}, + {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f"}, + {file = 
"yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467"}, + {file = "yarl-1.22.0-cp310-cp310-win32.whl", hash = "sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea"}, + {file = "yarl-1.22.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca"}, + {file = "yarl-1.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e"}, + {file = "yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca"}, + {file = "yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b"}, + {file = "yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82"}, + {file = 
"yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520"}, + {file = "yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8"}, + {file = "yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c"}, + {file = "yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601"}, + {file = 
"yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67"}, + {file = "yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95"}, + {file = "yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d"}, + {file = "yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = 
"sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62"}, + {file = "yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03"}, + {file = "yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249"}, + {file = "yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da"}, + {file = "yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2"}, + {file = "yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79"}, + {file = "yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c"}, + {file = "yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e"}, + {file = "yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27"}, + {file = "yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3aa27acb6de7a23785d81557577491f6c38a5209a254d1191519d07d8fe51748"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:af74f05666a5e531289cb1cc9c883d1de2088b8e5b4de48004e5ca8a830ac859"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:62441e55958977b8167b2709c164c91a6363e25da322d87ae6dd9c6019ceecf9"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b580e71cac3f8113d3135888770903eaf2f507e9421e5697d6ee6d8cd1c7f054"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e81fda2fb4a07eda1a2252b216aa0df23ebcd4d584894e9612e80999a78fd95b"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:99b6fc1d55782461b78221e95fc357b47ad98b041e8e20f47c1411d0aacddc60"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:088e4e08f033db4be2ccd1f34cf29fe994772fb54cfe004bbf54db320af56890"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4e1f6f0b4da23e61188676e3ed027ef0baa833a2e633c29ff8530800edccba"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:84fc3ec96fce86ce5aa305eb4aa9358279d1aa644b71fab7b8ed33fe3ba1a7ca"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5dbeefd6ca588b33576a01b0ad58aa934bc1b41ef89dee505bf2932b22ddffba"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14291620375b1060613f4aab9ebf21850058b6b1b438f386cc814813d901c60b"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a4fcfc8eb2c34148c118dfa02e6427ca278bfd0f3df7c5f99e33d2c0e81eae3e"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:029866bde8d7b0878b9c160e72305bbf0a7342bcd20b9999381704ae03308dc8"}, + {file = "yarl-1.22.0-cp39-cp39-win32.whl", hash = "sha256:4dcc74149ccc8bba31ce1944acee24813e93cfdee2acda3c172df844948ddf7b"}, + {file = "yarl-1.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:10619d9fdee46d20edc49d3479e2f8269d0779f1b031e6f7c2aa1c76be04b7ed"}, + {file = 
"yarl-1.22.0-cp39-cp39-win_arm64.whl", hash = "sha256:dd7afd3f8b0bfb4e0d9fc3c31bfe8a4ec7debe124cfd90619305def3c8ca8cd2"}, + {file = "yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff"}, + {file = "yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71"}, ] +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.1" + [[package]] name = "zipp" -version = "3.17.0" +version = "3.23.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest 
(>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.10" -content-hash = "cbed924469a5f0bbc91cec5bd6f054f02434a02bcc481059fffab9481583adc0" +content-hash = "ea530ff343914cbd5b4bdb672bbafd114d85a99e2901c58a57820f13599520b8" diff --git a/pyproject.toml b/pyproject.toml index e1dffbd8..e8cd533f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,6 @@ homepage = "https://pypi.org/project/pyfairdatatools" documentation = "https://pyfairdatatools.readthedocs.io" repository = "https://github.com/AI-READI/pyfairdatatools" - keywords = [] classifiers = [ "Development Status :: 1 - Planning", @@ -36,8 +35,9 @@ Flask-Cors = "^4.0.0" flask-restx = "^1.1.0" waitress = "^2.1.2" -# Email +# Email flask-mail = "^0.9.1" +flask-mailman = "^1.0.0" # API Client requests = "^2.31.0" @@ -77,10 +77,12 @@ growthbook = "^1.0.0" pyfairdatatools = "0.1.3" # Dashboard ETL -pandas = "^2.2.0" numpy = "^1.26.4" pycap = "^2.6.0" azure-storage-blob = "^12.19.1" +azure-communication-email = "^1.0.0" +azure-storage-file-datalake = "^12.20.0" +polars = "1.35.2" [tool.poetry.group.dev.dependencies] @@ -171,7 +173,6 @@ quiet = true profile = "black" [tool.mypy] - ignore_missing_imports = true no_implicit_optional = true check_untyped_defs = true diff --git a/sql/init.sql b/sql/init.sql index 824179be..ce042e38 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -386,23 +386,23 @@ INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; --- Dumping structure for table public.invited_study_contributor -CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( +-- Dumping structure for table public.invite +CREATE TABLE IF NOT EXISTS "invite" ( "email_address" VARCHAR NOT NULL, "permission" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, "invited_on" 
BIGINT NOT NULL, PRIMARY KEY ("email_address", "study_id"), - CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "invite_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); --- Dumping data for table public.invited_study_contributor: 3 rows -/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; -INSERT INTO "invited_study_contributor" ("email_address", "permission", "study_id", "invited_on") VALUES +-- Dumping data for table public.invite: 3 rows +/*!40000 ALTER TABLE "invite" DISABLE KEYS */; +INSERT INTO "invite" ("email_address", "permission", "study_id", "invited_on") VALUES ('Aliya_Herman@yahoo.com', 'editor', '00000000-0000-0000-0000-000000000001', 1693805470), ('Anastacio50@hotmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470), ('Edward0@gmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470); -/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; +/*!40000 ALTER TABLE "invite" ENABLE KEYS */; -- Dumping structure for table public.participant CREATE TABLE IF NOT EXISTS "participant" ( diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index bda50aa5..b38ac176 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -438,24 +438,24 @@ INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; --- Dumping structure for table public.invited_study_contributor -CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( +-- Dumping structure for table public.invite +CREATE TABLE IF NOT EXISTS "invite" ( "email_address" VARCHAR NOT NULL, "permission" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, "invited_on" BIGINT NOT NULL, PRIMARY KEY ("email_address", 
"study_id"), - CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "invite_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); --- Dumping data for table public.invited_study_contributor: -1 rows -/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; -INSERT INTO "invited_study_contributor" ("email_address", "permission", "study_id", "invited_on") VALUES +-- Dumping data for table public.invite: -1 rows +/*!40000 ALTER TABLE "invite" DISABLE KEYS */; +INSERT INTO "invite" ("email_address", "permission", "study_id", "invited_on") VALUES ('Aliya_Herman@yahoo.com', 'editor', '00000000-0000-0000-0000-000000000001', 1693805470), ('Anastacio50@hotmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470), ('Edward0@gmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470), ('Jailyn17@gmail.com', 'viewer', '00000000-0000-0000-0000-000000000002', 1693805470); -/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; +/*!40000 ALTER TABLE "invite" ENABLE KEYS */; -- Dumping structure for table public.participant CREATE TABLE IF NOT EXISTS "participant" ( diff --git a/sql/specific_tables.sql b/sql/specific_tables.sql index 4147a2e5..ca90c0e8 100644 --- a/sql/specific_tables.sql +++ b/sql/specific_tables.sql @@ -46,7 +46,7 @@ INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', '00000000-0000-0000-0000-000000000001'); -INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES +INSERT INTO "invite" ("email_address", "permission", "invited_on", "study_id") VALUES ('aydan.gasimova@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), 
('bhavesh.patel@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000003'), ('sanjay.soundarajan@@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000004'); diff --git a/templates/accept_general_invitation.html b/templates/accept_general_invitation.html new file mode 100644 index 00000000..68e3731f --- /dev/null +++ b/templates/accept_general_invitation.html @@ -0,0 +1,61 @@ + + + + + Invitation + + + +
+ +

Hello!

+
+

Congratulations! You're one step away from joining fairhub.io and discovering a whole new experience tailored just for you.

+

Please click the button below to sign up.

+ Signup + +

Or see here: + + {{ accept_url }} + +

+ +

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance. +

+ + +
+ + +
+ + + \ No newline at end of file diff --git a/templates/accept_study_invitation.html b/templates/accept_study_invitation.html new file mode 100644 index 00000000..d7aeecc0 --- /dev/null +++ b/templates/accept_study_invitation.html @@ -0,0 +1,57 @@ + + + + Invitation + + + +
+
+

Hello!

+
+

We are pleased to grant you {{ 'a' if role == "viewer" else "an" }} {{ role }} access to {{ study_name }}!

+

Your authorization enables you to delve into this specialized + study area and explore its curated resources.

+

Please click the button to accept the invitation.

+ Accept Invitation and Signup + +

Or see here: + + {{ accept_url }} + +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+
+ diff --git a/templates/device_notification.html b/templates/device_notification.html new file mode 100644 index 00000000..a7e13890 --- /dev/null +++ b/templates/device_notification.html @@ -0,0 +1,61 @@ + + + + + Email verification + + + + +
+ +

Hello!

+
+

We detected a login to your account from {{ device_ip }}.

+

If you do not recognize this activity, please click the button below to change your password.

+ Change Your Password +

+ Or see here: + http://localhost:3000/studies +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+ + + \ No newline at end of file diff --git a/templates/email_verification.html b/templates/email_verification.html new file mode 100644 index 00000000..eb575352 --- /dev/null +++ b/templates/email_verification.html @@ -0,0 +1,55 @@ + + + + + Email verification + + + + +
+ +
+

Hello {{ email }}!

+ +
+

Please verify your email address by clicking the button below:

+ Verify Email Address + +

Or see here: + + {{ verification_url }} +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+ + + \ No newline at end of file diff --git a/templates/forgot_password.html b/templates/forgot_password.html new file mode 100644 index 00000000..a5fc61ab --- /dev/null +++ b/templates/forgot_password.html @@ -0,0 +1,56 @@ + + + + Invitation + + + +
+
+ +
+

Hello {{ ' ' ~ email if email else '' }}!

+

Your password reset request has been received.

+

Please click the button to reset your password.

+ Reset password + +

Or use the following link: + + {{ forgot_password_ }} + +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+
+ diff --git a/templates/invite_contributors.html b/templates/invite_contributors.html new file mode 100644 index 00000000..853b3bf6 --- /dev/null +++ b/templates/invite_contributors.html @@ -0,0 +1,67 @@ + + + + + Invitation + + + + +
+ +

Hello {{ ' ' ~ first_name if first_name else '' }}{{ ' ' ~ last_name if last_name else '' }}!

+
+

We are pleased to grant you {{ 'a' if role == "viewer" else "an" }} {{ role }} access to {{ study_name }}!

+

Your authorization enables you to delve into this specialized + study area and explore its curated resources.

+

Please click the button to accept the invitation:

+ View Study + +

Or see here: + + {{ accept_url }} + +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at + help@fairhub.io + for prompt assistance. +

+
+
Sincerely,
+
FAIRhub team
+
+ + + \ No newline at end of file diff --git a/templates/reset_password.html b/templates/reset_password.html new file mode 100644 index 00000000..d31bb31a --- /dev/null +++ b/templates/reset_password.html @@ -0,0 +1,56 @@ + + + + Invitation + + + +
+
+ +
+

Hello {{ ' ' ~ email if email else '' }}!

+

We wanted to inform you that your password has been successfully changed.

+

If you did not make this change, please click here to reset your password immediately!

+ Reset password + +

Or see here: + + {{ reset_password_ }} + +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+
+ diff --git a/tests/conftest.py b/tests/conftest.py index 96c63dff..84370211 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -109,6 +109,9 @@ pytest.global_viewer_token = "" +pytest.global_reset_token = "" + + # Create the flask app for testing @pytest.fixture(scope="session") def flask_app(): @@ -150,10 +153,26 @@ def _create_user(_test_client): "code": "", }, ) - assert response.status_code == 201 +# @pytest.fixture() +# def _verified_client(flask_app): +# """Verify the user for testing.""" +# +# with flask_app.test_client() as _test_client: +# response = _test_client.post( +# "/auth/email-verification/confirm", +# json={ +# "email": "test@fairhub.io", +# "token": 1234567, +# }, +# ) +# assert response.status_code == 201 +# response.close() +# yield _test_client + + # Fixture to sign in the user for module testing @pytest.fixture(scope="session") def _logged_in_client(flask_app): @@ -186,7 +205,6 @@ def _test_invite_study_contributor(_logged_in_client): assert response.status_code == 201 response_data = json.loads(response.data) - pytest.global_editor_token = response_data["token"] response = _logged_in_client.post( @@ -255,10 +273,45 @@ def _create_viewer_user(flask_app): "code": pytest.global_viewer_token, }, ) - assert response.status_code == 201 +# @pytest.fixture(scope="session") +# def _user_verification_for_testing(flask_app): +# """Create a viewer user for testing.""" +# with flask_app.test_client() as _test_client: +# with unittest.mock.patch("pytest_config.TestConfig", TestConfig): +# a_response = _test_client.post( +# "/auth/email-verification/confirm", +# json={ +# "email": "admin@fairhub.io", +# "token": 1234567, +# }, +# ) +# +# assert a_response.status_code == 201 +# +# e_response = _test_client.post( +# "/auth/email-verification/confirm", +# json={ +# "email": "editor@fairhub.io", +# "token": 1234567, +# }, +# ) +# +# assert e_response.status_code == 201 +# +# v_response = _test_client.post( +# "/auth/email-verification/confirm", +# 
json={ +# "email": "viewer@fairhub.io", +# "token": 1234567, +# }, +# ) +# +# assert v_response.status_code == 201 + + @pytest.fixture(scope="session") def clients(flask_app): """Signs in all clients needed for testing""" @@ -269,6 +322,7 @@ def clients(flask_app): _admin_client = flask_app.test_client() _editor_client = flask_app.test_client() _viewer_client = flask_app.test_client() + # _test_client = flask_app.test_client() with unittest.mock.patch("pytest_config.TestConfig", TestConfig): response = _logged_in_client.post( @@ -280,32 +334,38 @@ def clients(flask_app): ) assert response.status_code == 200 - response = _admin_client.post( + a_response = _admin_client.post( "/auth/login", json={ "email_address": "admin@fairhub.io", "password": "Testingyeshello11!", }, ) - assert response.status_code == 200 + assert a_response.status_code == 200 - response = _editor_client.post( + e_response = _editor_client.post( "/auth/login", json={ "email_address": "editor@fairhub.io", "password": "Testingyeshello11!", }, ) - assert response.status_code == 200 + assert e_response.status_code == 200 - response = _viewer_client.post( + v_response = _viewer_client.post( "/auth/login", json={ "email_address": "viewer@fairhub.io", "password": "Testingyeshello11!", }, ) - assert response.status_code == 200 + assert v_response.status_code == 200 + + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == "session": + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 5 yield _logged_in_client, _admin_client, _editor_client, _viewer_client diff --git a/tests/functional/test_server_launch.py b/tests/functional/test_010_server_launch.py similarity index 88% rename from tests/functional/test_server_launch.py rename to tests/functional/test_010_server_launch.py index c97fe78f..66c23759 100644 --- a/tests/functional/test_server_launch.py +++ b/tests/functional/test_010_server_launch.py @@ -27,6 +27,11 @@ def 
test_db_empty(_test_client, _empty_db, _create_user): print("User created for testing") +# def test_verify_user(_verified_client): +# """Verifies user before testing.""" +# print("User verified for testing") + + def test_signin_user(_logged_in_client): """Signs in user before testing.""" print("User signed in for testing") diff --git a/tests/functional/test_study_api.py b/tests/functional/test_020_study_api.py similarity index 80% rename from tests/functional/test_study_api.py rename to tests/functional/test_020_study_api.py index 0c4257fc..9dd94c85 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_020_study_api.py @@ -17,7 +17,8 @@ def test_post_study(_logged_in_client): json={ "title": "Study Title", "image": "https://api.dicebear.com/6.x/adventurer/svg", - "acronym": "acronym", + "short_description": "short_description", + "clinical_id": "NCT06002048", }, ) @@ -26,7 +27,7 @@ def test_post_study(_logged_in_client): assert response_data["title"] == "Study Title" assert response_data["image"] == "https://api.dicebear.com/6.x/adventurer/svg" - assert response_data["acronym"] == "acronym" + assert response_data["short_description"] == "short_description" pytest.global_study_id = response_data @@ -50,6 +51,11 @@ def test_viewer_editor_user(_create_viewer_user): print("Viewer user created for testing") +# def test_user_verification(_user_verification_for_testing): +# """User are verified for testing""" +# print("Users are verified for testing") + + def test_signin_all_clients(clients): """Signs in all clients for verifying permissions before testing continues.""" _logged_in_client, _admin_client, _editor_client, _viewer_client = clients @@ -93,26 +99,26 @@ def test_update_study(clients): f"/study/{study_id}", json={ "title": "Study Title Updated", - "image": pytest.global_study_id["image"], # type: ignore - "acronym": pytest.global_study_id["acronym"], # type: ignore + "short_description": pytest.global_study_id["short_description"], # type: 
ignore + "clinical_id": "NCT06002048", + "is_overwrite": False, }, ) - assert response.status_code == 200 response_data = json.loads(response.data) pytest.global_study_id = response_data assert response_data["title"] == "Study Title Updated" - assert response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert response_data["id"] == pytest.global_study_id["id"] # type: ignore admin_response = _admin_client.put( f"/study/{study_id}", json={ "title": "Admin Study Title", - "image": pytest.global_study_id["image"], # type: ignore - "acronym": pytest.global_study_id["acronym"], # type: ignore + "short_description": pytest.global_study_id["short_description"], # type: ignore + "clinical_id": "NCT06002048", + "is_overwrite": False, }, ) @@ -121,16 +127,16 @@ def test_update_study(clients): pytest.global_study_id = admin_response_data assert admin_response_data["title"] == "Admin Study Title" - assert admin_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore - assert admin_response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert admin_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert admin_response_data["id"] == pytest.global_study_id["id"] # type: ignore editor_response = _editor_client.put( f"/study/{study_id}", json={ "title": "Editor Study Title", - "image": pytest.global_study_id["image"], # type: ignore - "acronym": pytest.global_study_id["acronym"], # type: ignore + "short_description": pytest.global_study_id["short_description"], # type: ignore + "clinical_id": "NCT06002048", + "is_overwrite": False, }, ) @@ -139,16 +145,16 @@ def test_update_study(clients): pytest.global_study_id = editor_response_data assert editor_response_data["title"] == "Editor 
Study Title" - assert editor_response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert editor_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert editor_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert editor_response_data["id"] == pytest.global_study_id["id"] # type: ignore viewer_response = _viewer_client.put( f"/study/{study_id}", json={ "title": "Viewer Study Title", - "image": pytest.global_study_id["image"], # type: ignore - "acronym": pytest.global_study_id["acronym"], # type: ignore + "short_description": pytest.global_study_id["short_description"], # type: ignore + "clinical_id": "NCT06002048", + "is_overwrite": False, }, ) @@ -185,22 +191,22 @@ def test_get_study_by_id(clients): assert response_data["id"] == pytest.global_study_id["id"] # type: ignore assert response_data["title"] == pytest.global_study_id["title"] # type: ignore assert response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert admin_response_data["id"] == pytest.global_study_id["id"] # type: ignore assert admin_response_data["title"] == pytest.global_study_id["title"] # type: ignore assert admin_response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert admin_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert admin_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert editor_response_data["id"] == pytest.global_study_id["id"] # type: ignore assert editor_response_data["title"] == pytest.global_study_id["title"] # type: ignore assert editor_response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert editor_response_data["acronym"] == 
pytest.global_study_id["acronym"] # type: ignore + assert editor_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert viewer_response_data["id"] == pytest.global_study_id["id"] # type: ignore assert viewer_response_data["title"] == pytest.global_study_id["title"] # type: ignore assert viewer_response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert viewer_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert viewer_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore def test_delete_studies_created(clients): @@ -218,10 +224,10 @@ def test_delete_studies_created(clients): json={ "title": "Delete Me", "image": "https://api.dicebear.com/6.x/adventurer/svg", - "acronym": "acronym", + "short_description": "short_description", + "clinical_id": "NCT06002048", }, ) - assert response.status_code == 201 response_data = json.loads(response.data) study_id = response_data["id"] diff --git a/tests/functional/test_study_dataset_api.py b/tests/functional/test_030_study_dataset_api.py similarity index 100% rename from tests/functional/test_study_dataset_api.py rename to tests/functional/test_030_study_dataset_api.py diff --git a/tests/functional/test_040_study_dataset_metadata_api.py b/tests/functional/test_040_study_dataset_metadata_api.py new file mode 100644 index 00000000..a309b84b --- /dev/null +++ b/tests/functional/test_040_study_dataset_metadata_api.py @@ -0,0 +1,3704 @@ +# pylint: disable=too-many-lines +"""Tests for the Dataset's Metadata API endpoints""" +import json +from time import sleep + +import pytest + + +# ------------------- ACCESS-RIGHTS METADATA ------------------- # +def test_post_dataset_access_rights_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/access-rights' endpoint is requested (PUT) + Then check that 
the response is valid and updates the dataset access metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", + json={ + "access": { + "type": "type", + "description": "description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], + }, + ) + + response_data = json.loads(response.data) + assert response.status_code == 200 + pytest.global_dataset_rights_id = response_data["rights"][0]["id"] + + assert response_data["access"]["type"] == "type" + assert response_data["access"]["description"] == "description" + assert response_data["access"]["url"] == "google.com" + assert response_data["access"]["url_last_checked"] == 123 + + assert response_data["rights"][0]["identifier"] == "Identifier" + assert response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert response_data["rights"][0]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data["rights"][0]["rights"] == "Rights" + assert response_data["rights"][0]["uri"] == "URI" + assert response_data["rights"][0]["license_text"] == "license text" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", + json={ + "access": { + "type": "admin type", + "description": "admin description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Admin Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], + }, + ) + 
+ assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_rights_id_admin = admin_response_data["rights"][0]["id"] + + assert admin_response_data["access"]["type"] == "admin type" + assert admin_response_data["access"]["description"] == "admin description" + assert admin_response_data["access"]["url"] == "google.com" + assert admin_response_data["access"]["url_last_checked"] == 123 + + assert admin_response_data["rights"][0]["identifier"] == "Admin Identifier" + assert admin_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + admin_response_data["rights"][0]["identifier_scheme_uri"] == "Identifier Scheme" + ) + assert admin_response_data["rights"][0]["rights"] == "Rights" + assert admin_response_data["rights"][0]["uri"] == "URI" + assert admin_response_data["rights"][0]["license_text"] == "license text" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", + json={ + "access": { + "type": "editor type", + "description": "editor description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Editor Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_rights_id_editor = editor_response_data["rights"][0]["id"] + + assert editor_response_data["access"]["type"] == "editor type" + assert editor_response_data["access"]["description"] == "editor description" + assert editor_response_data["access"]["url"] == "google.com" + assert editor_response_data["access"]["url_last_checked"] == 123 + + assert editor_response_data["rights"][0]["identifier"] == "Editor Identifier" + assert 
editor_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + editor_response_data["rights"][0]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert editor_response_data["rights"][0]["rights"] == "Rights" + assert editor_response_data["rights"][0]["uri"] == "URI" + assert editor_response_data["rights"][0]["license_text"] == "license text" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", + json={ + "access": { + "type": "viewer type", + "description": "viewer description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Viewer Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_access_rights_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset access metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert 
viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + # Since editor was the last successful PUT request, the response data should match + assert response_data["access"]["type"] == "editor type" + assert response_data["access"]["description"] == "editor description" + assert response_data["access"]["url"] == "google.com" + assert response_data["access"]["url_last_checked"] == 123 + ### + assert response_data["rights"][0]["identifier"] == "Identifier" + assert response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert response_data["rights"][0]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data["rights"][0]["rights"] == "Rights" + assert response_data["rights"][0]["uri"] == "URI" + assert response_data["rights"][0]["license_text"] == "license text" + + assert admin_response_data["rights"][0]["identifier"] == "Identifier" + assert admin_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + admin_response_data["rights"][0]["identifier_scheme_uri"] == "Identifier Scheme" + ) + assert admin_response_data["rights"][0]["rights"] == "Rights" + assert admin_response_data["rights"][0]["uri"] == "URI" + assert admin_response_data["rights"][0]["license_text"] == "license text" + + assert editor_response_data["rights"][0]["identifier"] == "Identifier" + assert editor_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + editor_response_data["rights"][0]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert editor_response_data["rights"][0]["rights"] == "Rights" + assert editor_response_data["rights"][0]["uri"] == "URI" + assert editor_response_data["rights"][0]["license_text"] == "license text" + + assert response_data["rights"][1]["identifier"] == "Admin Identifier" + 
assert response_data["rights"][1]["identifier_scheme"] == "Identifier Scheme" + assert response_data["rights"][1]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data["rights"][1]["rights"] == "Rights" + assert response_data["rights"][1]["uri"] == "URI" + assert response_data["rights"][1]["license_text"] == "license text" + + assert admin_response_data["rights"][1]["identifier"] == "Admin Identifier" + assert admin_response_data["rights"][1]["identifier_scheme"] == "Identifier Scheme" + assert ( + admin_response_data["rights"][1]["identifier_scheme_uri"] == "Identifier Scheme" + ) + assert admin_response_data["rights"][1]["rights"] == "Rights" + assert admin_response_data["rights"][1]["uri"] == "URI" + assert admin_response_data["rights"][1]["license_text"] == "license text" + + assert editor_response_data["rights"][1]["identifier"] == "Admin Identifier" + assert editor_response_data["rights"][1]["identifier_scheme"] == "Identifier Scheme" + assert ( + editor_response_data["rights"][1]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert editor_response_data["rights"][1]["rights"] == "Rights" + assert editor_response_data["rights"][1]["uri"] == "URI" + assert editor_response_data["rights"][1]["license_text"] == "license text" + + assert response_data["rights"][2]["identifier"] == "Editor Identifier" + assert response_data["rights"][2]["identifier_scheme"] == "Identifier Scheme" + assert response_data["rights"][2]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data["rights"][2]["rights"] == "Rights" + assert response_data["rights"][2]["uri"] == "URI" + assert response_data["rights"][2]["license_text"] == "license text" + + assert admin_response_data["rights"][2]["identifier"] == "Editor Identifier" + assert admin_response_data["rights"][2]["identifier_scheme"] == "Identifier Scheme" + assert ( + admin_response_data["rights"][2]["identifier_scheme_uri"] == "Identifier Scheme" + ) + assert 
admin_response_data["rights"][2]["rights"] == "Rights" + assert admin_response_data["rights"][2]["uri"] == "URI" + assert admin_response_data["rights"][2]["license_text"] == "license text" + + assert editor_response_data["rights"][2]["identifier"] == "Editor Identifier" + assert editor_response_data["rights"][2]["identifier_scheme"] == "Identifier Scheme" + assert ( + editor_response_data["rights"][2]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert editor_response_data["rights"][2]["rights"] == "Rights" + assert editor_response_data["rights"][2]["uri"] == "URI" + assert editor_response_data["rights"][2]["license_text"] == "license text" + + assert viewer_response_data["rights"][0]["identifier"] == "Identifier" + assert viewer_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + viewer_response_data["rights"][0]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert viewer_response_data["rights"][0]["rights"] == "Rights" + assert viewer_response_data["rights"][0]["uri"] == "URI" + assert viewer_response_data["rights"][0]["license_text"] == "license text" + + assert viewer_response_data["rights"][1]["identifier"] == "Admin Identifier" + assert viewer_response_data["rights"][1]["identifier_scheme"] == "Identifier Scheme" + assert ( + viewer_response_data["rights"][1]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert viewer_response_data["rights"][1]["rights"] == "Rights" + assert viewer_response_data["rights"][1]["uri"] == "URI" + assert viewer_response_data["rights"][1]["license_text"] == "license text" + + assert viewer_response_data["rights"][2]["identifier"] == "Editor Identifier" + assert viewer_response_data["rights"][2]["identifier_scheme"] == "Identifier Scheme" + assert ( + viewer_response_data["rights"][2]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert viewer_response_data["rights"][2]["rights"] == "Rights" + assert viewer_response_data["rights"][2]["uri"] == "URI" + assert 
viewer_response_data["rights"][2]["license_text"] == "license text" + ####### + assert admin_response_data["access"]["type"] == "editor type" + assert admin_response_data["access"]["description"] == "editor description" + assert admin_response_data["access"]["url"] == "google.com" + assert admin_response_data["access"]["url_last_checked"] == 123 + + assert editor_response_data["access"]["type"] == "editor type" + assert editor_response_data["access"]["description"] == "editor description" + assert editor_response_data["access"]["url"] == "google.com" + assert editor_response_data["access"]["url_last_checked"] == 123 + + assert viewer_response_data["access"]["type"] == "editor type" + assert viewer_response_data["access"]["description"] == "editor description" + assert viewer_response_data["access"]["url"] == "google.com" + assert viewer_response_data["access"]["url_last_checked"] == 123 + + +# ------------------- GENERAL INFORMATION METADATA ------------------- # +def test_post_dataset_general_information_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/general-description' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + general information metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", + json={ + "titles": [{"title": "Owner Title", "type": "Subtitle"}], + "descriptions": [{"description": "Owner Description", "type": "Methods"}], + "dates": [{"date": 20210101, "type": "Accepted", "information": "Info"}], + }, + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert response.status_code == 200 + response_data = 
json.loads(response.data) + + pytest.global_dataset_title_id = response_data["titles"][0]["id"] + pytest.global_dataset_description_id = response_data["descriptions"][0]["id"] + pytest.global_dataset_date_id = response_data["dates"][0]["id"] + + assert response_data["titles"][0]["title"] == "Owner Title" + assert response_data["titles"][0]["type"] == "Subtitle" + assert response_data["descriptions"][0]["description"] == "Owner Description" + assert response_data["descriptions"][0]["type"] == "Methods" + + assert response_data["dates"][0]["date"] == 20210101 + assert response_data["dates"][0]["type"] == "Accepted" + assert response_data["dates"][0]["information"] == "Info" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", + json={ + "titles": [{"title": "Admin Title", "type": "Subtitle"}], + "descriptions": [{"description": "Admin Description", "type": "Methods"}], + "dates": [{"date": 20210102, "type": "Accepted", "information": "Info"}], + }, + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + # assert admin_response.status_code == 200 + + admin_response_data = json.loads(admin_response.data) + + pytest.global_dataset_title_id_admin = admin_response_data["titles"][0]["id"] + pytest.global_dataset_description_id_admin = admin_response_data["descriptions"][0][ + "id" + ] + pytest.global_dataset_date_id_admin = admin_response_data["dates"][0]["id"] + + assert admin_response_data["titles"][0]["title"] == "Admin Title" + assert admin_response_data["titles"][0]["type"] == "Subtitle" + assert admin_response_data["descriptions"][0]["description"] == "Admin Description" + assert admin_response_data["descriptions"][0]["type"] == "Methods" + + assert admin_response_data["dates"][0]["date"] == 20210102 + assert admin_response_data["dates"][0]["type"] == "Accepted" + assert admin_response_data["dates"][0]["information"] == "Info" + + editor_response = _editor_client.post( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", + json={ + "titles": [{"title": "Editor Title", "type": "Subtitle"}], + "descriptions": [{"description": "Editor Description", "type": "Methods"}], + "dates": [{"date": 20210103, "type": "Accepted", "information": "Info"}], + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_title_id_editor = editor_response_data["titles"][0]["id"] + pytest.global_dataset_description_id_editor = editor_response_data["descriptions"][ + 0 + ]["id"] + pytest.global_dataset_date_id_editor = editor_response_data["dates"][0]["id"] + + assert editor_response_data["titles"][0]["title"] == "Editor Title" + assert editor_response_data["titles"][0]["type"] == "Subtitle" + + assert ( + editor_response_data["descriptions"][0]["description"] == "Editor Description" + ) + assert editor_response_data["descriptions"][0]["type"] == "Methods" + + assert editor_response_data["dates"][0]["date"] == 20210103 + assert editor_response_data["dates"][0]["type"] == "Accepted" + assert editor_response_data["dates"][0]["information"] == "Info" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", + json={ + "titles": [{"title": "Viewer Title", "type": "Subtitle"}], + "descriptions": [{"description": "Viewer Description", "type": "Methods"}], + "dates": [{"date": 20210103, "type": "Accepted", "information": "Info"}], + }, + ) + + assert viewer_response.status_code == 403 + + +# ------------------- DELETE GENERAL INFORMATION METADATA ------------------- # +def test_get_dataset_general_information_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/general-information' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + general information 
metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + owner_titles = response_data["titles"] + owner_descriptions = response_data["descriptions"] + owner_dates = response_data["dates"] + + admin_titles = admin_response_data["titles"] + admin_descriptions = admin_response_data["descriptions"] + admin_dates = admin_response_data["dates"] + + editor_titles = editor_response_data["titles"] + editor_descriptions = editor_response_data["descriptions"] + editor_dates = editor_response_data["dates"] + + viewer_titles = viewer_response_data["titles"] + viewer_descriptions = viewer_response_data["descriptions"] + viewer_dates = viewer_response_data["dates"] + + assert len(owner_titles) == 4 + assert len(admin_titles) == 4 + assert len(editor_titles) == 4 + assert len(viewer_titles) == 4 + + # search for maintitle index + # pylint: disable=line-too-long + main_title = next( + (index for (index, d) in enumerate(owner_titles) if d["type"] == "MainTitle"), + None, + ) + a_main_title = next( + ( + 
index + for (index, d) in enumerate(admin_response_data["titles"]) + if d["type"] == "MainTitle" + ), + None, + ) + e_main_title = next( + (index for (index, d) in enumerate(editor_titles) if d["type"] == "MainTitle"), + None, + ) + v_main_title = next( + (index for (index, d) in enumerate(viewer_titles) if d["type"] == "MainTitle"), + None, + ) + # search for admin title index + admin_title = next( + ( + index + for (index, d) in enumerate(owner_titles) + if d["title"] == "Admin Title" + ), + None, + ) + a_admin_title = next( + ( + index + for (index, d) in enumerate(admin_titles) + if d["title"] == "Admin Title" + ), + None, + ) + e_admin_title = next( + ( + index + for (index, d) in enumerate(editor_titles) + if d["title"] == "Admin Title" + ), + None, + ) + v_admin_title = next( + ( + index + for (index, d) in enumerate(viewer_titles) + if d["title"] == "Admin Title" + ), + None, + ) + + # search for editor title index + editor_title = next( + ( + index + for (index, d) in enumerate(owner_titles) + if d["title"] == "Editor Title" + ), + None, + ) + a_editor_title = next( + ( + index + for (index, d) in enumerate(admin_titles) + if d["title"] == "Editor Title" + ), + None, + ) + e_editor_title = next( + ( + index + for (index, d) in enumerate(editor_titles) + if d["title"] == "Editor Title" + ), + None, + ) + v_editor_title = next( + ( + index + for (index, d) in enumerate(viewer_titles) + if d["title"] == "Editor Title" + ), + None, + ) + + # search for owner title index + own_title = next( + ( + index + for (index, d) in enumerate(owner_titles) + if d["title"] == "Owner Title" + ), + None, + ) + a_own_title = next( + ( + index + for (index, d) in enumerate(admin_titles) + if d["title"] == "Owner Title" + ), + None, + ) + e_own_title = next( + ( + index + for (index, d) in enumerate(editor_titles) + if d["title"] == "Owner Title" + ), + None, + ) + v_own_title = next( + ( + index + for (index, d) in enumerate(viewer_titles) + if d["title"] == "Owner Title" + ), 
+ None, + ) + + assert owner_titles[main_title]["title"] == "Dataset Title" + assert owner_titles[main_title]["type"] == "MainTitle" + assert owner_titles[own_title]["title"] == "Owner Title" + assert owner_titles[own_title]["type"] == "Subtitle" + assert owner_titles[admin_title]["title"] == "Admin Title" + assert owner_titles[admin_title]["type"] == "Subtitle" + assert owner_titles[editor_title]["title"] == "Editor Title" + assert owner_titles[editor_title]["type"] == "Subtitle" + + assert admin_titles[a_main_title]["title"] == "Dataset Title" + assert admin_titles[a_main_title]["type"] == "MainTitle" + assert admin_titles[a_own_title]["title"] == "Owner Title" + assert admin_titles[a_own_title]["type"] == "Subtitle" + assert admin_titles[a_admin_title]["title"] == "Admin Title" + assert admin_titles[a_admin_title]["type"] == "Subtitle" + assert admin_titles[a_editor_title]["title"] == "Editor Title" + assert admin_titles[a_editor_title]["type"] == "Subtitle" + + assert editor_titles[e_main_title]["title"] == "Dataset Title" + assert editor_titles[e_main_title]["type"] == "MainTitle" + assert editor_titles[e_own_title]["title"] == "Owner Title" + assert editor_titles[e_own_title]["type"] == "Subtitle" + assert editor_titles[e_admin_title]["title"] == "Admin Title" + assert editor_titles[e_admin_title]["type"] == "Subtitle" + assert editor_titles[e_editor_title]["title"] == "Editor Title" + assert editor_titles[e_editor_title]["type"] == "Subtitle" + + assert viewer_titles[v_main_title]["title"] == "Dataset Title" + assert viewer_titles[v_main_title]["type"] == "MainTitle" + assert viewer_titles[v_own_title]["title"] == "Owner Title" + assert viewer_titles[v_own_title]["type"] == "Subtitle" + assert viewer_titles[v_admin_title]["title"] == "Admin Title" + assert viewer_titles[v_admin_title]["type"] == "Subtitle" + assert viewer_titles[v_editor_title]["title"] == "Editor Title" + assert viewer_titles[v_editor_title]["type"] == "Subtitle" + + assert 
len(owner_descriptions) == 4 + assert len(admin_descriptions) == 4 + assert len(editor_descriptions) == 4 + assert len(viewer_descriptions) == 4 + + # seacrch for type abstract index + main_descrip = next( + ( + index + for (index, d) in enumerate(owner_descriptions) + if d["type"] == "Abstract" + ), + None, + ) + a_main_descrip = next( + ( + index + for (index, d) in enumerate(admin_descriptions) + if d["type"] == "Abstract" + ), + None, + ) + e_main_descrip = next( + ( + index + for (index, d) in enumerate(editor_descriptions) + if d["type"] == "Abstract" + ), + None, + ) + v_main_descrip = next( + ( + index + for (index, d) in enumerate(viewer_descriptions) + if d["type"] == "Abstract" + ), + None, + ) + # search for owner description + # pylint: disable=line-too-long + own_descrip = next( + ( + index + for (index, d) in enumerate(owner_descriptions) + if d["description"] == "Owner Description" + ), + None, + ) + a_own_descrip = next( + ( + index + for (index, d) in enumerate(admin_descriptions) + if d["description"] == "Owner Description" + ), + None, + ) + e_own_descrip = next( + ( + index + for (index, d) in enumerate(editor_descriptions) + if d["description"] == "Owner Description" + ), + None, + ) + v_own_descrip = next( + ( + index + for (index, d) in enumerate(viewer_descriptions) + if d["description"] == "Owner Description" + ), + None, + ) + + # search for admin description + admin_descrip = next( + ( + index + for (index, d) in enumerate(owner_descriptions) + if d["description"] == "Admin Description" + ), + None, + ) + a_admin_descrip = next( + ( + index + for (index, d) in enumerate(admin_descriptions) + if d["description"] == "Admin Description" + ), + None, + ) + e_admin_descrip = next( + ( + index + for (index, d) in enumerate(editor_descriptions) + if d["description"] == "Admin Description" + ), + None, + ) + v_admin_descrip = next( + ( + index + for (index, d) in enumerate(viewer_descriptions) + if d["description"] == "Admin Description" + ), + 
None, + ) + + # search for editor description + edit_descrip = next( + ( + index + for (index, d) in enumerate(owner_descriptions) + if d["description"] == "Editor Description" + ), + None, + ) + a_edit_descrip = next( + ( + index + for (index, d) in enumerate(admin_descriptions) + if d["description"] == "Editor Description" + ), + None, + ) + e_edit_descrip = next( + ( + index + for (index, d) in enumerate(editor_descriptions) + if d["description"] == "Editor Description" + ), + None, + ) + v_edit_descrip = next( + ( + index + for (index, d) in enumerate(viewer_descriptions) + if d["description"] == "Editor Description" + ), + None, + ) + + assert owner_descriptions[main_descrip]["description"] == "Dataset Description" + assert owner_descriptions[main_descrip]["type"] == "Abstract" + assert owner_descriptions[own_descrip]["description"] == "Owner Description" + assert owner_descriptions[own_descrip]["type"] == "Methods" + assert owner_descriptions[admin_descrip]["description"] == "Admin Description" + assert owner_descriptions[admin_descrip]["type"] == "Methods" + assert owner_descriptions[edit_descrip]["description"] == "Editor Description" + assert owner_descriptions[edit_descrip]["type"] == "Methods" + + assert admin_descriptions[a_main_descrip]["description"] == "Dataset Description" + assert admin_descriptions[a_main_descrip]["type"] == "Abstract" + assert admin_descriptions[a_own_descrip]["description"] == "Owner Description" + assert admin_descriptions[a_own_descrip]["type"] == "Methods" + assert admin_descriptions[a_admin_descrip]["description"] == "Admin Description" + assert admin_descriptions[a_admin_descrip]["type"] == "Methods" + assert admin_descriptions[a_edit_descrip]["description"] == "Editor Description" + assert admin_descriptions[a_edit_descrip]["type"] == "Methods" + + assert editor_descriptions[e_main_descrip]["description"] == "Dataset Description" + assert editor_descriptions[e_main_descrip]["type"] == "Abstract" + assert 
editor_descriptions[e_own_descrip]["description"] == "Owner Description" + assert editor_descriptions[e_own_descrip]["type"] == "Methods" + assert editor_descriptions[e_admin_descrip]["description"] == "Admin Description" + assert editor_descriptions[e_admin_descrip]["type"] == "Methods" + assert editor_descriptions[e_edit_descrip]["description"] == "Editor Description" + assert editor_descriptions[e_edit_descrip]["type"] == "Methods" + + assert viewer_descriptions[v_main_descrip]["description"] == "Dataset Description" + assert viewer_descriptions[v_main_descrip]["type"] == "Abstract" + assert viewer_descriptions[v_own_descrip]["description"] == "Owner Description" + assert viewer_descriptions[v_own_descrip]["type"] == "Methods" + assert viewer_descriptions[v_admin_descrip]["description"] == "Admin Description" + assert viewer_descriptions[v_admin_descrip]["type"] == "Methods" + assert viewer_descriptions[v_edit_descrip]["description"] == "Editor Description" + assert viewer_descriptions[v_edit_descrip]["type"] == "Methods" + + assert len(owner_dates) == 3 + assert len(admin_dates) == 3 + assert len(editor_dates) == 3 + assert len(viewer_dates) == 3 + + assert owner_dates[0]["date"] == 20210101 + assert owner_dates[0]["type"] == "Accepted" + assert owner_dates[0]["information"] == "Info" + assert owner_dates[1]["date"] == 20210102 + assert owner_dates[1]["type"] == "Accepted" + assert owner_dates[1]["information"] == "Info" + assert owner_dates[2]["date"] == 20210103 + assert owner_dates[2]["type"] == "Accepted" + + assert admin_dates[0]["date"] == 20210101 + assert admin_dates[0]["type"] == "Accepted" + assert admin_dates[0]["information"] == "Info" + assert admin_dates[1]["date"] == 20210102 + assert admin_dates[1]["type"] == "Accepted" + assert admin_dates[1]["information"] == "Info" + assert admin_dates[2]["date"] == 20210103 + assert admin_dates[2]["type"] == "Accepted" + + assert editor_dates[0]["date"] == 20210101 + assert editor_dates[0]["type"] == 
"Accepted" + assert editor_dates[0]["information"] == "Info" + assert editor_dates[1]["date"] == 20210102 + assert editor_dates[1]["type"] == "Accepted" + assert editor_dates[1]["information"] == "Info" + assert editor_dates[2]["date"] == 20210103 + assert editor_dates[2]["type"] == "Accepted" + + assert viewer_dates[0]["date"] == 20210101 + assert viewer_dates[0]["type"] == "Accepted" + assert viewer_dates[0]["information"] == "Info" + assert viewer_dates[1]["date"] == 20210102 + assert viewer_dates[1]["type"] == "Accepted" + assert viewer_dates[1]["information"] == "Info" + assert viewer_dates[2]["date"] == 20210103 + assert viewer_dates[2]["type"] == "Accepted" + + +# ------------------- DELETE TITLE METADATA ------------------- # +def test_delete_dataset_title_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + title metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + title_id = pytest.global_dataset_title_id + admin_title_id = pytest.global_dataset_title_id_admin + editor_title_id = pytest.global_dataset_title_id_editor + + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{admin_title_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{editor_title_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert 
admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- DELETE DESCRIPTION METADATA ------------------- # +def test_delete_dataset_description_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + description_id = pytest.global_dataset_description_id + admin_description_id = pytest.global_dataset_description_id_admin + editor_description_id = pytest.global_dataset_description_id_editor + + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{description_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{description_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{admin_description_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{editor_description_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- DELETE DATE METADATA ------------------- # +def test_delete_dataset_date_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset date metadata content + """ + 
_logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + date_id = pytest.global_dataset_date_id + admin_date_id = pytest.global_dataset_date_id_admin + editor_date_id = pytest.global_dataset_date_id_editor + + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{admin_date_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{editor_date_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- DATASET TEAM METADATA ------------------- # +def test_post_dataset_team_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/team' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + team metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + 
"scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + creators = response_data["creators"] + contributors = response_data["contributors"] + funders = response_data["funders"] + managing_organization = response_data["managing_organization"] + + pytest.global_dataset_funder_id = funders[0]["id"] + pytest.global_dataset_creator_id = creators[0]["id"] + pytest.global_dataset_contributor_id = contributors[0]["id"] + + assert creators[0]["given_name"] == "Given Name here" + assert creators[0]["family_name"] == "Family Name here" + assert creators[0]["name_type"] == "Personal" + assert creators[0]["name_identifier"] == "Name identifier" + assert creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert creators[0]["creator"] is True + assert creators[0]["affiliations"][0]["name"] == "Test" + assert creators[0]["affiliations"][0]["identifier"] 
== "yes" + assert creators[0]["affiliations"][0]["scheme"] == "uh" + assert creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert funders[0]["name"] == "Name" + assert funders[0]["award_number"] == "award number" + assert funders[0]["award_title"] == "Award Title" + assert funders[0]["award_uri"] == "Award URI" + assert funders[0]["identifier"] == "Identifier" + assert funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert funders[0]["identifier_type"] == "Identifier Type" + + assert contributors[0]["given_name"] == "Given Name here" + assert contributors[0]["family_name"] == "Family Name here" + assert contributors[0]["name_type"] == "Personal" + assert contributors[0]["name_identifier"] == "Name identifier" + assert contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert contributors[0]["creator"] is False + assert contributors[0]["contributor_type"] == "Con Type" + assert contributors[0]["affiliations"][0]["name"] == "Test" + assert contributors[0]["affiliations"][0]["identifier"] == "yes" + assert contributors[0]["affiliations"][0]["scheme"] == "uh" + assert contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert managing_organization["name"] == "Managing Organization Name" + assert managing_organization["identifier"] == "identifier" + assert managing_organization["identifier_scheme"] == "identifier scheme" + assert managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Admin Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": 
"yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Admin Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "admin Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Admin Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + admin_creators = admin_response_data["creators"] + admin_managing_organization = admin_response_data["managing_organization"] + admin_funders = admin_response_data["funders"] + admin_contributors = admin_response_data["contributors"] + + pytest.global_dataset_funder_id_admin = admin_funders[0]["id"] + pytest.global_dataset_creator_id_admin = admin_creators[0]["id"] + pytest.global_dataset_contributor_id_admin = admin_contributors[0]["id"] + + assert admin_creators[0]["given_name"] == "Admin Given Name here" + assert admin_creators[0]["family_name"] == "Family Name here" + assert admin_creators[0]["name_type"] == "Personal" + assert admin_creators[0]["name_identifier"] == "Name identifier" + assert admin_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_creators[0]["name_identifier_scheme_uri"] == 
"Name ID Scheme URI" + assert admin_creators[0]["creator"] is True + assert admin_creators[0]["affiliations"][0]["name"] == "Test" + assert admin_creators[0]["affiliations"][0]["identifier"] == "yes" + assert admin_creators[0]["affiliations"][0]["scheme"] == "uh" + assert admin_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert admin_funders[0]["name"] == "Admin Name" + assert admin_funders[0]["award_number"] == "award number" + assert admin_funders[0]["award_title"] == "Award Title" + assert admin_funders[0]["award_uri"] == "Award URI" + assert admin_funders[0]["identifier"] == "Identifier" + assert admin_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_funders[0]["identifier_type"] == "Identifier Type" + + assert admin_contributors[0]["given_name"] == "Admin Given Name here" + assert admin_contributors[0]["family_name"] == "Family Name here" + assert admin_contributors[0]["name_type"] == "Personal" + assert admin_contributors[0]["name_identifier"] == "Name identifier" + assert admin_contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_contributors[0]["creator"] is False + assert admin_contributors[0]["contributor_type"] == "Con Type" + assert admin_contributors[0]["affiliations"][0]["name"] == "Test" + assert admin_contributors[0]["affiliations"][0]["identifier"] == "yes" + assert admin_contributors[0]["affiliations"][0]["scheme"] == "uh" + assert admin_contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert admin_managing_organization["name"] == "admin Managing Organization Name" + assert admin_managing_organization["identifier"] == "identifier" + assert admin_managing_organization["identifier_scheme"] == "identifier scheme" + assert ( + admin_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" + ) + + editor_response = _editor_client.post( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Editor Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Editor Given Name here", + "family_name": "Editor Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "editor Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Editor Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + editor_creators = editor_response_data["creators"] + editor_managing_organization = editor_response_data["managing_organization"] + editor_funders = editor_response_data["funders"] + editor_contributors = editor_response_data["contributors"] + + pytest.global_dataset_funder_id_editor = editor_funders[0]["id"] + pytest.global_dataset_creator_id_editor = editor_creators[0]["id"] + pytest.global_dataset_contributor_id_editor = editor_contributors[0]["id"] + + assert editor_creators[0]["given_name"] 
== "Editor Given Name here" + assert editor_creators[0]["family_name"] == "Family Name here" + assert editor_creators[0]["name_type"] == "Personal" + assert editor_creators[0]["name_identifier"] == "Name identifier" + assert editor_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_creators[0]["creator"] is True + assert editor_creators[0]["affiliations"][0]["name"] == "Test" + assert editor_creators[0]["affiliations"][0]["identifier"] == "yes" + assert editor_creators[0]["affiliations"][0]["scheme"] == "uh" + assert editor_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert editor_contributors[0]["given_name"] == "Editor Given Name here" + assert editor_contributors[0]["family_name"] == "Editor Family Name here" + assert editor_contributors[0]["name_type"] == "Personal" + assert editor_contributors[0]["name_identifier"] == "Name identifier" + assert editor_contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_contributors[0]["creator"] is False + assert editor_contributors[0]["contributor_type"] == "Con Type" + assert editor_contributors[0]["affiliations"][0]["name"] == "Test" + assert editor_contributors[0]["affiliations"][0]["identifier"] == "yes" + assert editor_contributors[0]["affiliations"][0]["scheme"] == "uh" + assert editor_contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert editor_funders[0]["name"] == "Editor Name" + assert editor_funders[0]["award_number"] == "award number" + assert editor_funders[0]["award_title"] == "Award Title" + assert editor_funders[0]["award_uri"] == "Award URI" + assert editor_funders[0]["identifier"] == "Identifier" + assert ( + editor_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + ) # pylint: disable=line-too-long + assert 
editor_funders[0]["identifier_type"] == "Identifier Type" + + assert editor_managing_organization["name"] == "editor Managing Organization Name" + assert editor_managing_organization["identifier"] == "identifier" + assert editor_managing_organization["identifier_scheme"] == "identifier scheme" + assert ( + editor_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" + ) + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Viewer Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Viewer Given Name here", + "family_name": "Viewer Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "editor Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Viewer Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_team_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and 
dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/team' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + team metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + creators = response_data["creators"] + contributors = response_data["contributors"] + funders = response_data["funders"] + managing_organization = response_data["managing_organization"] + + admin_creators = admin_response_data["creators"] + admin_managing_organization = admin_response_data["managing_organization"] + admin_funders = admin_response_data["funders"] + admin_contributors = admin_response_data["contributors"] + + editor_creators = editor_response_data["creators"] + editor_managing_organization = editor_response_data["managing_organization"] + editor_funders = editor_response_data["funders"] + editor_contributors = editor_response_data["contributors"] + + viewer_creators = viewer_response_data["creators"] + viewer_managing_organization = viewer_response_data["managing_organization"] + viewer_funders = 
viewer_response_data["funders"] + + assert len(funders) == 3 + assert len(admin_funders) == 3 + assert len(editor_funders) == 3 + assert len(viewer_funders) == 3 + + assert funders[0]["name"] == "Name" + assert funders[0]["award_number"] == "award number" + assert funders[0]["award_title"] == "Award Title" + assert funders[0]["award_uri"] == "Award URI" + assert funders[0]["identifier"] == "Identifier" + assert funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert funders[0]["identifier_type"] == "Identifier Type" + assert funders[1]["name"] == "Admin Name" + assert funders[1]["award_number"] == "award number" + assert funders[1]["award_title"] == "Award Title" + assert funders[1]["award_uri"] == "Award URI" + assert funders[1]["identifier"] == "Identifier" + assert funders[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert funders[1]["identifier_type"] == "Identifier Type" + assert funders[2]["name"] == "Editor Name" + assert funders[2]["award_number"] == "award number" + assert funders[2]["award_title"] == "Award Title" + assert funders[2]["award_uri"] == "Award URI" + assert funders[2]["identifier"] == "Identifier" + assert funders[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert funders[2]["identifier_type"] == "Identifier Type" + + assert admin_funders[0]["name"] == "Name" + assert admin_funders[0]["award_number"] == "award number" + assert admin_funders[0]["award_title"] == "Award Title" + assert admin_funders[0]["award_uri"] == "Award URI" + assert admin_funders[0]["identifier"] == "Identifier" + assert admin_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_funders[0]["identifier_type"] == "Identifier Type" + assert admin_funders[1]["name"] == "Admin Name" + assert admin_funders[1]["award_number"] == "award number" + assert admin_funders[1]["award_title"] == "Award Title" + assert admin_funders[1]["award_uri"] == "Award URI" + assert admin_funders[1]["identifier"] == "Identifier" + 
assert admin_funders[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_funders[1]["identifier_type"] == "Identifier Type" + assert admin_funders[2]["name"] == "Editor Name" + assert admin_funders[2]["award_number"] == "award number" + assert admin_funders[2]["award_title"] == "Award Title" + assert admin_funders[2]["award_uri"] == "Award URI" + assert admin_funders[2]["identifier"] == "Identifier" + assert admin_funders[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_funders[2]["identifier_type"] == "Identifier Type" + + assert editor_funders[0]["name"] == "Name" + assert editor_funders[0]["award_number"] == "award number" + assert editor_funders[0]["award_title"] == "Award Title" + assert editor_funders[0]["award_uri"] == "Award URI" + assert editor_funders[0]["identifier"] == "Identifier" + assert editor_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert editor_funders[0]["identifier_type"] == "Identifier Type" + assert editor_funders[1]["name"] == "Admin Name" + assert editor_funders[1]["award_number"] == "award number" + assert editor_funders[1]["award_title"] == "Award Title" + assert editor_funders[1]["award_uri"] == "Award URI" + assert editor_funders[1]["identifier"] == "Identifier" + assert editor_funders[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert editor_funders[1]["identifier_type"] == "Identifier Type" + assert editor_funders[2]["name"] == "Editor Name" + assert editor_funders[2]["award_number"] == "award number" + assert editor_funders[2]["award_title"] == "Award Title" + assert editor_funders[2]["award_uri"] == "Award URI" + assert editor_funders[2]["identifier"] == "Identifier" + assert editor_funders[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert editor_funders[2]["identifier_type"] == "Identifier Type" + + assert viewer_funders[0]["name"] == "Name" + assert viewer_funders[0]["award_number"] == "award number" + assert viewer_funders[0]["award_title"] 
== "Award Title" + assert viewer_funders[0]["award_uri"] == "Award URI" + assert viewer_funders[0]["identifier"] == "Identifier" + assert viewer_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert viewer_funders[0]["identifier_type"] == "Identifier Type" + assert viewer_funders[1]["name"] == "Admin Name" + assert viewer_funders[1]["award_number"] == "award number" + assert viewer_funders[1]["award_title"] == "Award Title" + assert viewer_funders[1]["award_uri"] == "Award URI" + assert viewer_funders[1]["identifier"] == "Identifier" + assert viewer_funders[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert viewer_funders[1]["identifier_type"] == "Identifier Type" + assert viewer_funders[2]["name"] == "Editor Name" + assert viewer_funders[2]["award_number"] == "award number" + assert viewer_funders[2]["award_title"] == "Award Title" + assert viewer_funders[2]["award_uri"] == "Award URI" + assert viewer_funders[2]["identifier"] == "Identifier" + assert viewer_funders[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert viewer_funders[2]["identifier_type"] == "Identifier Type" + + assert len(creators) == 3 + assert len(admin_creators) == 3 + assert len(editor_creators) == 3 + assert len(viewer_creators) == 3 + + assert creators[0]["id"] == pytest.global_dataset_creator_id + assert creators[0]["given_name"] == "Given Name here" + assert creators[0]["family_name"] == "Family Name here" + assert creators[0]["name_type"] == "Personal" + assert creators[0]["name_identifier"] == "Name identifier" + assert creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert creators[0]["creator"] is True + assert creators[0]["affiliations"][0]["name"] == "Test" + assert creators[0]["affiliations"][0]["identifier"] == "yes" + assert creators[0]["affiliations"][0]["scheme"] == "uh" + assert creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert 
creators[1]["id"] == pytest.global_dataset_creator_id_admin + assert creators[1]["given_name"] == "Admin Given Name here" + assert creators[1]["family_name"] == "Family Name here" + assert creators[1]["name_type"] == "Personal" + assert creators[1]["name_identifier"] == "Name identifier" + assert creators[1]["name_identifier_scheme"] == "Name Scheme ID" + assert creators[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert creators[1]["creator"] is True + assert creators[1]["affiliations"][0]["name"] == "Test" + assert creators[1]["affiliations"][0]["identifier"] == "yes" + assert creators[1]["affiliations"][0]["scheme"] == "uh" + assert creators[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert creators[2]["id"] == pytest.global_dataset_creator_id_editor + assert creators[2]["given_name"] == "Editor Given Name here" + assert creators[2]["family_name"] == "Family Name here" + assert creators[2]["name_type"] == "Personal" + assert creators[2]["name_identifier"] == "Name identifier" + assert creators[2]["name_identifier_scheme"] == "Name Scheme ID" + assert creators[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert creators[2]["creator"] is True + assert creators[2]["affiliations"][0]["name"] == "Test" + assert creators[2]["affiliations"][0]["identifier"] == "yes" + assert creators[2]["affiliations"][0]["scheme"] == "uh" + assert creators[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert admin_creators[0]["given_name"] == "Given Name here" + assert admin_creators[0]["family_name"] == "Family Name here" + assert admin_creators[0]["name_type"] == "Personal" + assert admin_creators[0]["name_identifier"] == "Name identifier" + assert admin_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_creators[0]["creator"] is True + assert admin_creators[0]["affiliations"][0]["name"] == "Test" + assert 
admin_creators[0]["affiliations"][0]["identifier"] == "yes" + assert admin_creators[0]["affiliations"][0]["scheme"] == "uh" + assert admin_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_creators[1]["given_name"] == "Admin Given Name here" + assert admin_creators[1]["family_name"] == "Family Name here" + assert admin_creators[1]["name_type"] == "Personal" + assert admin_creators[1]["name_identifier"] == "Name identifier" + assert admin_creators[1]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_creators[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_creators[1]["creator"] is True + assert admin_creators[1]["affiliations"][0]["name"] == "Test" + assert admin_creators[1]["affiliations"][0]["identifier"] == "yes" + assert admin_creators[1]["affiliations"][0]["scheme"] == "uh" + assert admin_creators[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_creators[2]["given_name"] == "Editor Given Name here" + assert admin_creators[2]["family_name"] == "Family Name here" + assert admin_creators[2]["name_type"] == "Personal" + assert admin_creators[2]["name_identifier"] == "Name identifier" + assert admin_creators[2]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_creators[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_creators[2]["creator"] is True + assert admin_creators[2]["affiliations"][0]["name"] == "Test" + assert admin_creators[2]["affiliations"][0]["identifier"] == "yes" + assert admin_creators[2]["affiliations"][0]["scheme"] == "uh" + assert admin_creators[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert editor_creators[0]["given_name"] == "Given Name here" + assert editor_creators[0]["family_name"] == "Family Name here" + assert editor_creators[0]["name_type"] == "Personal" + assert editor_creators[0]["name_identifier"] == "Name identifier" + assert editor_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert 
editor_creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_creators[0]["creator"] is True + assert editor_creators[0]["affiliations"][0]["name"] == "Test" + assert editor_creators[0]["affiliations"][0]["identifier"] == "yes" + assert editor_creators[0]["affiliations"][0]["scheme"] == "uh" + assert editor_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_creators[1]["given_name"] == "Admin Given Name here" + assert editor_creators[1]["family_name"] == "Family Name here" + assert editor_creators[1]["name_type"] == "Personal" + assert editor_creators[1]["name_identifier"] == "Name identifier" + assert editor_creators[1]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_creators[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_creators[1]["creator"] is True + assert editor_creators[1]["affiliations"][0]["name"] == "Test" + assert editor_creators[1]["affiliations"][0]["identifier"] == "yes" + assert editor_creators[1]["affiliations"][0]["scheme"] == "uh" + assert editor_creators[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_creators[2]["given_name"] == "Editor Given Name here" + assert editor_creators[2]["family_name"] == "Family Name here" + assert editor_creators[2]["name_type"] == "Personal" + assert editor_creators[2]["name_identifier"] == "Name identifier" + assert editor_creators[2]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_creators[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_creators[2]["creator"] is True + assert editor_creators[2]["affiliations"][0]["name"] == "Test" + assert editor_creators[2]["affiliations"][0]["identifier"] == "yes" + assert editor_creators[2]["affiliations"][0]["scheme"] == "uh" + assert editor_creators[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert viewer_creators[0]["given_name"] == "Given Name here" + assert viewer_creators[0]["family_name"] == "Family Name 
here" + assert viewer_creators[0]["name_type"] == "Personal" + assert viewer_creators[0]["name_identifier"] == "Name identifier" + assert viewer_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert viewer_creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert viewer_creators[0]["creator"] is True + assert viewer_creators[0]["affiliations"][0]["name"] == "Test" + assert viewer_creators[0]["affiliations"][0]["identifier"] == "yes" + assert viewer_creators[0]["affiliations"][0]["scheme"] == "uh" + assert viewer_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert viewer_creators[1]["given_name"] == "Admin Given Name here" + assert viewer_creators[1]["family_name"] == "Family Name here" + assert viewer_creators[1]["name_type"] == "Personal" + assert viewer_creators[1]["name_identifier"] == "Name identifier" + assert viewer_creators[1]["name_identifier_scheme"] == "Name Scheme ID" + assert viewer_creators[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert viewer_creators[1]["creator"] is True + assert viewer_creators[1]["affiliations"][0]["name"] == "Test" + assert viewer_creators[1]["affiliations"][0]["identifier"] == "yes" + assert viewer_creators[1]["affiliations"][0]["scheme"] == "uh" + assert viewer_creators[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert viewer_creators[2]["given_name"] == "Editor Given Name here" + assert viewer_creators[2]["family_name"] == "Family Name here" + assert viewer_creators[2]["name_type"] == "Personal" + assert viewer_creators[2]["name_identifier"] == "Name identifier" + assert viewer_creators[2]["name_identifier_scheme"] == "Name Scheme ID" + assert viewer_creators[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert viewer_creators[2]["creator"] is True + assert viewer_creators[2]["affiliations"][0]["name"] == "Test" + assert viewer_creators[2]["affiliations"][0]["identifier"] == "yes" + assert viewer_creators[2]["affiliations"][0]["scheme"] 
== "uh" + assert viewer_creators[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert contributors[0]["given_name"] == "Given Name here" + assert contributors[0]["family_name"] == "Family Name here" + assert contributors[0]["name_type"] == "Personal" + assert contributors[0]["name_identifier"] == "Name identifier" + assert contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert contributors[0]["creator"] is False + assert contributors[0]["contributor_type"] == "Con Type" + assert contributors[0]["affiliations"][0]["name"] == "Test" + assert contributors[0]["affiliations"][0]["identifier"] == "yes" + assert contributors[0]["affiliations"][0]["scheme"] == "uh" + assert contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert contributors[1]["given_name"] == "Admin Given Name here" + assert contributors[1]["family_name"] == "Family Name here" + assert contributors[1]["name_type"] == "Personal" + assert contributors[1]["name_identifier"] == "Name identifier" + assert contributors[1]["name_identifier_scheme"] == "Name Scheme ID" + assert contributors[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert contributors[1]["creator"] is False + assert contributors[1]["contributor_type"] == "Con Type" + assert contributors[1]["affiliations"][0]["name"] == "Test" + assert contributors[1]["affiliations"][0]["identifier"] == "yes" + assert contributors[1]["affiliations"][0]["scheme"] == "uh" + assert contributors[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert contributors[2]["given_name"] == "Editor Given Name here" + assert contributors[2]["family_name"] == "Editor Family Name here" + assert contributors[2]["name_type"] == "Personal" + assert contributors[2]["name_identifier"] == "Name identifier" + assert contributors[2]["name_identifier_scheme"] == "Name Scheme ID" + assert contributors[2]["name_identifier_scheme_uri"] == "Name ID 
Scheme URI" + assert contributors[2]["creator"] is False + assert contributors[2]["contributor_type"] == "Con Type" + assert contributors[2]["affiliations"][0]["name"] == "Test" + assert contributors[2]["affiliations"][0]["identifier"] == "yes" + assert contributors[2]["affiliations"][0]["scheme"] == "uh" + assert contributors[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert admin_contributors[0]["given_name"] == "Given Name here" + assert admin_contributors[0]["family_name"] == "Family Name here" + assert admin_contributors[0]["name_type"] == "Personal" + assert admin_contributors[0]["name_identifier"] == "Name identifier" + assert admin_contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_contributors[0]["creator"] is False + assert admin_contributors[0]["contributor_type"] == "Con Type" + assert admin_contributors[0]["affiliations"][0]["name"] == "Test" + assert admin_contributors[0]["affiliations"][0]["identifier"] == "yes" + assert admin_contributors[0]["affiliations"][0]["scheme"] == "uh" + assert admin_contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_contributors[1]["given_name"] == "Admin Given Name here" + assert admin_contributors[1]["family_name"] == "Family Name here" + assert admin_contributors[1]["name_type"] == "Personal" + assert admin_contributors[1]["name_identifier"] == "Name identifier" + assert admin_contributors[1]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_contributors[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_contributors[1]["creator"] is False + assert admin_contributors[1]["contributor_type"] == "Con Type" + assert admin_contributors[1]["affiliations"][0]["name"] == "Test" + assert admin_contributors[1]["affiliations"][0]["identifier"] == "yes" + assert admin_contributors[1]["affiliations"][0]["scheme"] == "uh" + assert 
admin_contributors[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_contributors[2]["given_name"] == "Editor Given Name here" + assert admin_contributors[2]["family_name"] == "Editor Family Name here" + assert admin_contributors[2]["name_type"] == "Personal" + assert admin_contributors[2]["name_identifier"] == "Name identifier" + assert admin_contributors[2]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_contributors[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_contributors[2]["creator"] is False + assert admin_contributors[2]["contributor_type"] == "Con Type" + assert admin_contributors[2]["affiliations"][0]["name"] == "Test" + assert admin_contributors[2]["affiliations"][0]["identifier"] == "yes" + assert admin_contributors[2]["affiliations"][0]["scheme"] == "uh" + assert admin_contributors[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert editor_contributors[0]["given_name"] == "Given Name here" + assert editor_contributors[0]["family_name"] == "Family Name here" + assert editor_contributors[0]["name_type"] == "Personal" + assert editor_contributors[0]["name_identifier"] == "Name identifier" + assert editor_contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_contributors[0]["creator"] is False + assert editor_contributors[0]["contributor_type"] == "Con Type" + assert editor_contributors[0]["affiliations"][0]["name"] == "Test" + assert editor_contributors[0]["affiliations"][0]["identifier"] == "yes" + assert editor_contributors[0]["affiliations"][0]["scheme"] == "uh" + assert editor_contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_contributors[1]["given_name"] == "Admin Given Name here" + assert editor_contributors[1]["family_name"] == "Family Name here" + assert editor_contributors[1]["name_type"] == "Personal" + assert 
editor_contributors[1]["name_identifier"] == "Name identifier" + assert editor_contributors[1]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_contributors[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_contributors[1]["creator"] is False + assert editor_contributors[1]["contributor_type"] == "Con Type" + assert editor_contributors[1]["affiliations"][0]["name"] == "Test" + assert editor_contributors[1]["affiliations"][0]["identifier"] == "yes" + assert editor_contributors[1]["affiliations"][0]["scheme"] == "uh" + assert editor_contributors[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_contributors[2]["given_name"] == "Editor Given Name here" + assert editor_contributors[2]["family_name"] == "Editor Family Name here" + assert editor_contributors[2]["name_type"] == "Personal" + assert editor_contributors[2]["name_identifier"] == "Name identifier" + assert editor_contributors[2]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_contributors[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_contributors[2]["creator"] is False + assert editor_contributors[2]["contributor_type"] == "Con Type" + assert editor_contributors[2]["affiliations"][0]["name"] == "Test" + assert editor_contributors[2]["affiliations"][0]["identifier"] == "yes" + assert editor_contributors[2]["affiliations"][0]["scheme"] == "uh" + assert editor_contributors[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert managing_organization["name"] == "editor Managing Organization Name" + assert managing_organization["identifier"] == "identifier" + assert managing_organization["identifier_scheme"] == "identifier scheme" + assert managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" + + assert admin_managing_organization["name"] == "editor Managing Organization Name" + assert admin_managing_organization["identifier"] == "identifier" + assert 
admin_managing_organization["identifier_scheme"] == "identifier scheme" + assert ( + admin_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" + ) + + assert editor_managing_organization["name"] == "editor Managing Organization Name" + assert editor_managing_organization["identifier"] == "identifier" + assert editor_managing_organization["identifier_scheme"] == "identifier scheme" + assert ( + editor_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" + ) + + assert viewer_managing_organization["name"] == "editor Managing Organization Name" + assert viewer_managing_organization["identifier"] == "identifier" + assert viewer_managing_organization["identifier_scheme"] == "identifier scheme" + assert ( + viewer_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" + ) + + +# ------------------- DELETE CONTRIBUTOR METADATA ------------------- # +def test_delete_dataset_contributor_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset contributor metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + contributor_id = pytest.global_dataset_contributor_id + admin_contributor_id = pytest.global_dataset_contributor_id_admin + editor_contributor_id = pytest.global_dataset_contributor_id_editor + + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" + ) + # pylint: disable=line-too-long + admin_response = _admin_client.delete( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{admin_contributor_id}" + ) + # pylint: disable=line-too-long + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{editor_contributor_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- DELETE CREATOR METADATA ------------------- # +def test_delete_dataset_creator_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset creator metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + creator_id = pytest.global_dataset_creator_id + admin_creator_id = pytest.global_dataset_creator_id_admin + editor_creator_id = pytest.global_dataset_creator_id_editor + + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{admin_creator_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{editor_creator_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- DELETE DATASET FUNDER METADATA ------------------- # +def test_delete_dataset_funder_metadata(clients): + """ 
+ Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + funder metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + funder_id = pytest.global_dataset_funder_id + a_funder_id = pytest.global_dataset_funder_id_admin + e_funder_id = pytest.global_dataset_funder_id_editor + + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{a_funder_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{e_funder_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- OTHER METADATA ------------------- # +def test_put_other_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + other metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Resource Type", + 
"size": ["Size"], + "format": ["Format"], + "standards_followed": "Standards Followed", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["acknowledgement"] == "Yes" + assert response_data["language"] == "English" + + assert response_data["size"] == ["Size"] + assert response_data["format"] == ["Format"] + assert response_data["standards_followed"] == "Standards Followed" + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Resource Type", + "size": ["Size"], + "format": ["Format"], + "standards_followed": "Standards Followed", + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + assert admin_response_data["acknowledgement"] == "Yes" + assert admin_response_data["language"] == "English" + assert admin_response_data["size"] == ["Size"] + assert admin_response_data["format"] == ["Format"] + assert admin_response_data["standards_followed"] == "Standards Followed" + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Resource Type", + "size": ["Size"], + "format": ["Format"], + "standards_followed": "Standards Followed", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["acknowledgement"] == "Yes" + assert editor_response_data["language"] == "English" + assert editor_response_data["size"] == ["Size"] + assert editor_response_data["format"] == ["Format"] + assert editor_response_data["standards_followed"] == "Standards Followed" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + 
"resource_type": "Resource Type", + "size": ["Size"], + "format": ["Format"], + "standards_followed": "Standards Followed", + }, + ) + assert viewer_response.status_code == 403 + + +def test_get_other_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + other metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + # assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + # Editor was the last to update the metadata successfully so + # the response should reflect that + assert response_data["acknowledgement"] == "Yes" + assert response_data["language"] == "English" + # assert response_data["resource_type"] == "Editor Resource Type" + assert response_data["size"] == ["Size"] + assert response_data["format"] == ["Format"] + assert response_data["standards_followed"] == "Standards Followed" + + assert admin_response_data["acknowledgement"] == "Yes" + assert admin_response_data["language"] == 
"English" + # assert admin_response_data["resource_type"] == "Editor Resource Type" + assert admin_response_data["size"] == ["Size"] + assert admin_response_data["format"] == ["Format"] + assert admin_response_data["standards_followed"] == "Standards Followed" + + assert editor_response_data["acknowledgement"] == "Yes" + assert editor_response_data["language"] == "English" + # assert editor_response_data["resource_type"] == "Editor Resource Type" + assert editor_response_data["size"] == ["Size"] + assert editor_response_data["format"] == ["Format"] + assert editor_response_data["standards_followed"] == "Standards Followed" + + assert viewer_response_data["acknowledgement"] == "Yes" + assert viewer_response_data["language"] == "English" + assert viewer_response_data["size"] == ["Size"] + assert viewer_response_data["format"] == ["Format"] + assert viewer_response_data["standards_followed"] == "Standards Followed" + + +# ------------------- RELATED IDENTIFIER METADATA ------------------- # +def test_post_dataset_related_identifier_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + related identifier metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", + json=[ + { + "identifier": "test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert 
response.status_code == 201 + response_data = json.loads(response.data) + + pytest.global_dataset_related_identifier_id = response_data[0]["id"] + + assert response_data[0]["identifier"] == "test identifier" + assert response_data[0]["identifier_type"] == "test identifier type" + assert response_data[0]["relation_type"] == "test relation type" + assert response_data[0]["related_metadata_scheme"] == "test" + assert response_data[0]["scheme_uri"] == "test" + assert response_data[0]["scheme_type"] == "test" + assert response_data[0]["resource_type"] == "test" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", + json=[ + { + "identifier": "admin test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_related_identifier_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["identifier"] == "admin test identifier" + assert admin_response_data[0]["identifier_type"] == "test identifier type" + assert admin_response_data[0]["relation_type"] == "test relation type" + assert admin_response_data[0]["related_metadata_scheme"] == "test" + assert admin_response_data[0]["scheme_uri"] == "test" + assert admin_response_data[0]["scheme_type"] == "test" + assert admin_response_data[0]["resource_type"] == "test" + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", + json=[ + { + "identifier": "editor test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": 
"test", + "resource_type": "test", + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_related_identifier_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["identifier"] == "editor test identifier" + assert editor_response_data[0]["identifier_type"] == "test identifier type" + assert editor_response_data[0]["relation_type"] == "test relation type" + assert editor_response_data[0]["related_metadata_scheme"] == "test" + assert editor_response_data[0]["scheme_uri"] == "test" + assert editor_response_data[0]["scheme_type"] == "test" + assert editor_response_data[0]["resource_type"] == "test" + viewer_client = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", + json=[ + { + "identifier": "viewer test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test", + } + ], + ) + + assert viewer_client.status_code == 403 + + +def test_get_dataset_related_identifier_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + related identifier metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" + ) + 
viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + # seach for main title and subtitle index in response_data[n]["titles"] + # pylint: disable=line-too-long + + # assert len(response_data) == 3 + # assert len(admin_response_data) == 3 + # assert len(editor_response_data) == 3 + # assert len(viewer_response_data) == 3 + assert response_data[0]["identifier"] == "test identifier" + assert response_data[0]["identifier_type"] == "test identifier type" + assert response_data[0]["relation_type"] == "test relation type" + assert response_data[0]["related_metadata_scheme"] == "test" + assert response_data[0]["scheme_uri"] == "test" + assert response_data[0]["scheme_type"] == "test" + assert response_data[0]["resource_type"] == "test" + assert response_data[1]["identifier"] == "admin test identifier" + assert response_data[1]["identifier_type"] == "test identifier type" + assert response_data[1]["relation_type"] == "test relation type" + assert response_data[1]["related_metadata_scheme"] == "test" + assert response_data[1]["scheme_uri"] == "test" + assert response_data[1]["scheme_type"] == "test" + assert response_data[1]["resource_type"] == "test" + assert response_data[2]["identifier"] == "editor test identifier" + assert response_data[2]["identifier_type"] == "test identifier type" + assert response_data[2]["relation_type"] == "test relation type" + assert response_data[2]["related_metadata_scheme"] == "test" + assert response_data[2]["scheme_uri"] == "test" + assert response_data[2]["scheme_type"] == "test" + assert 
response_data[2]["resource_type"] == "test" + + assert admin_response_data[0]["identifier"] == "test identifier" + assert admin_response_data[0]["identifier_type"] == "test identifier type" + assert admin_response_data[0]["relation_type"] == "test relation type" + assert admin_response_data[0]["related_metadata_scheme"] == "test" + assert admin_response_data[0]["scheme_uri"] == "test" + assert admin_response_data[0]["scheme_type"] == "test" + assert admin_response_data[0]["resource_type"] == "test" + assert admin_response_data[1]["identifier"] == "admin test identifier" + assert admin_response_data[1]["identifier_type"] == "test identifier type" + assert admin_response_data[1]["relation_type"] == "test relation type" + assert admin_response_data[1]["related_metadata_scheme"] == "test" + assert admin_response_data[1]["scheme_uri"] == "test" + assert admin_response_data[1]["scheme_type"] == "test" + assert admin_response_data[1]["resource_type"] == "test" + assert admin_response_data[2]["identifier"] == "editor test identifier" + assert admin_response_data[2]["identifier_type"] == "test identifier type" + assert admin_response_data[2]["relation_type"] == "test relation type" + assert admin_response_data[2]["related_metadata_scheme"] == "test" + assert admin_response_data[2]["scheme_uri"] == "test" + assert admin_response_data[2]["scheme_type"] == "test" + assert admin_response_data[2]["resource_type"] == "test" + + assert editor_response_data[0]["identifier"] == "test identifier" + assert editor_response_data[0]["identifier_type"] == "test identifier type" + assert editor_response_data[0]["relation_type"] == "test relation type" + assert editor_response_data[0]["related_metadata_scheme"] == "test" + assert editor_response_data[0]["scheme_uri"] == "test" + assert editor_response_data[0]["scheme_type"] == "test" + assert editor_response_data[0]["resource_type"] == "test" + assert editor_response_data[1]["identifier"] == "admin test identifier" + assert 
editor_response_data[1]["identifier_type"] == "test identifier type" + assert editor_response_data[1]["relation_type"] == "test relation type" + assert editor_response_data[1]["related_metadata_scheme"] == "test" + assert editor_response_data[1]["scheme_uri"] == "test" + assert editor_response_data[1]["scheme_type"] == "test" + assert editor_response_data[1]["resource_type"] == "test" + assert editor_response_data[2]["identifier"] == "editor test identifier" + assert editor_response_data[2]["identifier_type"] == "test identifier type" + assert editor_response_data[2]["relation_type"] == "test relation type" + assert editor_response_data[2]["related_metadata_scheme"] == "test" + assert editor_response_data[2]["scheme_uri"] == "test" + assert editor_response_data[2]["scheme_type"] == "test" + assert editor_response_data[2]["resource_type"] == "test" + + assert viewer_response_data[0]["identifier"] == "test identifier" + assert viewer_response_data[0]["identifier_type"] == "test identifier type" + assert viewer_response_data[0]["relation_type"] == "test relation type" + assert viewer_response_data[0]["related_metadata_scheme"] == "test" + assert viewer_response_data[0]["scheme_uri"] == "test" + assert viewer_response_data[0]["scheme_type"] == "test" + assert viewer_response_data[0]["resource_type"] == "test" + assert viewer_response_data[1]["identifier"] == "admin test identifier" + assert viewer_response_data[1]["identifier_type"] == "test identifier type" + assert viewer_response_data[1]["relation_type"] == "test relation type" + assert viewer_response_data[1]["related_metadata_scheme"] == "test" + assert viewer_response_data[1]["scheme_uri"] == "test" + assert viewer_response_data[1]["scheme_type"] == "test" + assert viewer_response_data[1]["resource_type"] == "test" + assert viewer_response_data[2]["identifier"] == "editor test identifier" + assert viewer_response_data[2]["identifier_type"] == "test identifier type" + assert 
viewer_response_data[2]["relation_type"] == "test relation type" + assert viewer_response_data[2]["related_metadata_scheme"] == "test" + assert viewer_response_data[2]["scheme_uri"] == "test" + assert viewer_response_data[2]["scheme_type"] == "test" + assert viewer_response_data[2]["resource_type"] == "test" + + +def test_delete_dataset_related_identifier_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (DELETE) + Then check that the response is valid and retrieves the dataset + related identifier metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + identifier_id = pytest.global_dataset_related_identifier_id + a_identifier_id = pytest.global_dataset_related_identifier_id_admin + e_identifier_id = pytest.global_dataset_related_identifier_id_editor + + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{a_identifier_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{e_identifier_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- DATA MANAGEMENT METADATA ------------------- # +def test_post_dataset_data_management_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the 
'/study/{study_id}/dataset/{dataset_id}/metadata/data-management' endpoint is requested (PUT) + Then check that the response is valid and updates the data management metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + json={ + "consent": { + "type": "test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "test", + }, + "deident": { + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Value URI", + } + ], + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + consent = response_data["consent"] + deident = response_data["deident"] + subjects = response_data["subjects"] + pytest.global_dataset_subject_id = subjects[0]["id"] + + assert subjects[0]["scheme"] == "Scheme" + assert subjects[0]["scheme_uri"] == "Scheme URI" + assert subjects[0]["subject"] == "Subject" + assert subjects[0]["value_uri"] == "Value URI" + assert subjects[0]["classification_code"] == "Classification Code" + + assert consent["type"] == "test" + assert consent["noncommercial"] is True + assert consent["geog_restrict"] is True + assert consent["research_type"] is True + assert consent["genetic_only"] is True + assert consent["no_methods"] is True + assert consent["details"] == "test" + + assert deident["type"] == "Level" + assert deident["direct"] is True + assert deident["hipaa"] is True + assert deident["dates"] is True + assert deident["nonarr"] is True + assert deident["k_anon"] is 
True + assert deident["details"] == "Details" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + json={ + "consent": { + "type": "admin test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "admin details test", + }, + "deident": { + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Admin Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Admin Value URI", + } + ], + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + admin_consent = admin_response_data["consent"] + admin_deident = admin_response_data["deident"] + admin_subjects = admin_response_data["subjects"] + + pytest.global_dataset_subject_id_admin = admin_subjects[0]["id"] + + assert admin_subjects[0]["scheme"] == "Admin Scheme" + assert admin_subjects[0]["scheme_uri"] == "Scheme URI" + assert admin_subjects[0]["subject"] == "Subject" + assert admin_subjects[0]["value_uri"] == "Admin Value URI" + assert admin_subjects[0]["classification_code"] == "Classification Code" + + assert admin_consent["type"] == "admin test" + assert admin_consent["details"] == "admin details test" + assert admin_consent["noncommercial"] is True + assert admin_consent["geog_restrict"] is True + assert admin_consent["research_type"] is True + assert admin_consent["genetic_only"] is True + assert admin_consent["no_methods"] is True + + assert admin_deident["type"] == "Level" + assert admin_deident["direct"] is True + assert admin_deident["hipaa"] is True + assert admin_deident["dates"] is True + assert admin_deident["nonarr"] is True + assert admin_deident["k_anon"] is True + assert admin_deident["details"] == "Details" + + editor_response = 
_editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + json={ + "consent": { + "type": "editor test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "editor details test", + }, + "deident": { + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Editor Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Editor Value URI", + } + ], + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + editor_consent = editor_response_data["consent"] + editor_deident = editor_response_data["deident"] + editor_subjects = editor_response_data["subjects"] + pytest.global_dataset_subject_id_editor = editor_subjects[0]["id"] + + assert editor_subjects[0]["scheme"] == "Editor Scheme" + assert editor_subjects[0]["scheme_uri"] == "Scheme URI" + assert editor_subjects[0]["subject"] == "Subject" + assert editor_subjects[0]["value_uri"] == "Editor Value URI" + assert editor_subjects[0]["classification_code"] == "Classification Code" + + assert editor_consent["type"] == "editor test" + assert editor_consent["details"] == "editor details test" + assert editor_consent["noncommercial"] is True + assert editor_consent["geog_restrict"] is True + assert editor_consent["research_type"] is True + assert editor_consent["genetic_only"] is True + assert editor_consent["no_methods"] is True + + assert editor_deident["type"] == "Level" + assert editor_deident["direct"] is True + assert editor_deident["hipaa"] is True + assert editor_deident["dates"] is True + assert editor_deident["nonarr"] is True + assert editor_deident["k_anon"] is True + assert editor_deident["details"] == "Details" + + viewer_response = _viewer_client.post( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + json={ + "consent": { + "type": "viewer test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "viewer details test", + }, + "deident": { + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Viewer Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Viewer Value URI", + } + ], + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_data_management_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/data-management' endpoint is requested (GET) + Then check that the response is valid and retrieves the data management metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = 
json.loads(viewer_response.data) + + consent = response_data["consent"] + deident = response_data["deident"] + subjects = response_data["subjects"] + + admin_consent = admin_response_data["consent"] + admin_deident = admin_response_data["deident"] + admin_subjects = admin_response_data["subjects"] + + editor_consent = editor_response_data["consent"] + editor_deident = editor_response_data["deident"] + editor_subjects = editor_response_data["subjects"] + + viewer_consent = viewer_response_data["consent"] + viewer_deident = viewer_response_data["deident"] + viewer_subjects = viewer_response_data["subjects"] + + assert consent["type"] == "editor test" + assert consent["noncommercial"] is True + assert consent["geog_restrict"] is True + assert consent["research_type"] is True + assert consent["genetic_only"] is True + assert consent["no_methods"] is True + assert consent["details"] == "editor details test" + assert admin_consent["type"] == "editor test" + assert admin_consent["noncommercial"] is True + assert admin_consent["geog_restrict"] is True + assert admin_consent["research_type"] is True + assert admin_consent["genetic_only"] is True + assert admin_consent["no_methods"] is True + assert admin_consent["details"] == "editor details test" + assert editor_consent["type"] == "editor test" + assert editor_consent["noncommercial"] is True + assert editor_consent["geog_restrict"] is True + assert editor_consent["research_type"] is True + assert editor_consent["genetic_only"] is True + assert editor_consent["no_methods"] is True + assert editor_consent["details"] == "editor details test" + assert viewer_consent["type"] == "editor test" + assert viewer_consent["noncommercial"] is True + assert viewer_consent["geog_restrict"] is True + assert viewer_consent["research_type"] is True + assert viewer_consent["genetic_only"] is True + assert viewer_consent["no_methods"] is True + assert viewer_consent["details"] == "editor details test" + + assert subjects[0]["scheme"] == 
"Scheme" + assert subjects[0]["scheme_uri"] == "Scheme URI" + assert subjects[0]["subject"] == "Subject" + assert subjects[0]["value_uri"] == "Value URI" + assert subjects[0]["classification_code"] == "Classification Code" + assert subjects[1]["scheme"] == "Admin Scheme" + assert subjects[1]["scheme_uri"] == "Scheme URI" + assert subjects[1]["subject"] == "Subject" + assert subjects[1]["value_uri"] == "Admin Value URI" + assert subjects[1]["classification_code"] == "Classification Code" + assert subjects[2]["scheme"] == "Editor Scheme" + assert subjects[2]["scheme_uri"] == "Scheme URI" + assert subjects[2]["subject"] == "Subject" + assert subjects[2]["value_uri"] == "Editor Value URI" + assert subjects[2]["classification_code"] == "Classification Code" + + assert admin_subjects[0]["scheme"] == "Scheme" + assert admin_subjects[0]["scheme_uri"] == "Scheme URI" + assert admin_subjects[0]["subject"] == "Subject" + assert admin_subjects[0]["value_uri"] == "Value URI" + assert admin_subjects[0]["classification_code"] == "Classification Code" + assert admin_subjects[1]["scheme"] == "Admin Scheme" + assert admin_subjects[1]["scheme_uri"] == "Scheme URI" + assert admin_subjects[1]["subject"] == "Subject" + assert admin_subjects[1]["value_uri"] == "Admin Value URI" + assert admin_subjects[1]["classification_code"] == "Classification Code" + assert admin_subjects[2]["scheme"] == "Editor Scheme" + assert admin_subjects[2]["scheme_uri"] == "Scheme URI" + assert admin_subjects[2]["subject"] == "Subject" + assert admin_subjects[2]["value_uri"] == "Editor Value URI" + assert admin_subjects[2]["classification_code"] == "Classification Code" + + assert editor_subjects[0]["scheme"] == "Scheme" + assert editor_subjects[0]["scheme_uri"] == "Scheme URI" + assert editor_subjects[0]["subject"] == "Subject" + assert editor_subjects[0]["value_uri"] == "Value URI" + assert editor_subjects[0]["classification_code"] == "Classification Code" + assert editor_subjects[1]["scheme"] == "Admin 
Scheme" + assert editor_subjects[1]["scheme_uri"] == "Scheme URI" + assert editor_subjects[1]["subject"] == "Subject" + assert editor_subjects[1]["value_uri"] == "Admin Value URI" + assert editor_subjects[1]["classification_code"] == "Classification Code" + assert editor_subjects[2]["scheme"] == "Editor Scheme" + assert editor_subjects[2]["scheme_uri"] == "Scheme URI" + assert editor_subjects[2]["subject"] == "Subject" + assert editor_subjects[2]["value_uri"] == "Editor Value URI" + assert editor_subjects[2]["classification_code"] == "Classification Code" + + assert viewer_subjects[0]["scheme"] == "Scheme" + assert viewer_subjects[0]["scheme_uri"] == "Scheme URI" + assert viewer_subjects[0]["subject"] == "Subject" + assert viewer_subjects[0]["value_uri"] == "Value URI" + assert viewer_subjects[0]["classification_code"] == "Classification Code" + assert viewer_subjects[1]["scheme"] == "Admin Scheme" + assert viewer_subjects[1]["scheme_uri"] == "Scheme URI" + assert viewer_subjects[1]["subject"] == "Subject" + assert viewer_subjects[1]["value_uri"] == "Admin Value URI" + assert viewer_subjects[1]["classification_code"] == "Classification Code" + assert viewer_subjects[2]["scheme"] == "Editor Scheme" + assert viewer_subjects[2]["scheme_uri"] == "Scheme URI" + assert viewer_subjects[2]["subject"] == "Subject" + assert viewer_subjects[2]["value_uri"] == "Editor Value URI" + assert viewer_subjects[2]["classification_code"] == "Classification Code" + + assert deident["type"] == "Level" + assert deident["direct"] is True + assert deident["hipaa"] is True + assert deident["dates"] is True + assert deident["nonarr"] is True + assert deident["k_anon"] is True + assert deident["details"] == "Details" + assert admin_deident["type"] == "Level" + assert admin_deident["direct"] is True + assert admin_deident["hipaa"] is True + assert admin_deident["dates"] is True + assert admin_deident["nonarr"] is True + assert admin_deident["k_anon"] is True + assert admin_deident["details"] == 
"Details" + assert editor_deident["type"] == "Level" + assert editor_deident["direct"] is True + assert editor_deident["hipaa"] is True + assert editor_deident["dates"] is True + assert editor_deident["nonarr"] is True + assert editor_deident["k_anon"] is True + assert editor_deident["details"] == "Details" + assert viewer_deident["type"] == "Level" + assert viewer_deident["direct"] is True + assert viewer_deident["hipaa"] is True + assert viewer_deident["dates"] is True + assert viewer_deident["nonarr"] is True + assert viewer_deident["k_anon"] is True + assert viewer_deident["details"] == "Details" + + +def test_delete_dataset_subject_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + subjects metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + subject_id = pytest.global_dataset_subject_id + admin_sub_id = pytest.global_dataset_subject_id_admin + editor_sub_id = pytest.global_dataset_subject_id_editor + + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{admin_sub_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{editor_sub_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- ALTERNATIVE IDENTIFIER METADATA 
------------------- # +def test_post_alternative_identifier(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset alternative identifier + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", + json=[ + { + "identifier": "identifier test", + "type": "ARK", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert response.status_code == 201 + response_data = json.loads(response.data) + pytest.global_alternative_identifier_id = response_data[0]["id"] + + assert response_data[0]["identifier"] == "identifier test" + assert response_data[0]["type"] == "ARK" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", + json=[ + { + "identifier": "admin test", + "type": "ARK", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", + json=[ + { + "identifier": "editor test", + "type": "ARK", + } + ], + ) + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", + json=[ + { + "identifier": "viewer test", + "type": "ARK", + } + ], + ) + + assert admin_response.status_code == 201 + assert editor_response.status_code == 201 + assert viewer_response.status_code == 403 + + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + pytest.global_alternative_identifier_id_admin = 
admin_response_data[0]["id"] + pytest.global_alternative_identifier_id_editor = editor_response_data[0]["id"] + + assert admin_response_data[0]["identifier"] == "admin test" + assert admin_response_data[0]["type"] == "ARK" + assert editor_response_data[0]["identifier"] == "editor test" + assert editor_response_data[0]["type"] == "ARK" + + +def test_get_alternative_identifier(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset alternative identifier content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data[0]["identifier"] == "identifier test" + assert response_data[0]["type"] == "ARK" + assert response_data[1]["identifier"] == "admin test" + assert response_data[1]["type"] == "ARK" + assert response_data[2]["identifier"] == "editor test" + assert 
response_data[2]["type"] == "ARK" + + assert admin_response_data[0]["identifier"] == "identifier test" + assert admin_response_data[0]["type"] == "ARK" + assert admin_response_data[1]["identifier"] == "admin test" + assert admin_response_data[1]["type"] == "ARK" + assert admin_response_data[2]["identifier"] == "editor test" + assert admin_response_data[2]["type"] == "ARK" + + assert editor_response_data[0]["identifier"] == "identifier test" + assert editor_response_data[0]["type"] == "ARK" + assert editor_response_data[1]["identifier"] == "admin test" + assert editor_response_data[1]["type"] == "ARK" + assert editor_response_data[2]["identifier"] == "editor test" + assert editor_response_data[2]["type"] == "ARK" + + assert viewer_response_data[0]["identifier"] == "identifier test" + assert viewer_response_data[0]["type"] == "ARK" + assert viewer_response_data[1]["identifier"] == "admin test" + assert viewer_response_data[1]["type"] == "ARK" + assert viewer_response_data[2]["identifier"] == "editor test" + assert viewer_response_data[2]["type"] == "ARK" + + +def test_delete_alternative_identifier(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset alternative identifier content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + identifier_id = pytest.global_alternative_identifier_id + admin_identifier_id = pytest.global_alternative_identifier_id_admin + editor_identifier_id = pytest.global_alternative_identifier_id_editor + + # verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" + ) + response = _logged_in_client.delete( 
+ f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" + ) + # pylint: disable=line-too-long + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{admin_identifier_id}" + ) + # pylint: disable=line-too-long + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{editor_identifier_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- DATASET HEALTHSHEET MOTIVATION METADATA ------------------- # +def test_put_healthsheet_motivation_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["motivation"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_motivation_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["motivation"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["motivation"] + == 
'[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# # ------------------- DATASET HEALTHSHEET COMPOSITION METADATA ------------------- # +def test_put_healthsheet_composition_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/healthsheet/composition' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet composition metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": 
'[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_composition_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/composition' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# ------------------- DATASET HEALTHSHEET COLLECTION METADATA ------------------- # +def test_put_healthsheet_collection_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/healthsheet/collection' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet collection metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) 
+ assert ( + editor_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_collection_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/collection' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + 
assert ( + viewer_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# ------------------- DATASET HEALTHSHEET PREPROCESSING METADATA ------------------- # +def test_put_healthsheet_preprocessing_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet preprocessing metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + 
viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_preprocessing_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["preprocessing"] + == 
'[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# # ------------------- DATASET HEALTHSHEET USES METADATA ------------------- # +def test_put_healthsheet_uses_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/healthsheet/uses' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet uses metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code 
== 403 + + +def test_get_dataset_healthsheet_uses_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/uses' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# ------------------- DATASET HEALTHSHEET DISTRIBUTION METADATA ------------------- # +def test_put_healthsheet_distribution_dataset_metadata(clients): + """ + Given a Flask application configured for testing and 
a study ID and dataset ID + When the '/study/{study_id}/dataset/healthsheet/distribution' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet distribution metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_distribution_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the 
'/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + distribution metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# ------------------- DATASET HEALTHSHEET MAINTENANCE METADATA ------------------- # +def test_put_healthsheet_maintenance_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the 
'/study/{study_id}/dataset/healthsheet/maintenance' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet maintenance metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["maintenance"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_maintenance_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance' + endpoint is requested 
(GET) + Then check that the response is valid and retrieves the dataset + maintenance metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["maintenance"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_050_study_metadata_api.py similarity index 71% rename from tests/functional/test_study_metadata_api.py rename to tests/functional/test_050_study_metadata_api.py index 8db5675c..c5eb3259 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_050_study_metadata_api.py @@ -695,11 
+695,11 @@ def test_delete_cc_metadata(clients): assert editor_response.status_code == 204 -# ------------------- COLLABORATORS METADATA ------------------- # -def test_post_collaborators_metadata(clients): +# ------------------- TEAM METADATA ------------------- # +def test_post_team_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/collaborators' + WHEN the '/study/{study_id}/metadata/team' endpoint is requested (POST) THEN check that the response is valid and creates the collaborators metadata """ @@ -707,170 +707,792 @@ def test_post_collaborators_metadata(clients): study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], + f"/study/{study_id}/metadata/team", + json={ + "collaborators": [ + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + "responsible_party_type": "Sponsor", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + 
"responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, ) # Add a one second delay to prevent duplicate timestamps sleep(1) - assert response.status_code == 201 response_data = json.loads(response.data) - pytest.global_collaborators_id = response_data[0]["id"] + pytest.global_collaborators_id = response_data["collaborators"][0]["id"] + + assert response_data["collaborators"][0]["name"] == "collaborator1123" + assert response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert response_data["collaborators"][0]["identifier_scheme"] == "collaborator1123" + assert ( + response_data["collaborators"][0]["identifier_scheme_uri"] == "collaborator1123" + ) - assert response_data[0]["name"] == "collaborator1123" - assert response_data[0]["identifier"] == "collaborator1123" - assert response_data[0]["identifier_scheme"] == "collaborator1123" - assert response_data[0]["identifier_scheme_uri"] == "collaborator1123" + assert response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + response_data["sponsors"]["responsible_party_investigator_first_name"] == "name" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + assert ( + response_data["sponsors"]["responsible_party_investigator_identifier_value"] + == "identifier" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_identifier_scheme"] + == "scheme" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_affiliation_name"] 
+ == "affiliation" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert response_data["sponsors"]["lead_sponsor_name"] == "name" + assert response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + assert response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" admin_response = _admin_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "admin collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], + f"/study/{study_id}/metadata/team", + json={ + "collaborators": [ + { + "name": "admin collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "admin collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + "responsible_party_type": "Sponsor", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + 
"responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, ) # Add a one second delay to prevent duplicate timestamps sleep(1) assert admin_response.status_code == 201 admin_response_data = json.loads(admin_response.data) - pytest.global_admin_collaborators_id_admin = admin_response_data[0]["id"] + pytest.global_admin_collaborators_id_admin = admin_response_data["collaborators"][ + 0 + ]["id"] - assert admin_response_data[0]["name"] == "admin collaborator1123" - assert admin_response_data[0]["identifier"] == "collaborator1123" - assert admin_response_data[0]["identifier_scheme"] == "collaborator1123" - assert admin_response_data[0]["identifier_scheme_uri"] == "collaborator1123" + assert admin_response_data["collaborators"][0]["name"] == "admin collaborator1123" + assert admin_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + admin_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + admin_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert admin_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_value" + ] + == "identifier" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + 
admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert admin_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert admin_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert admin_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + assert ( + admin_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + ) editor_response = _editor_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "editor collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], + f"/study/{study_id}/metadata/team", + json={ + "collaborators": [ + { + "name": "editor collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "editor collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + "responsible_party_type": "Sponsor", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + 
"responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, ) assert editor_response.status_code == 201 editor_response_data = json.loads(editor_response.data) - pytest.global_editor_collaborators_id_editor = editor_response_data[0]["id"] + pytest.global_editor_collaborators_id_editor = editor_response_data[ + "collaborators" + ][0]["id"] - assert editor_response_data[0]["name"] == "editor collaborator1123" - assert editor_response_data[0]["identifier"] == "collaborator1123" - assert editor_response_data[0]["identifier_scheme"] == "collaborator1123" - assert editor_response_data[0]["identifier_scheme_uri"] == "collaborator1123" + assert editor_response_data["collaborators"][0]["name"] == "editor collaborator1123" + assert editor_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + editor_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + editor_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert editor_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + editor_response_data["sponsors"][ 
+ "responsible_party_investigator_identifier_value" + ] + == "identifier" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert editor_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert editor_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert ( + editor_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + ) + assert ( + editor_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + ) viewer_response = _viewer_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "editor collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], + f"/study/{study_id}/metadata/team", + json={ + "collaborators": [ + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + 
"responsible_party_type": "Sponsor", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_team_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/team' endpoint is requested (GET) + THEN check that the response is valid and retrieves the collaborators metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.get(f"/study/{study_id}/metadata/team") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/team") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/team") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/team") + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["collaborators"][0]["name"] == "collaborator1123" + 
assert response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert response_data["collaborators"][0]["identifier_scheme"] == "collaborator1123" + assert ( + response_data["collaborators"][0]["identifier_scheme_uri"] == "collaborator1123" + ) + + assert admin_response_data["collaborators"][0]["name"] == "collaborator1123" + assert admin_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + admin_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + admin_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert editor_response_data["collaborators"][0]["name"] == "collaborator1123" + assert editor_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + editor_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + editor_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert viewer_response_data["collaborators"][0]["name"] == "collaborator1123" + assert viewer_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + viewer_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + viewer_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert response_data["collaborators"][1]["name"] == "collaborator1123" + assert response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert response_data["collaborators"][1]["identifier_scheme"] == "collaborator1123" + assert ( + response_data["collaborators"][1]["identifier_scheme_uri"] == "collaborator1123" + ) + + assert admin_response_data["collaborators"][1]["name"] == "collaborator1123" + assert admin_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + admin_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + 
assert ( + admin_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert editor_response_data["collaborators"][1]["name"] == "collaborator1123" + assert editor_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + editor_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + editor_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert viewer_response_data["collaborators"][1]["name"] == "collaborator1123" + assert viewer_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + viewer_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + viewer_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + response_data["sponsors"]["responsible_party_investigator_first_name"] == "name" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + + assert ( + response_data["sponsors"]["responsible_party_investigator_identifier_value"] + == "identifier" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_identifier_scheme"] + == "scheme" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_affiliation_name"] + == "affiliation" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + response_data["sponsors"][ + 
"responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert response_data["sponsors"]["lead_sponsor_name"] == "name" + assert response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + assert response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + + assert admin_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_value" + ] + == "identifier" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert admin_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert admin_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert admin_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + assert ( + 
admin_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + ) + + assert editor_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_value" + ] + == "identifier" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert editor_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert editor_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert ( + editor_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + ) + assert ( + editor_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + ) + + assert viewer_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + viewer_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + 
viewer_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + viewer_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_identifier_value" + ] + == "identifier" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert viewer_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert viewer_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert ( + viewer_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + ) + assert ( + viewer_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" ) - assert viewer_response.status_code == 403 + assert response_data["collaborators"][1]["name"] == "collaborator1123" + assert response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert response_data["collaborators"][1]["identifier_scheme"] == "collaborator1123" + assert ( + response_data["collaborators"][1]["identifier_scheme_uri"] == "collaborator1123" + ) + assert admin_response_data["collaborators"][1]["name"] == "collaborator1123" + assert admin_response_data["collaborators"][1]["identifier"] == 
"collaborator1123" + assert ( + admin_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + admin_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) -def test_get_collaborators_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/collaborators' endpoint is requested (GET) - THEN check that the response is valid and retrieves the collaborators metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore + assert editor_response_data["collaborators"][1]["name"] == "collaborator1123" + assert editor_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + editor_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + editor_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) - response = _logged_in_client.get(f"/study/{study_id}/metadata/collaborators") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/collaborators") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/collaborators") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/collaborators") + assert viewer_response_data["collaborators"][1]["name"] == "collaborator1123" + assert viewer_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + viewer_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + viewer_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 + # assert response_data[2]["name"] == "editor collaborator1123" + # assert 
response_data[2]["identifier"] == "collaborator1123" + # assert response_data[2]["identifier_scheme"] == "collaborator1123" + # assert response_data[2]["identifier_scheme_uri"] == "collaborator1123" + # + # assert admin_response_data[2]["name"] == "editor collaborator1123" + # assert admin_response_data[2]["identifier"] == "collaborator1123" + # assert admin_response_data[2]["identifier_scheme"] == "collaborator1123" + # assert admin_response_data[2]["identifier_scheme_uri"] == "collaborator1123" + # + # assert editor_response_data[2]["name"] == "editor collaborator1123" + # assert editor_response_data[2]["identifier"] == "collaborator1123" + # assert editor_response_data[2]["identifier_scheme"] == "collaborator1123" + # assert editor_response_data[2]["identifier_scheme_uri"] == "collaborator1123" + # + # assert viewer_response_data[2]["name"] == "editor collaborator1123" + # assert viewer_response_data[2]["identifier"] == "collaborator1123" + # assert viewer_response_data[2]["identifier_scheme"] == "collaborator1123" + # assert viewer_response_data[2]["identifier_scheme_uri"] == "collaborator1123" + # - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - assert response_data[0]["name"] == "collaborator1123" - assert response_data[0]["identifier"] == "collaborator1123" - assert response_data[0]["identifier_scheme"] == "collaborator1123" - assert response_data[0]["identifier_scheme_uri"] == "collaborator1123" - - assert admin_response_data[0]["name"] == "collaborator1123" - assert admin_response_data[0]["identifier"] == "collaborator1123" - assert admin_response_data[0]["identifier_scheme"] == "collaborator1123" - assert admin_response_data[0]["identifier_scheme_uri"] == "collaborator1123" - - assert editor_response_data[0]["name"] == "collaborator1123" - assert editor_response_data[0]["identifier"] == 
"collaborator1123" - assert editor_response_data[0]["identifier_scheme"] == "collaborator1123" - assert editor_response_data[0]["identifier_scheme_uri"] == "collaborator1123" - - assert viewer_response_data[0]["name"] == "collaborator1123" - assert viewer_response_data[0]["identifier"] == "collaborator1123" - assert viewer_response_data[0]["identifier_scheme"] == "collaborator1123" - assert viewer_response_data[0]["identifier_scheme_uri"] == "collaborator1123" - - assert response_data[1]["name"] == "admin collaborator1123" - assert response_data[1]["identifier"] == "collaborator1123" - assert response_data[1]["identifier_scheme"] == "collaborator1123" - assert response_data[1]["identifier_scheme_uri"] == "collaborator1123" - - assert admin_response_data[1]["name"] == "admin collaborator1123" - assert admin_response_data[1]["identifier"] == "collaborator1123" - assert admin_response_data[1]["identifier_scheme"] == "collaborator1123" - assert admin_response_data[1]["identifier_scheme_uri"] == "collaborator1123" - - assert editor_response_data[1]["name"] == "admin collaborator1123" - assert editor_response_data[1]["identifier"] == "collaborator1123" - assert editor_response_data[1]["identifier_scheme"] == "collaborator1123" - assert editor_response_data[1]["identifier_scheme_uri"] == "collaborator1123" - - assert viewer_response_data[1]["name"] == "admin collaborator1123" - assert viewer_response_data[1]["identifier"] == "collaborator1123" - assert viewer_response_data[1]["identifier_scheme"] == "collaborator1123" - assert viewer_response_data[1]["identifier_scheme_uri"] == "collaborator1123" - - assert response_data[2]["name"] == "editor collaborator1123" - assert response_data[2]["identifier"] == "collaborator1123" - assert response_data[2]["identifier_scheme"] == "collaborator1123" - assert response_data[2]["identifier_scheme_uri"] == "collaborator1123" - - assert admin_response_data[2]["name"] == "editor collaborator1123" - assert 
admin_response_data[2]["identifier"] == "collaborator1123" - assert admin_response_data[2]["identifier_scheme"] == "collaborator1123" - assert admin_response_data[2]["identifier_scheme_uri"] == "collaborator1123" - - assert editor_response_data[2]["name"] == "editor collaborator1123" - assert editor_response_data[2]["identifier"] == "collaborator1123" - assert editor_response_data[2]["identifier_scheme"] == "collaborator1123" - assert editor_response_data[2]["identifier_scheme_uri"] == "collaborator1123" - - assert viewer_response_data[2]["name"] == "editor collaborator1123" - assert viewer_response_data[2]["identifier"] == "collaborator1123" - assert viewer_response_data[2]["identifier_scheme"] == "collaborator1123" - assert viewer_response_data[2]["identifier_scheme_uri"] == "collaborator1123" +# ------------------- COLLABORATORS DELETE METADATA ------------------- # def test_delete_collaborators_metadata(clients): @@ -904,114 +1526,359 @@ def test_delete_collaborators_metadata(clients): assert editor_response.status_code == 204 -# # ------------------- CONDITIONS METADATA ------------------- # -def test_post_conditions_metadata(clients): +# ------------------- DESCRIPTION METADATA ------------------- # +def test_post_description_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (POST) - THEN check that the response is valid and creates the conditions metadata + WHEN the '/study/{study_id}/metadata/description' endpoint is requested (POST) + THEN check that the response is valid and creates the description metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme 
uri", - "condition_uri": "condition", - } - ], + f"/study/{study_id}/metadata/description", + json={ + "conditions": [ + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, + }, ) - assert response.status_code == 201 response_data = json.loads(response.data) - pytest.global_conditions_id = response_data[0]["id"] + pytest.global_identification_id = response_data["identification"]["secondary"][0][ + "id" + ] - assert response_data[0]["name"] == "condition" - assert response_data[0]["classification_code"] == "classification code" - assert response_data[0]["scheme"] == "scheme" - assert response_data[0]["scheme_uri"] == "scheme uri" - assert response_data[0]["condition_uri"] == "condition" + pytest.global_keywords_id = response_data["keywords"][0]["id"] + pytest.global_conditions_id = response_data["conditions"][0]["id"] + + assert response_data["conditions"][0]["name"] == "condition" + assert ( + response_data["conditions"][0]["classification_code"] == "classification code" + ) + assert response_data["conditions"][0]["scheme"] == "scheme" + assert response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert response_data["conditions"][0]["condition_uri"] == "condition" + + assert response_data["keywords"][0]["name"] == "keywords" + assert 
response_data["keywords"][0]["classification_code"] == "classification code" + assert response_data["keywords"][0]["scheme"] == "scheme" + assert response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert response_data["keywords"][0]["keyword_uri"] == "keywords" + assert response_data["identification"]["primary"]["identifier"] == "first" + assert response_data["identification"]["primary"]["identifier_type"] == "test" + assert response_data["identification"]["primary"]["identifier_domain"] == "domain" + assert response_data["identification"]["primary"]["identifier_link"] == "link" + assert response_data["identification"]["secondary"][0]["identifier"] == "test" + assert response_data["identification"]["secondary"][0]["identifier_type"] == "test" + assert ( + response_data["identification"]["secondary"][0]["identifier_domain"] + == "dodfasdfmain" + ) + assert response_data["identification"]["secondary"][0]["identifier_link"] == "link" + + assert response_data["description"]["brief_summary"] == "brief_summary" + assert ( + response_data["description"]["detailed_description"] == "detailed_description" + ) admin_response = _admin_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "admin condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "condition_uri": "condition", - } - ], + f"/study/{study_id}/metadata/description", + json={ + "conditions": [ + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + 
"identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, + }, ) assert admin_response.status_code == 201 admin_response_data = json.loads(admin_response.data) - pytest.global_admin_conditions_id_admin = admin_response_data[0]["id"] + pytest.global_identification_id_admin = admin_response_data["identification"][ + "secondary" + ][1]["id"] + pytest.global_admin_keywords_id_admin = admin_response_data["keywords"][0]["id"] + pytest.global_admin_conditions_id_admin = admin_response_data["conditions"][0]["id"] + + assert admin_response_data["conditions"][0]["name"] == "condition" + assert ( + admin_response_data["conditions"][0]["classification_code"] + == "classification code" + ) + assert admin_response_data["conditions"][0]["scheme"] == "scheme" + assert admin_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data["conditions"][0]["condition_uri"] == "condition" + + assert admin_response_data["keywords"][0]["name"] == "keywords" + assert ( + admin_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert admin_response_data["keywords"][0]["scheme"] == "scheme" + assert admin_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data["keywords"][0]["keyword_uri"] == "keywords" + assert admin_response_data["identification"]["primary"]["identifier"] == "first" + assert admin_response_data["identification"]["primary"]["identifier_type"] == "test" + assert ( + admin_response_data["identification"]["primary"]["identifier_domain"] + == "domain" + ) + assert admin_response_data["identification"]["primary"]["identifier_link"] == "link" + assert admin_response_data["identification"]["secondary"][0]["identifier"] == "test" + assert ( + 
admin_response_data["identification"]["secondary"][0]["identifier_type"] + == "test" + ) + assert ( + admin_response_data["identification"]["secondary"][0]["identifier_domain"] + == "dodfasdfmain" + ) + assert ( + admin_response_data["identification"]["secondary"][0]["identifier_link"] + == "link" + ) - assert admin_response_data[0]["name"] == "admin condition" - assert admin_response_data[0]["classification_code"] == "classification code" - assert admin_response_data[0]["scheme"] == "scheme" - assert admin_response_data[0]["scheme_uri"] == "scheme uri" - assert admin_response_data[0]["condition_uri"] == "condition" + assert admin_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + admin_response_data["description"]["detailed_description"] + == "detailed_description" + ) editor_response = _editor_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "editor condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "condition_uri": "condition", - } - ], + f"/study/{study_id}/metadata/description", + json={ + "conditions": [ + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, + }, ) assert editor_response.status_code == 201 editor_response_data = 
json.loads(editor_response.data) - pytest.global_editor_conditions_id_editor = editor_response_data[0]["id"] - assert editor_response_data[0]["name"] == "editor condition" - assert editor_response_data[0]["classification_code"] == "classification code" - assert editor_response_data[0]["scheme"] == "scheme" - assert editor_response_data[0]["scheme_uri"] == "scheme uri" - assert editor_response_data[0]["condition_uri"] == "condition" + pytest.global_identification_id_editor = editor_response_data["identification"][ + "secondary" + ][2]["id"] + pytest.global_editor_keywords_id_editor = editor_response_data["keywords"][0]["id"] + pytest.global_editor_conditions_id_editor = editor_response_data["conditions"][0][ + "id" + ] + + assert editor_response_data["conditions"][0]["name"] == "condition" + assert ( + editor_response_data["conditions"][0]["classification_code"] + == "classification code" + ) + assert editor_response_data["conditions"][0]["scheme"] == "scheme" + assert editor_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data["conditions"][0]["condition_uri"] == "condition" + + assert editor_response_data["keywords"][0]["name"] == "keywords" + assert ( + editor_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert editor_response_data["keywords"][0]["scheme"] == "scheme" + assert editor_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data["keywords"][0]["keyword_uri"] == "keywords" + assert editor_response_data["identification"]["primary"]["identifier"] == "first" + assert ( + editor_response_data["identification"]["primary"]["identifier_type"] == "test" + ) + assert ( + editor_response_data["identification"]["primary"]["identifier_domain"] + == "domain" + ) + assert ( + editor_response_data["identification"]["primary"]["identifier_link"] == "link" + ) + assert ( + editor_response_data["identification"]["secondary"][0]["identifier"] == "test" + ) + 
assert ( + editor_response_data["identification"]["secondary"][0]["identifier_type"] + == "test" + ) + assert ( + editor_response_data["identification"]["secondary"][0]["identifier_domain"] + == "dodfasdfmain" + ) + assert ( + editor_response_data["identification"]["secondary"][0]["identifier_link"] + == "link" + ) + + assert editor_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + editor_response_data["description"]["detailed_description"] + == "detailed_description" + ) viewer_response = _viewer_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "editor condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "condition_uri": "condition", - } - ], + f"/study/{study_id}/metadata/description", + json={ + "conditions": [ + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, + }, ) assert viewer_response.status_code == 403 -def test_get_conditions_metadata(clients): +def test_get_description_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) - THEN check that the response is valid and retrieves the conditions metadata + WHEN the 
'/study/{study_id}/metadata/description' endpoint is requested (GET) + THEN check that the response is valid and retrieves the description metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/conditions") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/conditions") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/conditions") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/conditions") + response = _logged_in_client.get(f"/study/{study_id}/metadata/description") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/description") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/description") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/description") assert response.status_code == 200 assert admin_response.status_code == 200 @@ -1023,323 +1890,139 @@ def test_get_conditions_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data[0]["name"] == "condition" - assert response_data[0]["classification_code"] == "classification code" - assert response_data[0]["scheme"] == "scheme" - assert response_data[0]["scheme_uri"] == "scheme uri" - assert response_data[0]["condition_uri"] == "condition" - - assert admin_response_data[0]["name"] == "condition" - assert admin_response_data[0]["classification_code"] == "classification code" - assert admin_response_data[0]["scheme"] == "scheme" - assert admin_response_data[0]["scheme_uri"] == "scheme uri" - assert admin_response_data[0]["condition_uri"] == "condition" - - assert editor_response_data[0]["name"] == "condition" - assert editor_response_data[0]["classification_code"] == "classification code" - assert editor_response_data[0]["scheme"] == "scheme" - assert 
editor_response_data[0]["scheme_uri"] == "scheme uri" - assert editor_response_data[0]["condition_uri"] == "condition" - - assert viewer_response_data[0]["name"] == "condition" - assert viewer_response_data[0]["classification_code"] == "classification code" - assert viewer_response_data[0]["scheme"] == "scheme" - assert viewer_response_data[0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[0]["condition_uri"] == "condition" - - assert response_data[1]["name"] == "admin condition" - assert response_data[1]["classification_code"] == "classification code" - assert response_data[1]["scheme"] == "scheme" - assert response_data[1]["scheme_uri"] == "scheme uri" - assert response_data[1]["condition_uri"] == "condition" - - assert admin_response_data[1]["name"] == "admin condition" - assert admin_response_data[1]["classification_code"] == "classification code" - assert admin_response_data[1]["scheme"] == "scheme" - assert admin_response_data[1]["scheme_uri"] == "scheme uri" - assert admin_response_data[1]["condition_uri"] == "condition" - - assert editor_response_data[1]["name"] == "admin condition" - assert editor_response_data[1]["classification_code"] == "classification code" - assert editor_response_data[1]["scheme"] == "scheme" - assert editor_response_data[1]["scheme_uri"] == "scheme uri" - assert editor_response_data[1]["condition_uri"] == "condition" - - assert viewer_response_data[1]["name"] == "admin condition" - assert viewer_response_data[1]["classification_code"] == "classification code" - assert viewer_response_data[1]["scheme"] == "scheme" - assert viewer_response_data[1]["scheme_uri"] == "scheme uri" - assert viewer_response_data[1]["condition_uri"] == "condition" - - assert response_data[2]["name"] == "editor condition" - assert response_data[2]["classification_code"] == "classification code" - assert response_data[2]["scheme"] == "scheme" - assert response_data[2]["scheme_uri"] == "scheme uri" - assert response_data[2]["condition_uri"] == 
"condition" - - assert admin_response_data[2]["name"] == "editor condition" - assert admin_response_data[2]["classification_code"] == "classification code" - assert admin_response_data[2]["scheme"] == "scheme" - assert admin_response_data[2]["scheme_uri"] == "scheme uri" - assert admin_response_data[2]["condition_uri"] == "condition" - - assert editor_response_data[2]["name"] == "editor condition" - assert editor_response_data[2]["classification_code"] == "classification code" - assert editor_response_data[2]["scheme"] == "scheme" - assert editor_response_data[2]["scheme_uri"] == "scheme uri" - assert editor_response_data[2]["condition_uri"] == "condition" - - assert viewer_response_data[2]["name"] == "editor condition" - assert viewer_response_data[2]["classification_code"] == "classification code" - assert viewer_response_data[2]["scheme"] == "scheme" - assert viewer_response_data[2]["scheme_uri"] == "scheme uri" - assert viewer_response_data[2]["condition_uri"] == "condition" - - -def test_delete_conditions_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) - THEN check that the response is valid and retrieves the identification metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - conditions_id = pytest.global_conditions_id - admin_conditions_id = pytest.global_admin_conditions_id_admin - editor_conditions_id = pytest.global_editor_conditions_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/conditions/{conditions_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/metadata/conditions/{conditions_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/metadata/conditions/{admin_conditions_id}" - ) - editor_response = _editor_client.delete( - 
f"/study/{study_id}/metadata/conditions/{editor_conditions_id}" + assert response_data["description"]["brief_summary"] == "brief_summary" + assert ( + response_data["description"]["detailed_description"] == "detailed_description" ) - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- KEYWORDS METADATA ------------------- # -def test_post_keywords_metadata(clients): - """ - GIVEN a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (POST) - THEN check that the response is valid and creates the keywords metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + assert admin_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + admin_response_data["description"]["detailed_description"] + == "detailed_description" ) - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_keywords_id = response_data[0]["id"] - - assert response_data[0]["name"] == "keywords" - assert response_data[0]["classification_code"] == "classification code" - assert response_data[0]["scheme"] == "scheme" - assert response_data[0]["scheme_uri"] == "scheme uri" - assert response_data[0]["keyword_uri"] == "keywords" - admin_response = _admin_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "admin keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + assert 
editor_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + editor_response_data["description"]["detailed_description"] + == "detailed_description" ) - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_admin_keywords_id_admin = admin_response_data[0]["id"] - assert admin_response_data[0]["name"] == "admin keywords" - assert admin_response_data[0]["classification_code"] == "classification code" - assert admin_response_data[0]["scheme"] == "scheme" - assert admin_response_data[0]["scheme_uri"] == "scheme uri" - assert admin_response_data[0]["keyword_uri"] == "keywords" - - editor_response = _editor_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "editor keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + assert viewer_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + viewer_response_data["description"]["detailed_description"] + == "detailed_description" ) - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_editor_keywords_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["name"] == "editor keywords" - assert editor_response_data[0]["classification_code"] == "classification code" - assert editor_response_data[0]["scheme"] == "scheme" - assert editor_response_data[0]["scheme_uri"] == "scheme uri" - assert editor_response_data[0]["keyword_uri"] == "keywords" + assert response_data["conditions"][0]["name"] == "condition" + assert ( + response_data["conditions"][0]["classification_code"] == "classification code" + ) + assert response_data["conditions"][0]["scheme"] == "scheme" + assert response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert response_data["conditions"][0]["condition_uri"] == "condition" - viewer_response = 
_viewer_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "editor keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + assert admin_response_data["conditions"][0]["name"] == "condition" + assert ( + admin_response_data["conditions"][0]["classification_code"] + == "classification code" ) + assert admin_response_data["conditions"][0]["scheme"] == "scheme" + assert admin_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data["conditions"][0]["condition_uri"] == "condition" - assert viewer_response.status_code == 403 + assert editor_response_data["conditions"][0]["name"] == "condition" + assert ( + editor_response_data["conditions"][0]["classification_code"] + == "classification code" + ) + assert editor_response_data["conditions"][0]["scheme"] == "scheme" + assert editor_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data["conditions"][0]["condition_uri"] == "condition" + assert viewer_response_data["conditions"][0]["name"] == "condition" + assert ( + viewer_response_data["conditions"][0]["classification_code"] + == "classification code" + ) + assert viewer_response_data["conditions"][0]["scheme"] == "scheme" + assert viewer_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert viewer_response_data["conditions"][0]["condition_uri"] == "condition" -def test_get_keywords_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (GET) - THEN check that the response is valid and retrieves the keywords metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore + assert response_data["keywords"][0]["name"] == "keywords" + assert 
response_data["keywords"][0]["classification_code"] == "classification code" + assert response_data["keywords"][0]["scheme"] == "scheme" + assert response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert response_data["keywords"][0]["keyword_uri"] == "keywords" - response = _logged_in_client.get(f"/study/{study_id}/metadata/keywords") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/keywords") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/keywords") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/keywords") + assert admin_response_data["keywords"][0]["name"] == "keywords" + assert ( + admin_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert admin_response_data["keywords"][0]["scheme"] == "scheme" + assert admin_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data["keywords"][0]["keyword_uri"] == "keywords" - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 + assert editor_response_data["keywords"][0]["name"] == "keywords" + assert ( + editor_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert editor_response_data["keywords"][0]["scheme"] == "scheme" + assert editor_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data["keywords"][0]["keyword_uri"] == "keywords" - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) + assert viewer_response_data["keywords"][0]["name"] == "keywords" + assert ( + viewer_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert viewer_response_data["keywords"][0]["scheme"] == "scheme" + assert 
viewer_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert viewer_response_data["keywords"][0]["keyword_uri"] == "keywords" - assert response_data[0]["name"] == "keywords" - assert response_data[0]["classification_code"] == "classification code" - assert response_data[0]["scheme"] == "scheme" - assert response_data[0]["scheme_uri"] == "scheme uri" - assert response_data[0]["keyword_uri"] == "keywords" - - assert admin_response_data[0]["name"] == "keywords" - assert admin_response_data[0]["classification_code"] == "classification code" - assert admin_response_data[0]["scheme"] == "scheme" - assert admin_response_data[0]["scheme_uri"] == "scheme uri" - assert admin_response_data[0]["keyword_uri"] == "keywords" - - assert editor_response_data[0]["name"] == "keywords" - assert editor_response_data[0]["classification_code"] == "classification code" - assert editor_response_data[0]["scheme"] == "scheme" - assert editor_response_data[0]["scheme_uri"] == "scheme uri" - assert editor_response_data[0]["keyword_uri"] == "keywords" - - assert viewer_response_data[0]["name"] == "keywords" - assert viewer_response_data[0]["classification_code"] == "classification code" - assert viewer_response_data[0]["scheme"] == "scheme" - assert viewer_response_data[0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[0]["keyword_uri"] == "keywords" - - assert response_data[1]["name"] == "admin keywords" - assert response_data[1]["classification_code"] == "classification code" - assert response_data[1]["scheme"] == "scheme" - assert response_data[1]["scheme_uri"] == "scheme uri" - assert response_data[1]["keyword_uri"] == "keywords" - - assert admin_response_data[1]["name"] == "admin keywords" - assert admin_response_data[1]["classification_code"] == "classification code" - assert admin_response_data[1]["scheme"] == "scheme" - assert admin_response_data[1]["scheme_uri"] == "scheme uri" - assert admin_response_data[1]["keyword_uri"] == "keywords" - - assert 
editor_response_data[1]["name"] == "admin keywords" - assert editor_response_data[1]["classification_code"] == "classification code" - assert editor_response_data[1]["scheme"] == "scheme" - assert editor_response_data[1]["scheme_uri"] == "scheme uri" - assert editor_response_data[1]["keyword_uri"] == "keywords" - - assert viewer_response_data[1]["name"] == "admin keywords" - assert viewer_response_data[1]["classification_code"] == "classification code" - assert viewer_response_data[1]["scheme"] == "scheme" - assert viewer_response_data[1]["scheme_uri"] == "scheme uri" - assert viewer_response_data[1]["keyword_uri"] == "keywords" - - assert response_data[2]["name"] == "editor keywords" - assert response_data[2]["classification_code"] == "classification code" - assert response_data[2]["scheme"] == "scheme" - assert response_data[2]["scheme_uri"] == "scheme uri" - assert response_data[2]["keyword_uri"] == "keywords" - - assert admin_response_data[2]["name"] == "editor keywords" - assert admin_response_data[2]["classification_code"] == "classification code" - assert admin_response_data[2]["scheme"] == "scheme" - assert admin_response_data[2]["scheme_uri"] == "scheme uri" - assert admin_response_data[2]["keyword_uri"] == "keywords" - - assert editor_response_data[2]["name"] == "editor keywords" - assert editor_response_data[2]["classification_code"] == "classification code" - assert editor_response_data[2]["scheme"] == "scheme" - assert editor_response_data[2]["scheme_uri"] == "scheme uri" - assert editor_response_data[2]["keyword_uri"] == "keywords" - - assert viewer_response_data[2]["name"] == "editor keywords" - assert viewer_response_data[2]["classification_code"] == "classification code" - assert viewer_response_data[2]["scheme"] == "scheme" - assert viewer_response_data[2]["scheme_uri"] == "scheme uri" - assert viewer_response_data[2]["keyword_uri"] == "keywords" + assert response_data["identification"]["primary"]["identifier"] == "first" + assert 
response_data["identification"]["primary"]["identifier_type"] == "test" + assert response_data["identification"]["primary"]["identifier_domain"] == "domain" + assert response_data["identification"]["primary"]["identifier_link"] == "link" + assert response_data["identification"]["secondary"][0]["identifier"] == "test" + assert response_data["identification"]["secondary"][0]["identifier_type"] == "test" + assert ( + response_data["identification"]["secondary"][0]["identifier_domain"] + == "dodfasdfmain" + ) + assert response_data["identification"]["secondary"][0]["identifier_link"] == "link" + assert response_data["identification"]["secondary"][1]["identifier"] == "test" + assert response_data["identification"]["secondary"][1]["identifier_type"] == "test" -def test_delete_keywords_metadata(clients): +# ------------------- IDENTIFICATION METADATA ------------------- # +def test_delete_identification_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (GET) + WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) THEN check that the response is valid and retrieves the identification metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - keywords_id = pytest.global_keywords_id - admin_keywords_id = pytest.global_admin_keywords_id_admin - editor_keywords_id = pytest.global_editor_keywords_id_editor + identification_id = pytest.global_identification_id + admin_identification_id = pytest.global_identification_id_admin + editor_identification_id = pytest.global_identification_id_editor viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/keywords/{keywords_id}" + f"/study/{study_id}/metadata/identification/{identification_id}" ) + response = _logged_in_client.delete( - f"/study/{study_id}/metadata/keywords/{keywords_id}" + 
f"/study/{study_id}/metadata/identification/{identification_id}" ) + admin_response = _admin_client.delete( - f"/study/{study_id}/metadata/keywords/{admin_keywords_id}" + f"/study/{study_id}/metadata/identification/{admin_identification_id}" ) + editor_response = _editor_client.delete( - f"/study/{study_id}/metadata/keywords/{editor_keywords_id}" + f"/study/{study_id}/metadata/identification/{editor_identification_id}" ) assert viewer_response.status_code == 403 @@ -1348,104 +2031,68 @@ def test_delete_keywords_metadata(clients): assert editor_response.status_code == 204 -# ------------------- DESCRIPTION METADATA ------------------- # -def test_put_description_metadata(clients): +# ------------------- CONDITIONS METADATA ------------------- # +def test_delete_conditions_metadata(clients): """ - GIVEN a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/description' endpoint is requested (POST) - THEN check that the response is valid and creates the description metadata + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) + THEN check that the response is valid and retrieves the identification metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore + conditions_id = pytest.global_conditions_id + admin_conditions_id = pytest.global_admin_conditions_id_admin + editor_conditions_id = pytest.global_editor_conditions_id_editor - response = _logged_in_client.put( - f"/study/{study_id}/metadata/description", - json={ - "brief_summary": "brief_summary", - "detailed_description": "detailed_description", - }, + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/conditions/{conditions_id}" ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["brief_summary"] == "brief_summary" - 
assert response_data["detailed_description"] == "detailed_description" - - admin_response = _admin_client.put( - f"/study/{study_id}/metadata/description", - json={ - "brief_summary": "admin-brief_summary", - "detailed_description": "admin-detailed_description", - }, + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/conditions/{conditions_id}" ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["brief_summary"] == "admin-brief_summary" - assert admin_response_data["detailed_description"] == "admin-detailed_description" - - editor_response = _editor_client.put( - f"/study/{study_id}/metadata/description", - json={ - "brief_summary": "editor-brief_summary", - "detailed_description": "editor-detailed_description", - }, + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/conditions/{admin_conditions_id}" ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["brief_summary"] == "editor-brief_summary" - assert editor_response_data["detailed_description"] == "editor-detailed_description" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/metadata/description", - json={ - "brief_summary": "viewer-brief_summary", - "detailed_description": "viewer-detailed_description", - }, + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/conditions/{editor_conditions_id}" ) assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_get_description_metadata(clients): +# ------------------- KEYWORDS METADATA ------------------- # +def test_delete_keywords_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/description' endpoint is requested (GET) - THEN check that 
the response is valid and retrieves the description metadata + WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (GET) + THEN check that the response is valid and retrieves the identification metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore + keywords_id = pytest.global_keywords_id + admin_keywords_id = pytest.global_admin_keywords_id_admin + editor_keywords_id = pytest.global_editor_keywords_id_editor - response = _logged_in_client.get(f"/study/{study_id}/metadata/description") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/description") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/description") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/description") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data["brief_summary"] == "editor-brief_summary" - assert response_data["detailed_description"] == "editor-detailed_description" - - assert admin_response_data["brief_summary"] == "editor-brief_summary" - assert admin_response_data["detailed_description"] == "editor-detailed_description" - - assert editor_response_data["brief_summary"] == "editor-brief_summary" - assert editor_response_data["detailed_description"] == "editor-detailed_description" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/keywords/{keywords_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/keywords/{keywords_id}" + ) + admin_response = _admin_client.delete( + 
f"/study/{study_id}/metadata/keywords/{admin_keywords_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/keywords/{editor_keywords_id}" + ) - assert viewer_response_data["brief_summary"] == "editor-brief_summary" - assert viewer_response_data["detailed_description"] == "editor-detailed_description" + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- DESIGN METADATA ------------------- # @@ -1961,273 +2608,13 @@ def test_get_eligibility_metadata(clients): assert viewer_response_data["gender_description"] == "editor-none" assert viewer_response_data["minimum_age_value"] == 18 assert viewer_response_data["maximum_age_value"] == 61 - assert viewer_response_data["minimum_age_unit"] == "1" - assert viewer_response_data["maximum_age_unit"] == "2" - assert viewer_response_data["healthy_volunteers"] == "Yes" - assert viewer_response_data["inclusion_criteria"] == ["tests"] - assert viewer_response_data["exclusion_criteria"] == ["Probability Sample"] - assert viewer_response_data["study_population"] == "study_population" - assert viewer_response_data["sampling_method"] == "Probability Sample" - - -# ------------------- IDENTIFICATION METADATA ------------------- # -def test_post_identification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (POST) - THEN check that the response is valid and creates the identification metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.post( - f"/study/{study_id}/metadata/identification", - json={ - "primary": { - "identifier": "first", - "identifier_type": "test", - "identifier_domain": "domain", - "identifier_link": "link", - }, 
- "secondary": [ - { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", - } - ], - }, - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_identification_id = response_data["secondary"][0]["id"] - - assert response_data["primary"]["identifier"] == "first" - assert response_data["primary"]["identifier_type"] == "test" - assert response_data["primary"]["identifier_domain"] == "domain" - assert response_data["primary"]["identifier_link"] == "link" - assert response_data["secondary"][0]["identifier"] == "test" - assert response_data["secondary"][0]["identifier_type"] == "test" - assert response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert response_data["secondary"][0]["identifier_link"] == "link" - - admin_response = _admin_client.post( - f"/study/{study_id}/metadata/identification", - json={ - "primary": { - "identifier": "admin-first", - "identifier_type": "test", - "identifier_domain": "domain", - "identifier_link": "link", - }, - "secondary": [ - { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", - } - ], - }, - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_identification_id_admin = admin_response_data["secondary"][1]["id"] - - assert admin_response_data["primary"]["identifier"] == "admin-first" - assert admin_response_data["primary"]["identifier_type"] == "test" - assert admin_response_data["primary"]["identifier_domain"] == "domain" - assert admin_response_data["primary"]["identifier_link"] == "link" - assert admin_response_data["secondary"][1]["identifier"] == "test" - assert admin_response_data["secondary"][1]["identifier_type"] == "test" - 
assert admin_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert admin_response_data["secondary"][1]["identifier_link"] == "link" - - editor_response = _editor_client.post( - f"/study/{study_id}/metadata/identification", - json={ - "primary": { - "identifier": "editor-first", - "identifier_type": "test", - "identifier_domain": "domain", - "identifier_link": "link", - }, - "secondary": [ - { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", - } - ], - }, - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_identification_id_editor = editor_response_data["secondary"][2]["id"] - - assert editor_response_data["primary"]["identifier"] == "editor-first" - assert editor_response_data["primary"]["identifier_type"] == "test" - assert editor_response_data["primary"]["identifier_domain"] == "domain" - assert editor_response_data["primary"]["identifier_link"] == "link" - assert editor_response_data["secondary"][2]["identifier"] == "test" - assert editor_response_data["secondary"][2]["identifier_type"] == "test" - assert editor_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert editor_response_data["secondary"][2]["identifier_link"] == "link" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/metadata/identification", - json={ - "primary": { - "identifier": "viewer-first", - "identifier_type": "test", - "identifier_domain": "domain", - "identifier_link": "link", - }, - "secondary": [ - { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", - } - ], - }, - ) - - assert viewer_response.status_code == 403 - - -def test_get_identification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) 
- THEN check that the response is valid and retrieves the identification metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/identification") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/identification") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/identification") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/identification") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data["primary"]["identifier"] == "editor-first" - assert response_data["primary"]["identifier_type"] == "test" - assert response_data["primary"]["identifier_domain"] == "domain" - assert response_data["primary"]["identifier_link"] == "link" - assert response_data["secondary"][0]["identifier"] == "test" - assert response_data["secondary"][0]["identifier_type"] == "test" - assert response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert response_data["secondary"][0]["identifier_link"] == "link" - assert response_data["secondary"][1]["identifier"] == "test" - assert response_data["secondary"][1]["identifier_type"] == "test" - assert response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert response_data["secondary"][1]["identifier_link"] == "link" - assert response_data["secondary"][2]["identifier"] == "test" - assert response_data["secondary"][2]["identifier_type"] == "test" - assert response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert 
response_data["secondary"][2]["identifier_link"] == "link" - - assert admin_response_data["primary"]["identifier"] == "editor-first" - assert admin_response_data["primary"]["identifier_type"] == "test" - assert admin_response_data["primary"]["identifier_domain"] == "domain" - assert admin_response_data["primary"]["identifier_link"] == "link" - assert admin_response_data["secondary"][0]["identifier"] == "test" - assert admin_response_data["secondary"][0]["identifier_type"] == "test" - assert admin_response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert admin_response_data["secondary"][0]["identifier_link"] == "link" - assert admin_response_data["secondary"][1]["identifier"] == "test" - assert admin_response_data["secondary"][1]["identifier_type"] == "test" - assert admin_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert admin_response_data["secondary"][1]["identifier_link"] == "link" - assert admin_response_data["secondary"][2]["identifier"] == "test" - assert admin_response_data["secondary"][2]["identifier_type"] == "test" - assert admin_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert admin_response_data["secondary"][2]["identifier_link"] == "link" - - assert editor_response_data["primary"]["identifier"] == "editor-first" - assert editor_response_data["primary"]["identifier_type"] == "test" - assert editor_response_data["primary"]["identifier_domain"] == "domain" - assert editor_response_data["primary"]["identifier_link"] == "link" - assert editor_response_data["secondary"][0]["identifier"] == "test" - assert editor_response_data["secondary"][0]["identifier_type"] == "test" - assert editor_response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert editor_response_data["secondary"][0]["identifier_link"] == "link" - assert editor_response_data["secondary"][1]["identifier"] == "test" - assert editor_response_data["secondary"][1]["identifier_type"] == "test" - assert 
editor_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert editor_response_data["secondary"][1]["identifier_link"] == "link" - assert editor_response_data["secondary"][2]["identifier"] == "test" - assert editor_response_data["secondary"][2]["identifier_type"] == "test" - assert editor_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert editor_response_data["secondary"][2]["identifier_link"] == "link" - - assert viewer_response_data["primary"]["identifier"] == "editor-first" - assert viewer_response_data["primary"]["identifier_type"] == "test" - assert viewer_response_data["primary"]["identifier_domain"] == "domain" - assert viewer_response_data["primary"]["identifier_link"] == "link" - assert viewer_response_data["secondary"][0]["identifier"] == "test" - assert viewer_response_data["secondary"][0]["identifier_type"] == "test" - assert viewer_response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert viewer_response_data["secondary"][0]["identifier_link"] == "link" - assert viewer_response_data["secondary"][1]["identifier"] == "test" - assert viewer_response_data["secondary"][1]["identifier_type"] == "test" - assert viewer_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert viewer_response_data["secondary"][1]["identifier_link"] == "link" - assert viewer_response_data["secondary"][2]["identifier"] == "test" - assert viewer_response_data["secondary"][2]["identifier_type"] == "test" - assert viewer_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert viewer_response_data["secondary"][2]["identifier_link"] == "link" - - -def test_delete_identification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) - THEN check that the response is valid and retrieves the identification metadata - """ - _logged_in_client, _admin_client, 
_editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - identification_id = pytest.global_identification_id - admin_identification_id = pytest.global_identification_id_admin - editor_identification_id = pytest.global_identification_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/identification/{identification_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/metadata/identification/{identification_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/metadata/identification/{admin_identification_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/metadata/identification/{editor_identification_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 + assert viewer_response_data["minimum_age_unit"] == "1" + assert viewer_response_data["maximum_age_unit"] == "2" + assert viewer_response_data["healthy_volunteers"] == "Yes" + assert viewer_response_data["inclusion_criteria"] == ["tests"] + assert viewer_response_data["exclusion_criteria"] == ["Probability Sample"] + assert viewer_response_data["study_population"] == "study_population" + assert viewer_response_data["sampling_method"] == "Probability Sample" # ------------------- INTERVENTION METADATA ------------------- # @@ -2835,7 +3222,6 @@ def test_get_overall_official_metadata(clients): admin_response_data = json.loads(admin_response.data) editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data[0]["first_name"] == "test" assert response_data[0]["last_name"] == "test" assert response_data[0]["affiliation"] == "aff" @@ -3146,434 +3532,6 @@ def test_get_oversight_metadata(clients): assert viewer_response_data["human_subject_review_status"] == "yes" -# ------------------- SPONSORS 
METADATA ------------------- # -def test_put_sponsors_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/sponsors' endpoint is requested (PUT) - THEN check that the response is valid and updates the sponsors metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.put( - f"/study/{study_id}/metadata/sponsor", - json={ - "lead_sponsor_identifier_scheme": "scheme", - "lead_sponsor_identifier_scheme_uri": "uri", - "responsible_party_type": "Sponsor", - "responsible_party_investigator_first_name": "name", - "responsible_party_investigator_last_name": "surname", - "responsible_party_investigator_title": "title", - "responsible_party_investigator_identifier_value": "identifier", - "responsible_party_investigator_identifier_scheme": "scheme", - "responsible_party_investigator_identifier_scheme_uri": "uri", - "responsible_party_investigator_affiliation_name": "affiliation", - "responsible_party_investigator_affiliation_identifier_value": "identifier", - "responsible_party_investigator_affiliation_identifier_scheme": "scheme", - "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", - "lead_sponsor_name": "name", - "lead_sponsor_identifier": "identifier", - }, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["responsible_party_type"] == "Sponsor" - assert response_data["responsible_party_investigator_first_name"] == "name" - assert response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - response_data["responsible_party_investigator_identifier_value"] == "identifier" - ) - assert response_data["responsible_party_investigator_identifier_scheme"] == "scheme" - assert ( - 
response_data["responsible_party_investigator_identifier_scheme_uri"] == "uri" - ) - assert ( - response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - response_data["responsible_party_investigator_affiliation_identifier_value"] - == "identifier" - ) - assert ( - response_data["responsible_party_investigator_affiliation_identifier_scheme"] - == "scheme" - ) - assert ( - response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert response_data["lead_sponsor_name"] == "name" - assert response_data["lead_sponsor_identifier"] == "identifier" - assert response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - admin_response = _admin_client.put( - f"/study/{study_id}/metadata/sponsor", - json={ - "lead_sponsor_identifier_scheme": "scheme", - "lead_sponsor_identifier_scheme_uri": "uri", - "responsible_party_type": "Sponsor", - "responsible_party_investigator_first_name": "name", - "responsible_party_investigator_last_name": "surname", - "responsible_party_investigator_title": "title", - "responsible_party_investigator_identifier_value": "identifier", - "responsible_party_investigator_identifier_scheme": "scheme", - "responsible_party_investigator_identifier_scheme_uri": "uri", - "responsible_party_investigator_affiliation_name": "affiliation", - "responsible_party_investigator_affiliation_identifier_value": "identifier", - "responsible_party_investigator_affiliation_identifier_scheme": "scheme", - "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", - "lead_sponsor_name": "name", - "lead_sponsor_identifier": "identifier", - }, - ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["responsible_party_type"] == "Sponsor" - assert admin_response_data["responsible_party_investigator_first_name"] == "name" 
- assert admin_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - admin_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - admin_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - admin_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - admin_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - admin_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == "scheme" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert admin_response_data["lead_sponsor_name"] == "name" - assert admin_response_data["lead_sponsor_identifier"] == "identifier" - assert admin_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert admin_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - editor_response = _editor_client.put( - f"/study/{study_id}/metadata/sponsor", - json={ - "lead_sponsor_identifier_scheme": "scheme", - "lead_sponsor_identifier_scheme_uri": "uri", - "responsible_party_type": "Sponsor", - "responsible_party_investigator_first_name": "name", - "responsible_party_investigator_last_name": "surname", - "responsible_party_investigator_title": "title", - "responsible_party_investigator_identifier_value": "identifier", - "responsible_party_investigator_identifier_scheme": "scheme", - "responsible_party_investigator_identifier_scheme_uri": "uri", - "responsible_party_investigator_affiliation_name": "affiliation", - "responsible_party_investigator_affiliation_identifier_value": "identifier", - 
"responsible_party_investigator_affiliation_identifier_scheme": "scheme", - "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", - "lead_sponsor_name": "name", - "lead_sponsor_identifier": "identifier", - }, - ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["responsible_party_type"] == "Sponsor" - assert editor_response_data["responsible_party_investigator_first_name"] == "name" - assert editor_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - editor_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - editor_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - editor_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - editor_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - editor_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == "scheme" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert editor_response_data["lead_sponsor_name"] == "name" - assert editor_response_data["lead_sponsor_identifier"] == "identifier" - assert editor_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert editor_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/metadata/sponsor", - json={ - "responsible_party_type": "Sponsor", - "responsible_party_investigator_first_name": "name", - "responsible_party_investigator_last_name": 
"surname", - "responsible_party_investigator_title": "title", - "responsible_party_investigator_identifier_value": "identifier", - "responsible_party_investigator_identifier_scheme": "scheme", - "responsible_party_investigator_identifier_scheme_uri": "uri", - "responsible_party_investigator_affiliation_name": "affiliation", - "responsible_party_investigator_affiliation_identifier_value": "identifier", - "responsible_party_investigator_affiliation_identifier_scheme": "scheme", - "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", - "lead_sponsor_name": "name", - "lead_sponsor_identifier": "identifier", - "lead_sponsor_identifier_scheme": "scheme", - "lead_sponsor_identifier_scheme_uri": "uri", - }, - ) - - assert viewer_response.status_code == 403 - - -def test_get_sponsors_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/sponsors' endpoint is requested (GET) - THEN check that the response is valid and retrieves the sponsors metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/sponsor") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/sponsor") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/sponsor") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/sponsor") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data["responsible_party_type"] == "Sponsor" - assert 
response_data["responsible_party_investigator_first_name"] == "name" - assert response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - - assert ( - response_data["responsible_party_investigator_identifier_value"] == "identifier" - ) - assert response_data["responsible_party_investigator_identifier_scheme"] == "scheme" - assert ( - response_data["responsible_party_investigator_identifier_scheme_uri"] == "uri" - ) - assert ( - response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - response_data["responsible_party_investigator_affiliation_identifier_value"] - == "identifier" - ) - assert ( - response_data["responsible_party_investigator_affiliation_identifier_scheme"] - == "scheme" - ) - assert ( - response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert response_data["lead_sponsor_name"] == "name" - assert response_data["lead_sponsor_identifier"] == "identifier" - assert response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - assert admin_response_data["responsible_party_type"] == "Sponsor" - assert admin_response_data["responsible_party_investigator_first_name"] == "name" - assert admin_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - admin_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - admin_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - admin_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - admin_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - admin_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - 
admin_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == "scheme" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert admin_response_data["lead_sponsor_name"] == "name" - assert admin_response_data["lead_sponsor_identifier"] == "identifier" - assert admin_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert admin_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - assert editor_response_data["responsible_party_type"] == "Sponsor" - assert editor_response_data["responsible_party_investigator_first_name"] == "name" - assert editor_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - editor_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - editor_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - editor_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - editor_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - editor_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == "scheme" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert editor_response_data["lead_sponsor_name"] == "name" - assert editor_response_data["lead_sponsor_identifier"] == "identifier" - assert editor_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert 
editor_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - assert viewer_response_data["responsible_party_type"] == "Sponsor" - assert viewer_response_data["responsible_party_investigator_first_name"] == "name" - assert viewer_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - viewer_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - viewer_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - viewer_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - viewer_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - viewer_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - viewer_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - viewer_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == "scheme" - ) - assert ( - viewer_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert viewer_response_data["lead_sponsor_name"] == "name" - assert viewer_response_data["lead_sponsor_identifier"] == "identifier" - assert viewer_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert viewer_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - # ------------------- STATUS METADATA ------------------- # def test_put_status_metadata(clients): """ diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_060_study_version_api.py similarity index 84% rename from tests/functional/test_study_version_api.py rename to tests/functional/test_060_study_version_api.py index 0b326d74..dcc0396f 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_060_study_version_api.py @@ -280,23 +280,47 @@ def 
test_get_version_study_metadata(clients): } ], ) - id_response = _logged_in_client.post( - f"/study/{study_id}/metadata/identification", + description_response = _logged_in_client.post( + f"/study/{study_id}/metadata/description", json={ - "primary": { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "domain", - "identifier_link": "link", - }, - "secondary": [ + "conditions": [ { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", } ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, }, ) intervention_response = _logged_in_client.post( @@ -310,40 +334,41 @@ def test_get_version_study_metadata(clients): } ], ) - collaborators_response = _logged_in_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], - ) - conditions_response = _logged_in_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "condition_uri": "condition", - } - ], - ) - keywords_response = 
_logged_in_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + team_response = _logged_in_client.post( + f"/study/{study_id}/metadata/team", + json={ + "collaborators": [ + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + "responsible_party_type": "Sponsor", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, ) of_response = _logged_in_client.post( @@ -368,12 +393,10 @@ def test_get_version_study_metadata(clients): assert arm_response.status_code == 201 assert cc_response.status_code == 201 assert location_response.status_code == 201 - assert id_response.status_code == 201 + assert description_response.status_code == 201 assert intervention_response.status_code == 201 + assert team_response.status_code == 201 assert 
of_response.status_code == 201 - assert collaborators_response.status_code == 201 - assert conditions_response.status_code == 201 - assert keywords_response.status_code == 201 response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata" @@ -405,7 +428,7 @@ def test_get_version_study_metadata(clients): assert response_data["collaborators"][0]["name"] == "collaborator1123" assert response_data["conditions"][0]["name"] == "condition" assert response_data["keywords"][0]["name"] == "keywords" - assert response_data["description"]["brief_summary"] == "editor-brief_summary" + assert response_data["description"]["brief_summary"] == "brief_summary" assert response_data["design"]["design_allocation"] == "editor-dfasdfasd" assert response_data["design"]["study_type"] == "Interventional" assert response_data["design"]["design_intervention_model"] == "Treatment" @@ -431,7 +454,7 @@ def test_get_version_study_metadata(clients): assert response_data["eligibility"]["sex"] == "All" assert response_data["eligibility"]["gender_based"] == "Yes" assert response_data["eligibility"]["maximum_age_value"] == 61 - assert response_data["primary_identifier"]["identifier"] == "test" + assert response_data["primary_identifier"]["identifier"] == "first" assert response_data["primary_identifier"]["identifier_type"] == "test" assert response_data["secondary_identifiers"][0]["identifier"] == "test" assert response_data["secondary_identifiers"][0]["identifier_type"] == "test" @@ -467,7 +490,7 @@ def test_get_version_study_metadata(clients): assert admin_response_data["collaborators"][0]["name"] == "collaborator1123" assert admin_response_data["conditions"][0]["name"] == "condition" assert admin_response_data["keywords"][0]["name"] == "keywords" - assert admin_response_data["description"]["brief_summary"] == "editor-brief_summary" + assert admin_response_data["description"]["brief_summary"] == "brief_summary" assert 
admin_response_data["design"]["design_allocation"] == "editor-dfasdfasd" assert admin_response_data["design"]["study_type"] == "Interventional" assert admin_response_data["design"]["design_intervention_model"] == "Treatment" @@ -495,7 +518,7 @@ def test_get_version_study_metadata(clients): assert admin_response_data["eligibility"]["sex"] == "All" assert admin_response_data["eligibility"]["gender_based"] == "Yes" assert admin_response_data["eligibility"]["maximum_age_value"] == 61 - assert admin_response_data["primary_identifier"]["identifier"] == "test" + assert admin_response_data["primary_identifier"]["identifier"] == "first" assert admin_response_data["primary_identifier"]["identifier_type"] == "test" assert admin_response_data["secondary_identifiers"][0]["identifier"] == "test" assert admin_response_data["secondary_identifiers"][0]["identifier_type"] == "test" @@ -534,9 +557,7 @@ def test_get_version_study_metadata(clients): assert editor_response_data["collaborators"][0]["name"] == "collaborator1123" assert editor_response_data["conditions"][0]["name"] == "condition" assert editor_response_data["keywords"][0]["name"] == "keywords" - assert ( - editor_response_data["description"]["brief_summary"] == "editor-brief_summary" - ) + assert editor_response_data["description"]["brief_summary"] == "brief_summary" assert editor_response_data["design"]["design_allocation"] == "editor-dfasdfasd" assert editor_response_data["design"]["study_type"] == "Interventional" assert editor_response_data["design"]["design_intervention_model"] == "Treatment" @@ -565,7 +586,7 @@ def test_get_version_study_metadata(clients): assert editor_response_data["eligibility"]["sex"] == "All" assert editor_response_data["eligibility"]["gender_based"] == "Yes" assert editor_response_data["eligibility"]["maximum_age_value"] == 61 - assert editor_response_data["primary_identifier"]["identifier"] == "test" + assert editor_response_data["primary_identifier"]["identifier"] == "first" assert 
editor_response_data["primary_identifier"]["identifier_type"] == "test" assert editor_response_data["secondary_identifiers"][0]["identifier"] == "test" assert editor_response_data["secondary_identifiers"][0]["identifier_type"] == "test" @@ -604,9 +625,7 @@ def test_get_version_study_metadata(clients): assert viewer_response_data["collaborators"][0]["name"] == "collaborator1123" assert viewer_response_data["conditions"][0]["name"] == "condition" assert viewer_response_data["keywords"][0]["name"] == "keywords" - assert ( - viewer_response_data["description"]["brief_summary"] == "editor-brief_summary" - ) + assert viewer_response_data["description"]["brief_summary"] == "brief_summary" assert viewer_response_data["design"]["design_allocation"] == "editor-dfasdfasd" assert viewer_response_data["design"]["study_type"] == "Interventional" assert viewer_response_data["design"]["design_intervention_model"] == "Treatment" @@ -635,7 +654,7 @@ def test_get_version_study_metadata(clients): assert viewer_response_data["eligibility"]["sex"] == "All" assert viewer_response_data["eligibility"]["gender_based"] == "Yes" assert viewer_response_data["eligibility"]["maximum_age_value"] == 61 - assert viewer_response_data["primary_identifier"]["identifier"] == "test" + assert viewer_response_data["primary_identifier"]["identifier"] == "first" assert viewer_response_data["primary_identifier"]["identifier_type"] == "test" assert viewer_response_data["secondary_identifiers"][0]["identifier"] == "test" assert viewer_response_data["secondary_identifiers"][0]["identifier_type"] == "test" @@ -677,91 +696,125 @@ def test_get_version_dataset_metadata(clients): dataset_id = pytest.global_dataset_id # type: ignore version_id = pytest.global_dataset_version_id # type: ignore - contributor_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - 
"name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - creator_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - date_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210101, "type": "Type", "information": "Info"}], + team_response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "editor Managing Organization Name", + "identifier": 
"identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, ) - funder_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", - json=[ - { - "name": "Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": "Identifier", - "identifier_scheme_uri": "Identifier Scheme URI", - "identifier_type": "Identifier Type", - } - ], + general_information_response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", + json={ + "titles": [{"title": "Title", "type": "Subtitle"}], + "descriptions": [{"description": "Owner Description", "type": "Methods"}], + "dates": [{"date": 20210101, "type": "Accepted", "information": "Info"}], + }, ) - rights_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Identifier", - "identifier_scheme": "Identifier Scheme", - "identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], + access_rights_response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", + json={ + "access": { + "type": "editor type", + "description": "editor description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], + }, ) - subject_response = _logged_in_client.post( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - "classification_code": "Classification Code", - "scheme": "Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Value URI", - } - ], + dataset_data_management_response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + json={ + "consent": { + "type": "test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "test", + }, + "deident": { + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Value URI", + } + ], + }, ) alt_identifier_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", @@ -786,13 +839,11 @@ def test_get_version_dataset_metadata(clients): } ], ) - assert contributor_response.status_code == 201 - assert creator_response.status_code == 201 - assert date_response.status_code == 201 - assert funder_response.status_code == 201 - assert rights_response.status_code == 201 - assert subject_response.status_code == 201 + assert team_response.status_code == 200 + assert access_rights_response.status_code == 200 assert alt_identifier_response.status_code == 201 + assert general_information_response.status_code == 200 + assert dataset_data_management_response.status_code == 200 assert related_identifier_response.status_code == 201 response = _logged_in_client.get( @@ -825,7 +876,7 @@ def test_get_version_dataset_metadata(clients): assert response_data["contributors"][0]["contributor_type"] == "Con Type" assert response_data["dates"][0]["date"] == "01-01-1970" - assert response_data["dates"][0]["type"] == "Type" + assert 
response_data["dates"][0]["type"] == "Accepted" assert response_data["creators"][0]["last_name"] == "Family Name here" assert response_data["creators"][0]["first_name"] == "Given Name here" @@ -875,7 +926,7 @@ def test_get_version_dataset_metadata(clients): assert admin_response_data["contributors"][0]["name_type"] == "Personal" assert admin_response_data["contributors"][0]["contributor_type"] == "Con Type" assert admin_response_data["dates"][0]["date"] == "01-01-1970" - assert admin_response_data["dates"][0]["type"] == "Type" + assert admin_response_data["dates"][0]["type"] == "Accepted" assert admin_response_data["creators"][0]["first_name"] == "Given Name here" assert admin_response_data["creators"][0]["last_name"] == "Family Name here" assert admin_response_data["creators"][0]["name_type"] == "Personal" @@ -919,7 +970,7 @@ def test_get_version_dataset_metadata(clients): assert editor_response_data["contributors"][0]["name_type"] == "Personal" assert editor_response_data["contributors"][0]["contributor_type"] == "Con Type" assert editor_response_data["dates"][0]["date"] == "01-01-1970" - assert editor_response_data["dates"][0]["type"] == "Type" + assert editor_response_data["dates"][0]["type"] == "Accepted" assert editor_response_data["creators"][0]["first_name"] == "Given Name here" assert editor_response_data["creators"][0]["last_name"] == "Family Name here" assert editor_response_data["creators"][0]["name_type"] == "Personal" diff --git a/tests/functional/test_070_user.py b/tests/functional/test_070_user.py new file mode 100644 index 00000000..067b5745 --- /dev/null +++ b/tests/functional/test_070_user.py @@ -0,0 +1,296 @@ +import datetime + +from apis.authentication import set_now +from model.db import db + +# ------------------- Password Change ------------------- # + + +def test_post_password_change(clients): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/change' endpoint is requested (PUT) + THEN check that the 
response is valid and the password is changed + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + + response = _logged_in_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) + a_response = _admin_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) + e_response = _editor_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) + v_response = _viewer_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) + + assert response.status_code == 200 + assert a_response.status_code == 200 + assert e_response.status_code == 200 + assert v_response.status_code == 200 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == "session": + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 0 + + +def test_post_password_login_invalid_old_password(clients): + """ + Given a Flask application configured for testing + WHEN the '/auth/login' endpoint is requested (POST) + THEN check that the response is an error when old password is provided + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + response = _logged_in_client.post( + "/auth/login", + json={ + "email_address": "test@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + a_response = _admin_client.post( + "/auth/login", + json={ + "email_address": "admin@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + e_response = 
_editor_client.post( + "/auth/login", + json={ + "email_address": "editor@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + v_response = _viewer_client.post( + "/auth/login", + json={ + "email_address": "viewer@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + assert response.status_code == 401 + assert a_response.status_code == 401 + assert e_response.status_code == 401 + assert v_response.status_code == 401 + + +def test_post_login_new_password(clients): + """ + Given a Flask application configured for testing + WHEN the '/auth/login' endpoint is requested (POST) + THEN check that the response is valid when new password is provided + """ + _logged_in_client = clients[0] + response = _logged_in_client.post( + "/auth/login", + json={ + "email_address": "test@fairhub.io", + "password": "Updatedpassword4testing!", + }, + ) + assert response.status_code == 200 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == "session": + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 1 + + +def test_post_reset_password(flask_app): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/reset-password' endpoint is requested (POST) + THEN check that the response is valid and the password is changed + """ + _test_client = flask_app.test_client() + + forgot_response = _test_client.post( + "/auth/forgot-password", + json={ + "email_address": "test@fairhub.io", + }, + ) + assert forgot_response.status_code == 200 + + token = forgot_response.headers["X-Token"] + assert token is not None + + reset_response = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "Updatedpassword4testing!1", + "new_password": "Updatedpassword4testing!1", + }, + ) + assert reset_response.status_code == 200 + + response = _test_client.post( + "/auth/login", + json={ + "email_address": "test@fairhub.io", + "password": 
"Updatedpassword4testing!1", + }, + ) + + assert response.status_code == 200 + logout_response = _test_client.post("/auth/logout") + assert logout_response.status_code == 204 + + +def test_post_reset_password_invalidation(flask_app): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/reset-password' endpoint is requested (POST) + THEN check that the response is valid and the password is changed + """ + _test_client = flask_app.test_client() + + forgot_response = _test_client.post( + "/auth/forgot-password", + json={ + "email_address": "test@fairhub.io", + }, + ) + assert forgot_response.status_code == 200 + + token = forgot_response.headers["X-Token"] + assert token is not None + + reset_response = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "invalidatepassword4testing!", + "new_password": "invalidatepassword4testing!", + }, + ) + + assert reset_response.status_code == 200 + + reset_response_old = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "uniquepassword4testing!", + "new_password": "uniquepassword4testing!", + }, + ) + + assert reset_response_old.status_code == 400 + + +def test_post_reset_password_is_not_same_old(flask_app): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/reset-password' endpoint is requested (POST) + THEN check that the response is valid and the password is changed + """ + _test_client = flask_app.test_client() + + forgot_response = _test_client.post( + "/auth/forgot-password", + json={ + "email_address": "test@fairhub.io", + }, + ) + assert forgot_response.status_code == 200 + + token = forgot_response.headers["X-Token"] + assert token is not None + + reset_response_old = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "invalidatepassword4testing!", + "new_password": "invalidatepassword4testing!", + }, + ) + + assert 
reset_response_old.status_code == 422 + + +def test_post_reset_password_expired(flask_app): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/reset-password' endpoint is requested (POST) + THEN check that the response is valid and the password is changed + """ + _test_client = flask_app.test_client() + + set_now( + datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=6) + ) + forgot_response = _test_client.post( + "/auth/forgot-password", + json={ + "email_address": "test@fairhub.io", + }, + ) + set_now(None) + + assert forgot_response.status_code == 200 + + token = forgot_response.headers["X-Token"] + + assert token is not None + + reset_response = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + }, + ) + assert reset_response.status_code == 401 + + +def test_post_logout(clients): + """ + Given a Flask application configured for testing + WHEN the '/auth/login' endpoint is requested (POST) + THEN check that the response is valid when new password is provided + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + + response = _logged_in_client.post("/auth/logout") + a_response = _admin_client.post("/auth/logout") + e_response = _editor_client.post("/auth/logout") + v_response = _viewer_client.post("/auth/logout") + + assert response.status_code == 204 + assert a_response.status_code == 204 + assert e_response.status_code == 204 + assert v_response.status_code == 204 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == "session": + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 0 diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py deleted file mode 100644 index aad43499..00000000 --- 
a/tests/functional/test_study_dataset_metadata_api.py +++ /dev/null @@ -1,3992 +0,0 @@ -# pylint: disable=too-many-lines -"""Tests for the Dataset's Metadata API endpoints""" -import json -from time import sleep - -import pytest - - -# ------------------- ACCESS METADATA ------------------- # -def test_put_dataset_access_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (PUT) - Then check that the response is valid and updates the dataset access metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access", - json={ - "type": "type", - "description": "description", - "url": "google.com", - "url_last_checked": 123, - }, - ) - - response_data = json.loads(response.data) - assert response.status_code == 200 - - assert response_data["type"] == "type" - assert response_data["description"] == "description" - assert response_data["url"] == "google.com" - assert response_data["url_last_checked"] == 123 - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access", - json={ - "type": "admin type", - "description": "admin description", - "url": "google.com", - "url_last_checked": 123, - }, - ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["type"] == "admin type" - assert admin_response_data["description"] == "admin description" - assert admin_response_data["url"] == "google.com" - assert admin_response_data["url_last_checked"] == 123 - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access", - json={ - "type": "editor type", - "description": "editor 
description", - "url": "google.com", - "url_last_checked": 123, - }, - ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["type"] == "editor type" - assert editor_response_data["description"] == "editor description" - assert editor_response_data["url"] == "google.com" - assert editor_response_data["url_last_checked"] == 123 - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access", - json={ - "type": "viewer type", - "description": "viewer description", - "url": "google.com", - "url_last_checked": 123, - }, - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_access_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset access metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - # Since 
editor was the last successful PUT request, the response data should match - assert response_data["type"] == "editor type" - assert response_data["description"] == "editor description" - assert response_data["url"] == "google.com" - assert response_data["url_last_checked"] == 123 - - assert admin_response_data["type"] == "editor type" - assert admin_response_data["description"] == "editor description" - assert admin_response_data["url"] == "google.com" - assert admin_response_data["url_last_checked"] == 123 - - assert editor_response_data["type"] == "editor type" - assert editor_response_data["description"] == "editor description" - assert editor_response_data["url"] == "google.com" - assert editor_response_data["url_last_checked"] == 123 - - assert viewer_response_data["type"] == "editor type" - assert viewer_response_data["description"] == "editor description" - assert viewer_response_data["url"] == "google.com" - assert viewer_response_data["url_last_checked"] == 123 - - -# ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # -def test_post_alternative_identifier(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset alternative identifier - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", - json=[ - { - "identifier": "identifier test", - "type": "ARK", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_alternative_identifier_id = response_data[0]["id"] - - assert 
response_data[0]["identifier"] == "identifier test" - assert response_data[0]["type"] == "ARK" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", - json=[ - { - "identifier": "admin test", - "type": "ARK", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", - json=[ - { - "identifier": "editor test", - "type": "ARK", - } - ], - ) - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", - json=[ - { - "identifier": "viewer test", - "type": "ARK", - } - ], - ) - - assert admin_response.status_code == 201 - assert editor_response.status_code == 201 - assert viewer_response.status_code == 403 - - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - pytest.global_alternative_identifier_id_admin = admin_response_data[0]["id"] - pytest.global_alternative_identifier_id_editor = editor_response_data[0]["id"] - - assert admin_response_data[0]["identifier"] == "admin test" - assert admin_response_data[0]["type"] == "ARK" - assert editor_response_data[0]["identifier"] == "editor test" - assert editor_response_data[0]["type"] == "ARK" - - -def test_get_alternative_identifier(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset alternative identifier content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data[0]["identifier"] == "identifier test" - assert response_data[0]["type"] == "ARK" - assert response_data[1]["identifier"] == "admin test" - assert response_data[1]["type"] == "ARK" - assert response_data[2]["identifier"] == "editor test" - assert response_data[2]["type"] == "ARK" - - assert admin_response_data[0]["identifier"] == "identifier test" - assert admin_response_data[0]["type"] == "ARK" - assert admin_response_data[1]["identifier"] == "admin test" - assert admin_response_data[1]["type"] == "ARK" - assert admin_response_data[2]["identifier"] == "editor test" - assert admin_response_data[2]["type"] == "ARK" - - assert editor_response_data[0]["identifier"] == "identifier test" - assert editor_response_data[0]["type"] == "ARK" - assert editor_response_data[1]["identifier"] == "admin test" - assert editor_response_data[1]["type"] == "ARK" - assert editor_response_data[2]["identifier"] == "editor test" - assert editor_response_data[2]["type"] == "ARK" - - assert viewer_response_data[0]["identifier"] == "identifier test" - assert viewer_response_data[0]["type"] == "ARK" - assert viewer_response_data[1]["identifier"] == "admin test" 
- assert viewer_response_data[1]["type"] == "ARK" - assert viewer_response_data[2]["identifier"] == "editor test" - assert viewer_response_data[2]["type"] == "ARK" - - -def test_delete_alternative_identifier(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset alternative identifier content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - identifier_id = pytest.global_alternative_identifier_id - admin_identifier_id = pytest.global_alternative_identifier_id_admin - editor_identifier_id = pytest.global_alternative_identifier_id_editor - - # verify Viewer cannot delete - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" - ) - # pylint: disable=line-too-long - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{admin_identifier_id}" - ) - # pylint: disable=line-too-long - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{editor_identifier_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- CONSENT METADATA ------------------- # -def test_put_dataset_consent_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/consent' endpoint is 
requested (PUT) - Then check that the response is valid and updates the dataset consent metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", - json={ - "type": "test", - "noncommercial": True, - "geog_restrict": True, - "research_type": True, - "genetic_only": True, - "no_methods": True, - "details": "test", - }, - ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["type"] == "test" - assert response_data["noncommercial"] is True - assert response_data["geog_restrict"] is True - assert response_data["research_type"] is True - assert response_data["genetic_only"] is True - assert response_data["no_methods"] is True - assert response_data["details"] == "test" - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", - json={ - "type": "admin test", - "noncommercial": True, - "geog_restrict": True, - "research_type": True, - "genetic_only": True, - "no_methods": True, - "details": "admin details test", - }, - ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["type"] == "admin test" - assert admin_response_data["details"] == "admin details test" - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", - json={ - "type": "editor test", - "noncommercial": True, - "geog_restrict": True, - "research_type": True, - "genetic_only": True, - "no_methods": True, - "details": "editor details test", - }, - ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["type"] == "editor test" - assert editor_response_data["details"] == 
"editor details test" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", - json={ - "type": "viewer test", - "noncommercial": True, - "geog_restrict": True, - "research_type": True, - "genetic_only": True, - "no_methods": True, - "details": "viewer details test", - }, - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_consent_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/consent' endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset consent metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - # Editor was the last successful PUT request, so the response data should match - assert response_data["type"] == "editor test" - assert response_data["noncommercial"] is True - assert response_data["geog_restrict"] is True - assert response_data["research_type"] is True - assert response_data["genetic_only"] is True - 
assert response_data["no_methods"] is True - assert response_data["details"] == "editor details test" - - assert admin_response_data["type"] == "editor test" - assert admin_response_data["noncommercial"] is True - assert admin_response_data["geog_restrict"] is True - assert admin_response_data["research_type"] is True - assert admin_response_data["genetic_only"] is True - assert admin_response_data["no_methods"] is True - assert admin_response_data["details"] == "editor details test" - - assert editor_response_data["type"] == "editor test" - assert editor_response_data["noncommercial"] is True - assert editor_response_data["geog_restrict"] is True - assert editor_response_data["research_type"] is True - assert editor_response_data["genetic_only"] is True - assert editor_response_data["no_methods"] is True - assert editor_response_data["details"] == "editor details test" - - assert viewer_response_data["type"] == "editor test" - assert viewer_response_data["noncommercial"] is True - assert viewer_response_data["geog_restrict"] is True - assert viewer_response_data["research_type"] is True - assert viewer_response_data["genetic_only"] is True - assert viewer_response_data["no_methods"] is True - assert viewer_response_data["details"] == "editor details test" - - -# ------------------- CONTRIBUTOR METADATA ------------------- # -def test_post_dataset_contributor_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset contributor metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Given Name 
here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - - # Add a one second delay to prevent duplicate timestamps - sleep(1) - response_data = json.loads(response.data) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_contributor_id = response_data[0]["id"] - - assert response_data[0]["given_name"] == "Given Name here" - assert response_data[0]["family_name"] == "Family Name here" - assert response_data[0]["name_type"] == "Personal" - assert response_data[0]["name_identifier"] == "Name identifier" - assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[0]["creator"] is False - assert response_data[0]["contributor_type"] == "Con Type" - assert response_data[0]["affiliations"][0]["name"] == "Test" - assert response_data[0]["affiliations"][0]["identifier"] == "yes" - assert response_data[0]["affiliations"][0]["scheme"] == "uh" - assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Admin Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - # Add a one second delay to prevent duplicate 
timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_contributor_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["given_name"] == "Admin Given Name here" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Editor Given Name here", - "family_name": "Editor Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_contributor_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["given_name"] == "Editor Given Name here" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Viewer Given Name here", - "family_name": "Viewer Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_contributor_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset contributor metadata content - """ - 
_logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - -def test_delete_dataset_contributor_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset contributor metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - contributor_id = pytest.global_dataset_contributor_id - admin_contributor_id = pytest.global_dataset_contributor_id_admin - editor_contributor_id = pytest.global_dataset_contributor_id_editor - - # Verify Viewer cannot delete - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" - ) - # pylint: disable=line-too-long - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{admin_contributor_id}" - ) - # pylint: disable=line-too-long - editor_response = 
_editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{editor_contributor_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- CREATOR METADATA ------------------- # -def test_post_dataset_creator_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' endpoint is requested (POST) - Then check that the response is valid and creates the dataset creator metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_creator_id = response_data[0]["id"] - - assert response_data[0]["given_name"] == "Given Name here" - assert response_data[0]["family_name"] == "Family Name here" - assert response_data[0]["name_type"] == "Personal" - assert response_data[0]["name_identifier"] == "Name identifier" - assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[0]["creator"] is True - assert 
response_data[0]["affiliations"][0]["name"] == "Test" - assert response_data[0]["affiliations"][0]["identifier"] == "yes" - assert response_data[0]["affiliations"][0]["scheme"] == "uh" - assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Admin Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_creator_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["given_name"] == "Admin Given Name here" - assert admin_response_data[0]["family_name"] == "Family Name here" - assert admin_response_data[0]["name_type"] == "Personal" - assert admin_response_data[0]["name_identifier"] == "Name identifier" - assert admin_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert admin_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert admin_response_data[0]["creator"] is True - assert admin_response_data[0]["affiliations"][0]["name"] == "Test" - assert admin_response_data[0]["affiliations"][0]["identifier"] == "yes" - assert admin_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert admin_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Editor Given Name here", - "family_name": "Family Name here", - "name_type": 
"Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_creator_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["given_name"] == "Editor Given Name here" - assert editor_response_data[0]["family_name"] == "Family Name here" - assert editor_response_data[0]["name_type"] == "Personal" - assert editor_response_data[0]["name_identifier"] == "Name identifier" - assert editor_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert editor_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert editor_response_data[0]["creator"] is True - assert editor_response_data[0]["affiliations"][0]["name"] == "Test" - assert editor_response_data[0]["affiliations"][0]["identifier"] == "yes" - assert editor_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert editor_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Viewer Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_creator_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' 
endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset creator metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert len(response_data) == 3 - assert len(admin_response_data) == 3 - assert len(editor_response_data) == 3 - assert len(viewer_response_data) == 3 - - assert response_data[0]["id"] == pytest.global_dataset_creator_id - assert response_data[0]["given_name"] == "Given Name here" - assert response_data[0]["family_name"] == "Family Name here" - assert response_data[0]["name_type"] == "Personal" - assert response_data[0]["name_identifier"] == "Name identifier" - assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[0]["creator"] is True - assert response_data[0]["affiliations"][0]["name"] == "Test" - assert response_data[0]["affiliations"][0]["identifier"] == "yes" - assert response_data[0]["affiliations"][0]["scheme"] == "uh" - assert 
response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert response_data[1]["id"] == pytest.global_dataset_creator_id_admin - assert response_data[1]["given_name"] == "Admin Given Name here" - assert response_data[1]["family_name"] == "Family Name here" - assert response_data[1]["name_type"] == "Personal" - assert response_data[1]["name_identifier"] == "Name identifier" - assert response_data[1]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[1]["creator"] is True - assert response_data[1]["affiliations"][0]["name"] == "Test" - assert response_data[1]["affiliations"][0]["identifier"] == "yes" - assert response_data[1]["affiliations"][0]["scheme"] == "uh" - assert response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert response_data[2]["id"] == pytest.global_dataset_creator_id_editor - assert response_data[2]["given_name"] == "Editor Given Name here" - assert response_data[2]["family_name"] == "Family Name here" - assert response_data[2]["name_type"] == "Personal" - assert response_data[2]["name_identifier"] == "Name identifier" - assert response_data[2]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[2]["creator"] is True - assert response_data[2]["affiliations"][0]["name"] == "Test" - assert response_data[2]["affiliations"][0]["identifier"] == "yes" - assert response_data[2]["affiliations"][0]["scheme"] == "uh" - assert response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - - assert admin_response_data[0]["given_name"] == "Given Name here" - assert admin_response_data[0]["family_name"] == "Family Name here" - assert admin_response_data[0]["name_type"] == "Personal" - assert admin_response_data[0]["name_identifier"] == "Name identifier" - assert admin_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - 
assert admin_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert admin_response_data[0]["creator"] is True - assert admin_response_data[0]["affiliations"][0]["name"] == "Test" - assert admin_response_data[0]["affiliations"][0]["identifier"] == "yes" - assert admin_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert admin_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert admin_response_data[1]["given_name"] == "Admin Given Name here" - assert admin_response_data[1]["family_name"] == "Family Name here" - assert admin_response_data[1]["name_type"] == "Personal" - assert admin_response_data[1]["name_identifier"] == "Name identifier" - assert admin_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" - assert admin_response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert admin_response_data[1]["creator"] is True - assert admin_response_data[1]["affiliations"][0]["name"] == "Test" - assert admin_response_data[1]["affiliations"][0]["identifier"] == "yes" - assert admin_response_data[1]["affiliations"][0]["scheme"] == "uh" - assert admin_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert admin_response_data[2]["given_name"] == "Editor Given Name here" - assert admin_response_data[2]["family_name"] == "Family Name here" - assert admin_response_data[2]["name_type"] == "Personal" - assert admin_response_data[2]["name_identifier"] == "Name identifier" - assert admin_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" - assert admin_response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert admin_response_data[2]["creator"] is True - assert admin_response_data[2]["affiliations"][0]["name"] == "Test" - assert admin_response_data[2]["affiliations"][0]["identifier"] == "yes" - assert admin_response_data[2]["affiliations"][0]["scheme"] == "uh" - assert admin_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - - 
assert editor_response_data[0]["given_name"] == "Given Name here" - assert editor_response_data[0]["family_name"] == "Family Name here" - assert editor_response_data[0]["name_type"] == "Personal" - assert editor_response_data[0]["name_identifier"] == "Name identifier" - assert editor_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert editor_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert editor_response_data[0]["creator"] is True - assert editor_response_data[0]["affiliations"][0]["name"] == "Test" - assert editor_response_data[0]["affiliations"][0]["identifier"] == "yes" - assert editor_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert editor_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert editor_response_data[1]["given_name"] == "Admin Given Name here" - assert editor_response_data[1]["family_name"] == "Family Name here" - assert editor_response_data[1]["name_type"] == "Personal" - assert editor_response_data[1]["name_identifier"] == "Name identifier" - assert editor_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" - assert editor_response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert editor_response_data[1]["creator"] is True - assert editor_response_data[1]["affiliations"][0]["name"] == "Test" - assert editor_response_data[1]["affiliations"][0]["identifier"] == "yes" - assert editor_response_data[1]["affiliations"][0]["scheme"] == "uh" - assert editor_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert editor_response_data[2]["given_name"] == "Editor Given Name here" - assert editor_response_data[2]["family_name"] == "Family Name here" - assert editor_response_data[2]["name_type"] == "Personal" - assert editor_response_data[2]["name_identifier"] == "Name identifier" - assert editor_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" - assert editor_response_data[2]["name_identifier_scheme_uri"] == 
"Name ID Scheme URI" - assert editor_response_data[2]["creator"] is True - assert editor_response_data[2]["affiliations"][0]["name"] == "Test" - assert editor_response_data[2]["affiliations"][0]["identifier"] == "yes" - assert editor_response_data[2]["affiliations"][0]["scheme"] == "uh" - assert editor_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - - assert viewer_response_data[0]["given_name"] == "Given Name here" - assert viewer_response_data[0]["family_name"] == "Family Name here" - assert viewer_response_data[0]["name_type"] == "Personal" - assert viewer_response_data[0]["name_identifier"] == "Name identifier" - assert viewer_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert viewer_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert viewer_response_data[0]["creator"] is True - assert viewer_response_data[0]["affiliations"][0]["name"] == "Test" - assert viewer_response_data[0]["affiliations"][0]["identifier"] == "yes" - assert viewer_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert viewer_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[1]["given_name"] == "Admin Given Name here" - assert viewer_response_data[1]["family_name"] == "Family Name here" - assert viewer_response_data[1]["name_type"] == "Personal" - assert viewer_response_data[1]["name_identifier"] == "Name identifier" - assert viewer_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" - assert viewer_response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert viewer_response_data[1]["creator"] is True - assert viewer_response_data[1]["affiliations"][0]["name"] == "Test" - assert viewer_response_data[1]["affiliations"][0]["identifier"] == "yes" - assert viewer_response_data[1]["affiliations"][0]["scheme"] == "uh" - assert viewer_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[2]["given_name"] == 
"Editor Given Name here" - assert viewer_response_data[2]["family_name"] == "Family Name here" - assert viewer_response_data[2]["name_type"] == "Personal" - assert viewer_response_data[2]["name_identifier"] == "Name identifier" - assert viewer_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" - assert viewer_response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert viewer_response_data[2]["creator"] is True - assert viewer_response_data[2]["affiliations"][0]["name"] == "Test" - assert viewer_response_data[2]["affiliations"][0]["identifier"] == "yes" - assert viewer_response_data[2]["affiliations"][0]["scheme"] == "uh" - assert viewer_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - - -def test_delete_dataset_creator_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset creator metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - creator_id = pytest.global_dataset_creator_id - admin_creator_id = pytest.global_dataset_creator_id_admin - editor_creator_id = pytest.global_dataset_creator_id_editor - - # Verify Viewer cannot delete - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{admin_creator_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{editor_creator_id}" - ) - - assert viewer_response.status_code == 403 - assert 
response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- DATE METADATA ------------------- # -def test_post_dataset_date_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset date metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210101, "type": "Type", "information": "Info"}], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_date_id = response_data[0]["id"] - - assert response_data[0]["date"] == 20210101 - assert response_data[0]["type"] == "Type" - assert response_data[0]["information"] == "Info" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210102, "type": "Type", "information": "Info"}], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_date_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["date"] == 20210102 - assert admin_response_data[0]["type"] == "Type" - assert admin_response_data[0]["information"] == "Info" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210103, "type": "Type", "information": "Info"}], - ) - - assert editor_response.status_code == 201 - 
editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_date_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["date"] == 20210103 - assert editor_response_data[0]["type"] == "Type" - assert editor_response_data[0]["information"] == "Info" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210101, "type": "Type", "information": "Info"}], - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_date_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset date metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert len(response_data) == 3 - assert len(admin_response_data) == 3 - assert len(editor_response_data) == 3 - assert len(viewer_response_data) == 3 - - assert response_data[0]["date"] == 
20210101 - assert response_data[0]["type"] == "Type" - assert response_data[0]["information"] == "Info" - assert response_data[1]["date"] == 20210102 - assert response_data[1]["type"] == "Type" - assert response_data[1]["information"] == "Info" - assert response_data[2]["date"] == 20210103 - assert response_data[2]["type"] == "Type" - - assert admin_response_data[0]["date"] == 20210101 - assert admin_response_data[0]["type"] == "Type" - assert admin_response_data[0]["information"] == "Info" - assert admin_response_data[1]["date"] == 20210102 - assert admin_response_data[1]["type"] == "Type" - assert admin_response_data[1]["information"] == "Info" - assert admin_response_data[2]["date"] == 20210103 - assert admin_response_data[2]["type"] == "Type" - - assert editor_response_data[0]["date"] == 20210101 - assert editor_response_data[0]["type"] == "Type" - assert editor_response_data[0]["information"] == "Info" - assert editor_response_data[1]["date"] == 20210102 - assert editor_response_data[1]["type"] == "Type" - assert editor_response_data[1]["information"] == "Info" - assert editor_response_data[2]["date"] == 20210103 - assert editor_response_data[2]["type"] == "Type" - - assert viewer_response_data[0]["date"] == 20210101 - assert viewer_response_data[0]["type"] == "Type" - assert viewer_response_data[0]["information"] == "Info" - assert viewer_response_data[1]["date"] == 20210102 - assert viewer_response_data[1]["type"] == "Type" - assert viewer_response_data[1]["information"] == "Info" - assert viewer_response_data[2]["date"] == 20210103 - assert viewer_response_data[2]["type"] == "Type" - - -def test_delete_dataset_date_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset date metadata content - """ - _logged_in_client, _admin_client, _editor_client, 
_viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - date_id = pytest.global_dataset_date_id - admin_date_id = pytest.global_dataset_date_id_admin - editor_date_id = pytest.global_dataset_date_id_editor - - # Verify Viewer cannot delete - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{admin_date_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{editor_date_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- DE-IDENTIFICATION LEVEL METADATA ------------------- # -def test_put_dataset_deidentification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/de-identification' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - de-identification metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", - json={ - "type": "Level", - "direct": True, - "hipaa": True, - "dates": True, - "nonarr": True, - "k_anon": True, - "details": "Details", - }, - ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["type"] == "Level" - assert response_data["direct"] is True - 
assert response_data["hipaa"] is True - assert response_data["dates"] is True - assert response_data["nonarr"] is True - assert response_data["k_anon"] is True - assert response_data["details"] == "Details" - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", - json={ - "type": "Level", - "direct": True, - "hipaa": True, - "dates": True, - "nonarr": True, - "k_anon": True, - "details": "Details", - }, - ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["type"] == "Level" - assert admin_response_data["direct"] is True - assert admin_response_data["hipaa"] is True - assert admin_response_data["dates"] is True - assert admin_response_data["nonarr"] is True - assert admin_response_data["k_anon"] is True - assert admin_response_data["details"] == "Details" - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", - json={ - "type": "Level", - "direct": True, - "hipaa": True, - "dates": True, - "nonarr": True, - "k_anon": True, - "details": "Details", - }, - ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["type"] == "Level" - assert editor_response_data["direct"] is True - assert editor_response_data["hipaa"] is True - assert editor_response_data["dates"] is True - assert editor_response_data["nonarr"] is True - assert editor_response_data["k_anon"] is True - assert editor_response_data["details"] == "Details" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", - json={ - "type": "Level", - "direct": True, - "hipaa": True, - "dates": True, - "nonarr": True, - "k_anon": True, - "details": "Details", - }, - ) - - assert viewer_response.status_code == 403 - - -def 
test_get_dataset_deidentification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/de-identification' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - de-identification metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data["type"] == "Level" - assert response_data["direct"] is True - assert response_data["hipaa"] is True - assert response_data["dates"] is True - assert response_data["nonarr"] is True - assert response_data["k_anon"] is True - assert response_data["details"] == "Details" - - assert admin_response_data["type"] == "Level" - assert admin_response_data["direct"] is True - assert admin_response_data["hipaa"] is True - assert admin_response_data["dates"] is True - assert admin_response_data["nonarr"] is True - assert admin_response_data["k_anon"] is True - assert 
admin_response_data["details"] == "Details" - - assert editor_response_data["type"] == "Level" - assert editor_response_data["direct"] is True - assert editor_response_data["hipaa"] is True - assert editor_response_data["dates"] is True - assert editor_response_data["nonarr"] is True - assert editor_response_data["k_anon"] is True - assert editor_response_data["details"] == "Details" - - assert viewer_response_data["type"] == "Level" - assert viewer_response_data["direct"] is True - assert viewer_response_data["hipaa"] is True - assert viewer_response_data["dates"] is True - assert viewer_response_data["nonarr"] is True - assert viewer_response_data["k_anon"] is True - assert viewer_response_data["details"] == "Details" - - -# ------------------- DESCRIPTION METADATA ------------------- # -def test_post_dataset_descriptions_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - description metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description", - json=[{"description": "Owner Description", "type": "Methods"}], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_description_id = response_data[0]["id"] - - assert response_data[0]["description"] == "Owner Description" - assert response_data[0]["type"] == "Methods" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description", - json=[{"description": "Admin Description", "type": "Methods"}], 
- ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_description_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["description"] == "Admin Description" - assert admin_response_data[0]["type"] == "Methods" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description", - json=[{"description": "Editor Description", "type": "Methods"}], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_description_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["description"] == "Editor Description" - assert editor_response_data[0]["type"] == "Methods" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description", - json=[{"description": "Viewer Description", "type": "Methods"}], - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_descriptions_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - description metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description" - ) - viewer_response = _viewer_client.get( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/description" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - # Dataset description is included in the responses - assert len(response_data) == 4 - assert len(admin_response_data) == 4 - assert len(editor_response_data) == 4 - assert len(viewer_response_data) == 4 - - # seacrch for type abstract index - main_descrip = next( - (index for (index, d) in enumerate(response_data) if d["type"] == "Abstract"), - None, - ) - a_main_descrip = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["type"] == "Abstract" - ), - None, - ) - e_main_descrip = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["type"] == "Abstract" - ), - None, - ) - v_main_descrip = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["type"] == "Abstract" - ), - None, - ) - - # search for owner description - # pylint: disable=line-too-long - own_descrip = next( - ( - index - for (index, d) in enumerate(response_data) - if d["description"] == "Owner Description" - ), - None, - ) - a_own_descrip = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["description"] == "Owner Description" - ), - None, - ) - e_own_descrip = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["description"] == "Owner Description" - ), - None, - ) - v_own_descrip = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["description"] == "Owner Description" - ), - None, - ) - - # search for admin description - admin_descrip = next( - ( - index - for (index, d) in enumerate(response_data) - if 
d["description"] == "Admin Description" - ), - None, - ) - a_admin_descrip = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["description"] == "Admin Description" - ), - None, - ) - e_admin_descrip = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["description"] == "Admin Description" - ), - None, - ) - v_admin_descrip = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["description"] == "Admin Description" - ), - None, - ) - - # search for editor description - edit_descrip = next( - ( - index - for (index, d) in enumerate(response_data) - if d["description"] == "Editor Description" - ), - None, - ) - a_edit_descrip = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["description"] == "Editor Description" - ), - None, - ) - e_edit_descrip = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["description"] == "Editor Description" - ), - None, - ) - v_edit_descrip = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["description"] == "Editor Description" - ), - None, - ) - - assert response_data[main_descrip]["description"] == "Dataset Description" - assert response_data[main_descrip]["type"] == "Abstract" - assert response_data[own_descrip]["description"] == "Owner Description" - assert response_data[own_descrip]["type"] == "Methods" - assert response_data[admin_descrip]["description"] == "Admin Description" - assert response_data[admin_descrip]["type"] == "Methods" - assert response_data[edit_descrip]["description"] == "Editor Description" - assert response_data[edit_descrip]["type"] == "Methods" - - assert admin_response_data[a_main_descrip]["description"] == "Dataset Description" - assert admin_response_data[a_main_descrip]["type"] == "Abstract" - assert admin_response_data[a_own_descrip]["description"] == "Owner Description" - assert admin_response_data[a_own_descrip]["type"] == "Methods" - assert 
admin_response_data[a_admin_descrip]["description"] == "Admin Description" - assert admin_response_data[a_admin_descrip]["type"] == "Methods" - assert admin_response_data[a_edit_descrip]["description"] == "Editor Description" - assert admin_response_data[a_edit_descrip]["type"] == "Methods" - - assert editor_response_data[e_main_descrip]["description"] == "Dataset Description" - assert editor_response_data[e_main_descrip]["type"] == "Abstract" - assert editor_response_data[e_own_descrip]["description"] == "Owner Description" - assert editor_response_data[e_own_descrip]["type"] == "Methods" - assert editor_response_data[e_admin_descrip]["description"] == "Admin Description" - assert editor_response_data[e_admin_descrip]["type"] == "Methods" - assert editor_response_data[e_edit_descrip]["description"] == "Editor Description" - assert editor_response_data[e_edit_descrip]["type"] == "Methods" - - assert viewer_response_data[v_main_descrip]["description"] == "Dataset Description" - assert viewer_response_data[v_main_descrip]["type"] == "Abstract" - assert viewer_response_data[v_own_descrip]["description"] == "Owner Description" - assert viewer_response_data[v_own_descrip]["type"] == "Methods" - assert viewer_response_data[v_admin_descrip]["description"] == "Admin Description" - assert viewer_response_data[v_admin_descrip]["type"] == "Methods" - assert viewer_response_data[v_edit_descrip]["description"] == "Editor Description" - assert viewer_response_data[v_edit_descrip]["type"] == "Methods" - - -def test_delete_dataset_description_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - description metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore 
- dataset_id = pytest.global_dataset_id - description_id = pytest.global_dataset_description_id - admin_description_id = pytest.global_dataset_description_id_admin - editor_description_id = pytest.global_dataset_description_id_editor - - # Verify Viewer cannot delete - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{description_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{description_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{admin_description_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{editor_description_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- DATASET HEALTHSHEET MOTIVATION METADATA ------------------- # -def test_put_healthsheet_motivation_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", - json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["motivation"] == '[{"id":1,"question":"For","response":"new"}]' - - admin_response = _admin_client.put( - 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", - json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", - json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", - json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_healthsheet_motivation_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - description metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["motivation"] == '[{"id":1,"question":"For","response":"new"}]' - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - 
admin_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - # Editor was the last successful PUT request, so the response data should match - - -# # ------------------- DATASET HEALTHSHEET COMPOSITION METADATA ------------------- # -def test_put_healthsheet_composition_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/composition' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet composition metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", - json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", - json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert 
admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["composition"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", - json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["composition"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", - json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_healthsheet_composition_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/composition' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - description metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["composition"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = 
_editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["composition"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["composition"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - # Editor was the last successful PUT request, so the response data should match - - -# ------------------- DATASET HEALTHSHEET COLLECTION METADATA ------------------- # -def test_put_healthsheet_collection_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/collection' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet collection metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", - json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", - json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["collection"] 
- == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", - json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["collection"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", - json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_healthsheet_collection_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/collection' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - description metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["collection"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" - ) - assert editor_response.status_code == 200 - editor_response_data = 
json.loads(editor_response.data) - assert ( - editor_response_data["collection"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["collection"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - # Editor was the last successful PUT request, so the response data should match - - -# ------------------- DATASET HEALTHSHEET PREPROCESSING METADATA ------------------- # -def test_put_healthsheet_preprocessing_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet preprocessing metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", - json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", - json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.put( - 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", - json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", - json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_healthsheet_preprocessing_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - description metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - 
editor_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - # Editor was the last successful PUT request, so the response data should match - - -# # ------------------- DATASET HEALTHSHEET USES METADATA ------------------- # -def test_put_healthsheet_uses_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/uses' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet uses metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", - json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", - json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", - json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert editor_response.status_code == 
200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", - json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_healthsheet_uses_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/uses' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - description metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - 
viewer_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - # Editor was the last successful PUT request, so the response data should match - - -# ------------------- DATASET HEALTHSHEET DISTRIBUTION METADATA ------------------- # -def test_put_healthsheet_distribution_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/distribution' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet distribution metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", - json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", - json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", - json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.put( - 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", - json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_healthsheet_distribution_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - distribution metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - # Editor was 
the last successful PUT request, so the response data should match - - -# ------------------- DATASET HEALTHSHEET MAINTENANCE METADATA ------------------- # -def test_put_healthsheet_maintenance_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/maintenance' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet maintenance metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", - json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["maintenance"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", - json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", - json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", - json={"maintenance": 
'[{"id":1,"question":"For","response":"new"}]'}, - ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_healthsheet_maintenance_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - maintenance metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["maintenance"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - # Editor was the last successful PUT request, so the response data should match - - -# ------------------- DATASET 
FUNDER METADATA ------------------- # -def test_post_dataset_funder_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - funder metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", - json=[ - { - "name": "Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": "Identifier", - "identifier_scheme_uri": "Identifier Scheme URI", - "identifier_type": "Identifier Type", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_funder_id = response_data[0]["id"] - - assert response_data[0]["name"] == "Name" - assert response_data[0]["award_number"] == "award number" - assert response_data[0]["award_title"] == "Award Title" - assert response_data[0]["award_uri"] == "Award URI" - assert response_data[0]["identifier"] == "Identifier" - assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert response_data[0]["identifier_type"] == "Identifier Type" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", - json=[ - { - "name": "Admin Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": "Identifier", - "identifier_scheme_uri": "Identifier Scheme URI", - "identifier_type": "Identifier Type", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert 
admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_funder_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["name"] == "Admin Name" - assert admin_response_data[0]["award_number"] == "award number" - assert admin_response_data[0]["award_title"] == "Award Title" - assert admin_response_data[0]["award_uri"] == "Award URI" - assert admin_response_data[0]["identifier"] == "Identifier" - assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert admin_response_data[0]["identifier_type"] == "Identifier Type" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", - json=[ - { - "name": "Editor Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": "Identifier", - "identifier_scheme_uri": "Identifier Scheme URI", - "identifier_type": "Identifier Type", - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_funder_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["name"] == "Editor Name" - assert editor_response_data[0]["award_number"] == "award number" - assert editor_response_data[0]["award_title"] == "Award Title" - assert editor_response_data[0]["award_uri"] == "Award URI" - assert editor_response_data[0]["identifier"] == "Identifier" - assert ( - editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - ) # pylint: disable=line-too-long - assert editor_response_data[0]["identifier_type"] == "Identifier Type" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", - json=[ - { - "name": "Viewer Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": "Identifier", - "identifier_scheme_uri": "Identifier 
Scheme URI", - "identifier_type": "Identifier Type", - } - ], - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_funder_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - funder metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert len(response_data) == 3 - assert len(admin_response_data) == 3 - assert len(editor_response_data) == 3 - assert len(viewer_response_data) == 3 - - assert response_data[0]["name"] == "Name" - assert response_data[0]["award_number"] == "award number" - assert response_data[0]["award_title"] == "Award Title" - assert response_data[0]["award_uri"] == "Award URI" - assert response_data[0]["identifier"] == "Identifier" - assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert response_data[0]["identifier_type"] == "Identifier Type" - 
assert response_data[1]["name"] == "Admin Name" - assert response_data[1]["award_number"] == "award number" - assert response_data[1]["award_title"] == "Award Title" - assert response_data[1]["award_uri"] == "Award URI" - assert response_data[1]["identifier"] == "Identifier" - assert response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert response_data[1]["identifier_type"] == "Identifier Type" - assert response_data[2]["name"] == "Editor Name" - assert response_data[2]["award_number"] == "award number" - assert response_data[2]["award_title"] == "Award Title" - assert response_data[2]["award_uri"] == "Award URI" - assert response_data[2]["identifier"] == "Identifier" - assert response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert response_data[2]["identifier_type"] == "Identifier Type" - - assert admin_response_data[0]["name"] == "Name" - assert admin_response_data[0]["award_number"] == "award number" - assert admin_response_data[0]["award_title"] == "Award Title" - assert admin_response_data[0]["award_uri"] == "Award URI" - assert admin_response_data[0]["identifier"] == "Identifier" - assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert admin_response_data[0]["identifier_type"] == "Identifier Type" - assert admin_response_data[1]["name"] == "Admin Name" - assert admin_response_data[1]["award_number"] == "award number" - assert admin_response_data[1]["award_title"] == "Award Title" - assert admin_response_data[1]["award_uri"] == "Award URI" - assert admin_response_data[1]["identifier"] == "Identifier" - assert admin_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert admin_response_data[1]["identifier_type"] == "Identifier Type" - assert admin_response_data[2]["name"] == "Editor Name" - assert admin_response_data[2]["award_number"] == "award number" - assert admin_response_data[2]["award_title"] == "Award Title" - assert admin_response_data[2]["award_uri"] 
== "Award URI" - assert admin_response_data[2]["identifier"] == "Identifier" - assert admin_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert admin_response_data[2]["identifier_type"] == "Identifier Type" - - assert editor_response_data[0]["name"] == "Name" - assert editor_response_data[0]["award_number"] == "award number" - assert editor_response_data[0]["award_title"] == "Award Title" - assert editor_response_data[0]["award_uri"] == "Award URI" - assert editor_response_data[0]["identifier"] == "Identifier" - assert editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert editor_response_data[0]["identifier_type"] == "Identifier Type" - assert editor_response_data[1]["name"] == "Admin Name" - assert editor_response_data[1]["award_number"] == "award number" - assert editor_response_data[1]["award_title"] == "Award Title" - assert editor_response_data[1]["award_uri"] == "Award URI" - assert editor_response_data[1]["identifier"] == "Identifier" - assert editor_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert editor_response_data[1]["identifier_type"] == "Identifier Type" - assert editor_response_data[2]["name"] == "Editor Name" - assert editor_response_data[2]["award_number"] == "award number" - assert editor_response_data[2]["award_title"] == "Award Title" - assert editor_response_data[2]["award_uri"] == "Award URI" - assert editor_response_data[2]["identifier"] == "Identifier" - assert editor_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert editor_response_data[2]["identifier_type"] == "Identifier Type" - - assert viewer_response_data[0]["name"] == "Name" - assert viewer_response_data[0]["award_number"] == "award number" - assert viewer_response_data[0]["award_title"] == "Award Title" - assert viewer_response_data[0]["award_uri"] == "Award URI" - assert viewer_response_data[0]["identifier"] == "Identifier" - assert 
viewer_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert viewer_response_data[0]["identifier_type"] == "Identifier Type" - assert viewer_response_data[1]["name"] == "Admin Name" - assert viewer_response_data[1]["award_number"] == "award number" - assert viewer_response_data[1]["award_title"] == "Award Title" - assert viewer_response_data[1]["award_uri"] == "Award URI" - assert viewer_response_data[1]["identifier"] == "Identifier" - assert viewer_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert viewer_response_data[1]["identifier_type"] == "Identifier Type" - assert viewer_response_data[2]["name"] == "Editor Name" - assert viewer_response_data[2]["award_number"] == "award number" - assert viewer_response_data[2]["award_title"] == "Award Title" - assert viewer_response_data[2]["award_uri"] == "Award URI" - assert viewer_response_data[2]["identifier"] == "Identifier" - assert viewer_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert viewer_response_data[2]["identifier_type"] == "Identifier Type" - - -def test_delete_dataset_funder_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - funder metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - funder_id = pytest.global_dataset_funder_id - a_funder_id = pytest.global_dataset_funder_id_admin - e_funder_id = pytest.global_dataset_funder_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" - ) - 
admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{a_funder_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{e_funder_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- OTHER METADATA ------------------- # -def test_put_other_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - other metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other", - json={ - "acknowledgement": "Yes", - "language": "English", - "resource_type": "Resource Type", - "size": ["Size"], - "format": ["Format"], - "standards_followed": "Standards Followed", - }, - ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["acknowledgement"] == "Yes" - assert response_data["language"] == "English" - - assert response_data["size"] == ["Size"] - assert response_data["format"] == ["Format"] - assert response_data["standards_followed"] == "Standards Followed" - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other", - json={ - "acknowledgement": "Yes", - "language": "English", - "resource_type": "Resource Type", - "size": ["Size"], - "format": ["Format"], - "standards_followed": "Standards Followed", - }, - ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - 
assert admin_response_data["acknowledgement"] == "Yes" - assert admin_response_data["language"] == "English" - assert admin_response_data["size"] == ["Size"] - assert admin_response_data["format"] == ["Format"] - assert admin_response_data["standards_followed"] == "Standards Followed" - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other", - json={ - "acknowledgement": "Yes", - "language": "English", - "resource_type": "Resource Type", - "size": ["Size"], - "format": ["Format"], - "standards_followed": "Standards Followed", - }, - ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["acknowledgement"] == "Yes" - assert editor_response_data["language"] == "English" - assert editor_response_data["size"] == ["Size"] - assert editor_response_data["format"] == ["Format"] - assert editor_response_data["standards_followed"] == "Standards Followed" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other", - json={ - "acknowledgement": "Yes", - "language": "English", - "resource_type": "Resource Type", - "size": ["Size"], - "format": ["Format"], - "standards_followed": "Standards Followed", - }, - ) - assert viewer_response.status_code == 403 - - -def test_get_other_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - other metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other" - ) - admin_response = _admin_client.get( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/other" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - # assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - # Editor was the last to update the metadata successfully so - # the response should reflect that - assert response_data["acknowledgement"] == "Yes" - assert response_data["language"] == "English" - # assert response_data["resource_type"] == "Editor Resource Type" - assert response_data["size"] == ["Size"] - assert response_data["format"] == ["Format"] - assert response_data["standards_followed"] == "Standards Followed" - - assert admin_response_data["acknowledgement"] == "Yes" - assert admin_response_data["language"] == "English" - # assert admin_response_data["resource_type"] == "Editor Resource Type" - assert admin_response_data["size"] == ["Size"] - assert admin_response_data["format"] == ["Format"] - assert admin_response_data["standards_followed"] == "Standards Followed" - - assert editor_response_data["acknowledgement"] == "Yes" - assert editor_response_data["language"] == "English" - # assert editor_response_data["resource_type"] == "Editor Resource Type" - assert editor_response_data["size"] == ["Size"] - assert editor_response_data["format"] == ["Format"] - assert editor_response_data["standards_followed"] == "Standards Followed" - - assert viewer_response_data["acknowledgement"] == "Yes" - assert viewer_response_data["language"] == "English" - assert viewer_response_data["size"] == ["Size"] - assert 
viewer_response_data["format"] == ["Format"] - assert viewer_response_data["standards_followed"] == "Standards Followed" - - -# ------------------- DATASET MANAGING ORGANIZATION METADATA ------------------- # -def test_put_dataset_managing_organization_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - managing organization metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", - json={ - "name": "Managing Organization Name", - "identifier": "identifier", - "identifier_scheme": "identifier scheme", - "identifier_scheme_uri": "identifier scheme_uri", - }, - ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["name"] == "Managing Organization Name" - assert response_data["identifier"] == "identifier" - assert response_data["identifier_scheme"] == "identifier scheme" - assert response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", - json={ - "name": "admin Managing Organization Name", - "identifier": "identifier", - "identifier_scheme": "identifier scheme", - "identifier_scheme_uri": "identifier scheme_uri", - }, - ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["name"] == "admin Managing Organization Name" - assert admin_response_data["identifier"] == "identifier" - assert admin_response_data["identifier_scheme"] == "identifier scheme" - assert 
admin_response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", - json={ - "name": "editor Managing Organization Name", - "identifier": "identifier", - "identifier_scheme": "identifier scheme", - "identifier_scheme_uri": "identifier scheme_uri", - }, - ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["name"] == "editor Managing Organization Name" - assert editor_response_data["identifier"] == "identifier" - assert editor_response_data["identifier_scheme"] == "identifier scheme" - assert editor_response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", - json={ - "name": "editor Managing Organization Name", - "identifier": "identifier", - "identifier_scheme": "identifier scheme", - "identifier_scheme_uri": "identifier scheme_uri", - }, - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_managing_organization_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - managing-organization metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" - ) - editor_response = _editor_client.get( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - # Editor was the last to update the metadata successfully so - # the response should reflect that - assert response_data["name"] == "editor Managing Organization Name" - assert response_data["identifier"] == "identifier" - assert response_data["identifier_scheme"] == "identifier scheme" - assert response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - assert admin_response_data["name"] == "editor Managing Organization Name" - assert admin_response_data["identifier"] == "identifier" - assert admin_response_data["identifier_scheme"] == "identifier scheme" - assert admin_response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - assert editor_response_data["name"] == "editor Managing Organization Name" - assert editor_response_data["identifier"] == "identifier" - assert editor_response_data["identifier_scheme"] == "identifier scheme" - assert editor_response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - assert viewer_response_data["name"] == "editor Managing Organization Name" - assert viewer_response_data["identifier"] == "identifier" - assert viewer_response_data["identifier_scheme"] == "identifier scheme" - assert viewer_response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - -# ------------------- RELATED IDENTIFIER METADATA ------------------- # -def test_post_dataset_related_identifier_metadata(clients): - """ - Given a Flask 
application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - related identifier metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", - json=[ - { - "identifier": "test identifier", - "identifier_type": "test identifier type", - "relation_type": "test relation type", - "related_metadata_scheme": "test", - "scheme_uri": "test", - "scheme_type": "test", - "resource_type": "test", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - - pytest.global_dataset_related_identifier_id = response_data[0]["id"] - - assert response_data[0]["identifier"] == "test identifier" - assert response_data[0]["identifier_type"] == "test identifier type" - assert response_data[0]["relation_type"] == "test relation type" - assert response_data[0]["related_metadata_scheme"] == "test" - assert response_data[0]["scheme_uri"] == "test" - assert response_data[0]["scheme_type"] == "test" - assert response_data[0]["resource_type"] == "test" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", - json=[ - { - "identifier": "admin test identifier", - "identifier_type": "test identifier type", - "relation_type": "test relation type", - "related_metadata_scheme": "test", - "scheme_uri": "test", - "scheme_type": "test", - "resource_type": "test", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = 
json.loads(admin_response.data) - pytest.global_dataset_related_identifier_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["identifier"] == "admin test identifier" - assert admin_response_data[0]["identifier_type"] == "test identifier type" - assert admin_response_data[0]["relation_type"] == "test relation type" - assert admin_response_data[0]["related_metadata_scheme"] == "test" - assert admin_response_data[0]["scheme_uri"] == "test" - assert admin_response_data[0]["scheme_type"] == "test" - assert admin_response_data[0]["resource_type"] == "test" - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", - json=[ - { - "identifier": "editor test identifier", - "identifier_type": "test identifier type", - "relation_type": "test relation type", - "related_metadata_scheme": "test", - "scheme_uri": "test", - "scheme_type": "test", - "resource_type": "test", - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_related_identifier_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["identifier"] == "editor test identifier" - assert editor_response_data[0]["identifier_type"] == "test identifier type" - assert editor_response_data[0]["relation_type"] == "test relation type" - assert editor_response_data[0]["related_metadata_scheme"] == "test" - assert editor_response_data[0]["scheme_uri"] == "test" - assert editor_response_data[0]["scheme_type"] == "test" - assert editor_response_data[0]["resource_type"] == "test" - viewer_client = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", - json=[ - { - "identifier": "viewer test identifier", - "identifier_type": "test identifier type", - "relation_type": "test relation type", - "related_metadata_scheme": "test", - "scheme_uri": "test", - "scheme_type": "test", - "resource_type": "test", - } - ], 
- ) - - assert viewer_client.status_code == 403 - - -def test_get_dataset_related_identifier_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - related identifier metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - # seach for main title and subtitle index in response_data[n]["titles"] - # pylint: disable=line-too-long - - # assert len(response_data) == 3 - # assert len(admin_response_data) == 3 - # assert len(editor_response_data) == 3 - # assert len(viewer_response_data) == 3 - print(len(response_data), "lennnnnnnnnn") - assert response_data[0]["identifier"] == "test identifier" - assert response_data[0]["identifier_type"] == "test identifier type" - assert response_data[0]["relation_type"] == "test relation type" - assert response_data[0]["related_metadata_scheme"] == "test" - assert 
response_data[0]["scheme_uri"] == "test" - assert response_data[0]["scheme_type"] == "test" - assert response_data[0]["resource_type"] == "test" - assert response_data[1]["identifier"] == "admin test identifier" - assert response_data[1]["identifier_type"] == "test identifier type" - assert response_data[1]["relation_type"] == "test relation type" - assert response_data[1]["related_metadata_scheme"] == "test" - assert response_data[1]["scheme_uri"] == "test" - assert response_data[1]["scheme_type"] == "test" - assert response_data[1]["resource_type"] == "test" - assert response_data[2]["identifier"] == "editor test identifier" - assert response_data[2]["identifier_type"] == "test identifier type" - assert response_data[2]["relation_type"] == "test relation type" - assert response_data[2]["related_metadata_scheme"] == "test" - assert response_data[2]["scheme_uri"] == "test" - assert response_data[2]["scheme_type"] == "test" - assert response_data[2]["resource_type"] == "test" - - assert admin_response_data[0]["identifier"] == "test identifier" - assert admin_response_data[0]["identifier_type"] == "test identifier type" - assert admin_response_data[0]["relation_type"] == "test relation type" - assert admin_response_data[0]["related_metadata_scheme"] == "test" - assert admin_response_data[0]["scheme_uri"] == "test" - assert admin_response_data[0]["scheme_type"] == "test" - assert admin_response_data[0]["resource_type"] == "test" - assert admin_response_data[1]["identifier"] == "admin test identifier" - assert admin_response_data[1]["identifier_type"] == "test identifier type" - assert admin_response_data[1]["relation_type"] == "test relation type" - assert admin_response_data[1]["related_metadata_scheme"] == "test" - assert admin_response_data[1]["scheme_uri"] == "test" - assert admin_response_data[1]["scheme_type"] == "test" - assert admin_response_data[1]["resource_type"] == "test" - assert admin_response_data[2]["identifier"] == "editor test identifier" - assert 
admin_response_data[2]["identifier_type"] == "test identifier type" - assert admin_response_data[2]["relation_type"] == "test relation type" - assert admin_response_data[2]["related_metadata_scheme"] == "test" - assert admin_response_data[2]["scheme_uri"] == "test" - assert admin_response_data[2]["scheme_type"] == "test" - assert admin_response_data[2]["resource_type"] == "test" - - assert editor_response_data[0]["identifier"] == "test identifier" - assert editor_response_data[0]["identifier_type"] == "test identifier type" - assert editor_response_data[0]["relation_type"] == "test relation type" - assert editor_response_data[0]["related_metadata_scheme"] == "test" - assert editor_response_data[0]["scheme_uri"] == "test" - assert editor_response_data[0]["scheme_type"] == "test" - assert editor_response_data[0]["resource_type"] == "test" - assert editor_response_data[1]["identifier"] == "admin test identifier" - assert editor_response_data[1]["identifier_type"] == "test identifier type" - assert editor_response_data[1]["relation_type"] == "test relation type" - assert editor_response_data[1]["related_metadata_scheme"] == "test" - assert editor_response_data[1]["scheme_uri"] == "test" - assert editor_response_data[1]["scheme_type"] == "test" - assert editor_response_data[1]["resource_type"] == "test" - assert editor_response_data[2]["identifier"] == "editor test identifier" - assert editor_response_data[2]["identifier_type"] == "test identifier type" - assert editor_response_data[2]["relation_type"] == "test relation type" - assert editor_response_data[2]["related_metadata_scheme"] == "test" - assert editor_response_data[2]["scheme_uri"] == "test" - assert editor_response_data[2]["scheme_type"] == "test" - assert editor_response_data[2]["resource_type"] == "test" - - assert viewer_response_data[0]["identifier"] == "test identifier" - assert viewer_response_data[0]["identifier_type"] == "test identifier type" - assert viewer_response_data[0]["relation_type"] == "test 
relation type" - assert viewer_response_data[0]["related_metadata_scheme"] == "test" - assert viewer_response_data[0]["scheme_uri"] == "test" - assert viewer_response_data[0]["scheme_type"] == "test" - assert viewer_response_data[0]["resource_type"] == "test" - assert viewer_response_data[1]["identifier"] == "admin test identifier" - assert viewer_response_data[1]["identifier_type"] == "test identifier type" - assert viewer_response_data[1]["relation_type"] == "test relation type" - assert viewer_response_data[1]["related_metadata_scheme"] == "test" - assert viewer_response_data[1]["scheme_uri"] == "test" - assert viewer_response_data[1]["scheme_type"] == "test" - assert viewer_response_data[1]["resource_type"] == "test" - assert viewer_response_data[2]["identifier"] == "editor test identifier" - assert viewer_response_data[2]["identifier_type"] == "test identifier type" - assert viewer_response_data[2]["relation_type"] == "test relation type" - assert viewer_response_data[2]["related_metadata_scheme"] == "test" - assert viewer_response_data[2]["scheme_uri"] == "test" - assert viewer_response_data[2]["scheme_type"] == "test" - assert viewer_response_data[2]["resource_type"] == "test" - - -def test_delete_dataset_related_identifier_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (DELETE) - Then check that the response is valid and retrieves the dataset - related identifier metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - identifier_id = pytest.global_dataset_related_identifier_id - a_identifier_id = pytest.global_dataset_related_identifier_id_admin - e_identifier_id = pytest.global_dataset_related_identifier_id_editor - - viewer_response = _viewer_client.delete( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{a_identifier_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{e_identifier_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# # ------------------- RIGHTS METADATA ------------------- # -def test_post_dataset_rights_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - rights metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Identifier", - "identifier_scheme": "Identifier Scheme", - "identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_rights_id = response_data[0]["id"] - - assert response_data[0]["identifier"] == "Identifier" - assert response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert response_data[0]["rights"] == "Rights" - assert 
response_data[0]["uri"] == "URI" - assert response_data[0]["license_text"] == "license text" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Admin Identifier", - "identifier_scheme": "Identifier Scheme", - "identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_rights_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["identifier"] == "Admin Identifier" - assert admin_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert admin_response_data[0]["rights"] == "Rights" - assert admin_response_data[0]["uri"] == "URI" - assert admin_response_data[0]["license_text"] == "license text" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Editor Identifier", - "identifier_scheme": "Identifier Scheme", - "identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_rights_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["identifier"] == "Editor Identifier" - assert editor_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert editor_response_data[0]["rights"] == "Rights" - assert editor_response_data[0]["uri"] == "URI" - assert editor_response_data[0]["license_text"] == "license text" - - viewer_response = 
_viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Viewer Identifier", - "identifier_scheme": "Identifier Scheme", - "identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_rights_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - rights metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data[0]["identifier"] == "Identifier" - assert response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert response_data[0]["rights"] == "Rights" - assert response_data[0]["uri"] == "URI" - assert response_data[0]["license_text"] == "license text" - - assert 
admin_response_data[0]["identifier"] == "Identifier" - assert admin_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert admin_response_data[0]["rights"] == "Rights" - assert admin_response_data[0]["uri"] == "URI" - assert admin_response_data[0]["license_text"] == "license text" - - assert editor_response_data[0]["identifier"] == "Identifier" - assert editor_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert editor_response_data[0]["rights"] == "Rights" - assert editor_response_data[0]["uri"] == "URI" - assert editor_response_data[0]["license_text"] == "license text" - - assert response_data[1]["identifier"] == "Admin Identifier" - assert response_data[1]["identifier_scheme"] == "Identifier Scheme" - assert response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" - assert response_data[1]["rights"] == "Rights" - assert response_data[1]["uri"] == "URI" - assert response_data[1]["license_text"] == "license text" - - assert admin_response_data[1]["identifier"] == "Admin Identifier" - assert admin_response_data[1]["identifier_scheme"] == "Identifier Scheme" - assert admin_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" - assert admin_response_data[1]["rights"] == "Rights" - assert admin_response_data[1]["uri"] == "URI" - assert admin_response_data[1]["license_text"] == "license text" - - assert editor_response_data[1]["identifier"] == "Admin Identifier" - assert editor_response_data[1]["identifier_scheme"] == "Identifier Scheme" - assert editor_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" - assert editor_response_data[1]["rights"] == "Rights" - assert editor_response_data[1]["uri"] == "URI" - assert editor_response_data[1]["license_text"] == "license text" - - assert response_data[2]["identifier"] == "Editor Identifier" - assert 
response_data[2]["identifier_scheme"] == "Identifier Scheme" - assert response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" - assert response_data[2]["rights"] == "Rights" - assert response_data[2]["uri"] == "URI" - assert response_data[2]["license_text"] == "license text" - - assert admin_response_data[2]["identifier"] == "Editor Identifier" - assert admin_response_data[2]["identifier_scheme"] == "Identifier Scheme" - assert admin_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" - assert admin_response_data[2]["rights"] == "Rights" - assert admin_response_data[2]["uri"] == "URI" - assert admin_response_data[2]["license_text"] == "license text" - - assert editor_response_data[2]["identifier"] == "Editor Identifier" - assert editor_response_data[2]["identifier_scheme"] == "Identifier Scheme" - assert editor_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" - assert editor_response_data[2]["rights"] == "Rights" - assert editor_response_data[2]["uri"] == "URI" - assert editor_response_data[2]["license_text"] == "license text" - - assert viewer_response_data[0]["identifier"] == "Identifier" - assert viewer_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert viewer_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert viewer_response_data[0]["rights"] == "Rights" - assert viewer_response_data[0]["uri"] == "URI" - assert viewer_response_data[0]["license_text"] == "license text" - - assert viewer_response_data[1]["identifier"] == "Admin Identifier" - assert viewer_response_data[1]["identifier_scheme"] == "Identifier Scheme" - assert viewer_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" - assert viewer_response_data[1]["rights"] == "Rights" - assert viewer_response_data[1]["uri"] == "URI" - assert viewer_response_data[1]["license_text"] == "license text" - - assert viewer_response_data[2]["identifier"] == "Editor Identifier" - assert 
viewer_response_data[2]["identifier_scheme"] == "Identifier Scheme" - assert viewer_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" - assert viewer_response_data[2]["rights"] == "Rights" - assert viewer_response_data[2]["uri"] == "URI" - assert viewer_response_data[2]["license_text"] == "license text" - - -def test_delete_dataset_rights_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/rights' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - rights metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - rights_id = pytest.global_dataset_rights_id - a_rights_id = pytest.global_dataset_rights_id_admin - e_rights_id = pytest.global_dataset_rights_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{rights_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{rights_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{a_rights_id}" - ) - editor_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{e_rights_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- SUBJECTS METADATA ------------------- # -def test_post_dataset_subjects_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' - endpoint is requested (POST) - Then check that the response is valid and creates the 
dataset - subjects metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - "classification_code": "Classification Code", - "scheme": "Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Value URI", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_subject_id = response_data[0]["id"] - - assert response_data[0]["scheme"] == "Scheme" - assert response_data[0]["scheme_uri"] == "Scheme URI" - assert response_data[0]["subject"] == "Subject" - assert response_data[0]["value_uri"] == "Value URI" - assert response_data[0]["classification_code"] == "Classification Code" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - "classification_code": "Classification Code", - "scheme": "Admin Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Admin Value URI", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_subject_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["scheme"] == "Admin Scheme" - assert admin_response_data[0]["scheme_uri"] == "Scheme URI" - assert admin_response_data[0]["subject"] == "Subject" - assert admin_response_data[0]["value_uri"] == "Admin Value URI" - assert admin_response_data[0]["classification_code"] == "Classification Code" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - "classification_code": 
"Classification Code", - "scheme": "Editor Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Editor Value URI", - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_subject_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["scheme"] == "Editor Scheme" - assert editor_response_data[0]["scheme_uri"] == "Scheme URI" - assert editor_response_data[0]["subject"] == "Subject" - assert editor_response_data[0]["value_uri"] == "Editor Value URI" - assert editor_response_data[0]["classification_code"] == "Classification Code" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - "classification_code": "Classification Code", - "scheme": "Viewer Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Viewer Value URI", - } - ], - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_subjects_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - subjects metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject" - ) - - assert response.status_code == 200 - - -def test_delete_dataset_subject_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - subjects metadata content 
- """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - subject_id = pytest.global_dataset_subject_id - admin_sub_id = pytest.global_dataset_subject_id_admin - editor_sub_id = pytest.global_dataset_subject_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{admin_sub_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{editor_sub_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- TITLE METADATA ------------------- # -def test_post_dataset_title_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/title' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - title metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title", - json=[{"title": "Owner Title", "type": "Subtitle"}], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_title_id = response_data[0]["id"] - - assert response_data[0]["title"] == "Owner Title" - assert 
response_data[0]["type"] == "Subtitle" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title", - json=[{"title": "Admin Title", "type": "Subtitle"}], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_title_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["title"] == "Admin Title" - assert admin_response_data[0]["type"] == "Subtitle" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title", - json=[{"title": "Editor Title", "type": "Subtitle"}], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_title_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["title"] == "Editor Title" - assert editor_response_data[0]["type"] == "Subtitle" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title", - json=[{"title": "Viewer Title", "type": "Subtitle"}], - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_title_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/title' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - title metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title" - ) - editor_response = _editor_client.get( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/title" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert len(response_data) == 4 - assert len(admin_response_data) == 4 - assert len(editor_response_data) == 4 - assert len(viewer_response_data) == 4 - - # search for maintitle index - # pylint: disable=line-too-long - main_title = next( - (index for (index, d) in enumerate(response_data) if d["type"] == "MainTitle"), - None, - ) - a_main_title = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["type"] == "MainTitle" - ), - None, - ) - e_main_title = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["type"] == "MainTitle" - ), - None, - ) - v_main_title = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["type"] == "MainTitle" - ), - None, - ) - # search for admin title index - admin_title = next( - ( - index - for (index, d) in enumerate(response_data) - if d["title"] == "Admin Title" - ), - None, - ) - a_admin_title = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["title"] == "Admin Title" - ), - None, - ) - e_admin_title = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["title"] == "Admin Title" - ), - None, - ) - v_admin_title = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["title"] == "Admin Title" - ), - None, - ) - - # search for editor title index - editor_title = next( - ( - index - for (index, d) in enumerate(response_data) - if d["title"] == 
"Editor Title" - ), - None, - ) - a_editor_title = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["title"] == "Editor Title" - ), - None, - ) - e_editor_title = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["title"] == "Editor Title" - ), - None, - ) - v_editor_title = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["title"] == "Editor Title" - ), - None, - ) - - # search for owner title index - own_title = next( - ( - index - for (index, d) in enumerate(response_data) - if d["title"] == "Owner Title" - ), - None, - ) - a_own_title = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["title"] == "Owner Title" - ), - None, - ) - e_own_title = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["title"] == "Owner Title" - ), - None, - ) - v_own_title = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["title"] == "Owner Title" - ), - None, - ) - - assert response_data[main_title]["title"] == "Dataset Title" - assert response_data[main_title]["type"] == "MainTitle" - assert response_data[own_title]["title"] == "Owner Title" - assert response_data[own_title]["type"] == "Subtitle" - assert response_data[admin_title]["title"] == "Admin Title" - assert response_data[admin_title]["type"] == "Subtitle" - assert response_data[editor_title]["title"] == "Editor Title" - assert response_data[editor_title]["type"] == "Subtitle" - - assert admin_response_data[a_main_title]["title"] == "Dataset Title" - assert admin_response_data[a_main_title]["type"] == "MainTitle" - assert admin_response_data[a_own_title]["title"] == "Owner Title" - assert admin_response_data[a_own_title]["type"] == "Subtitle" - assert admin_response_data[a_admin_title]["title"] == "Admin Title" - assert admin_response_data[a_admin_title]["type"] == "Subtitle" - assert admin_response_data[a_editor_title]["title"] == "Editor Title" - assert 
admin_response_data[a_editor_title]["type"] == "Subtitle" - - assert editor_response_data[e_main_title]["title"] == "Dataset Title" - assert editor_response_data[e_main_title]["type"] == "MainTitle" - assert editor_response_data[e_own_title]["title"] == "Owner Title" - assert editor_response_data[e_own_title]["type"] == "Subtitle" - assert editor_response_data[e_admin_title]["title"] == "Admin Title" - assert editor_response_data[e_admin_title]["type"] == "Subtitle" - assert editor_response_data[e_editor_title]["title"] == "Editor Title" - assert editor_response_data[e_editor_title]["type"] == "Subtitle" - - assert viewer_response_data[v_main_title]["title"] == "Dataset Title" - assert viewer_response_data[v_main_title]["type"] == "MainTitle" - assert viewer_response_data[v_own_title]["title"] == "Owner Title" - assert viewer_response_data[v_own_title]["type"] == "Subtitle" - assert viewer_response_data[v_admin_title]["title"] == "Admin Title" - assert viewer_response_data[v_admin_title]["type"] == "Subtitle" - assert viewer_response_data[v_editor_title]["title"] == "Editor Title" - assert viewer_response_data[v_editor_title]["type"] == "Subtitle" - - -def test_delete_dataset_title_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - title metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - title_id = pytest.global_dataset_title_id - admin_title_id = pytest.global_dataset_title_id_admin - editor_title_id = pytest.global_dataset_title_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" - ) - response = 
_logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{admin_title_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{editor_title_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 diff --git a/tests/functional/test_user.py b/tests/functional/test_user.py deleted file mode 100644 index d325e713..00000000 --- a/tests/functional/test_user.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Tests for user settings""" - - -# ------------------- Password Change ------------------- # -def test_post_password_change(clients): - """ - Given a Flask application configured for testing - WHEN the '/auth/password/change' endpoint is requested (PUT) - THEN check that the response is valid and the password is changed - """ - _logged_in_client = clients[0] - - response = _logged_in_client.post( - "/auth/password/change", - json={ - "confirm_password": "Updatedpassword4testing!", - "new_password": "Updatedpassword4testing!", - "old_password": "Testingyeshello11!", - }, - ) - - assert response.status_code == 200 - - -def test_post_password_login_invalid_old_password(clients): - """ - Given a Flask application configured for testing - WHEN the '/auth/login' endpoint is requested (POST) - THEN check that the response is an error when old password is provided - """ - _logged_in_client = clients[0] - response = _logged_in_client.post( - "/auth/login", - json={ - "email_address": "test@fairhub.io", - "password": "Testingyeshello11!", - }, - ) - - assert response.status_code == 401 - - -def test_post_login_new_password(clients): - """ - Given a Flask application configured for testing - WHEN the '/auth/login' endpoint is requested (POST) - THEN check that the response is 
valid when new password is provided - """ - _logged_in_client = clients[0] - response = _logged_in_client.post( - "/auth/login", - json={ - "email_address": "test@fairhub.io", - "password": "Updatedpassword4testing!", - }, - ) - - assert response.status_code == 200