From 2fcc8d1e5df959f95396cbdaaf92374efb091b9e Mon Sep 17 00:00:00 2001 From: jamshale Date: Thu, 18 Jul 2024 15:29:01 -0700 Subject: [PATCH 01/11] Allow lite plugins to be ignored by management and integration tests Signed-off-by: jamshale --- .github/workflows/pr-integration-tests.yaml | 29 +++++++++++++++++++++ lite_plugins | 1 + repo_manager.py | 8 +++--- 3 files changed, 34 insertions(+), 4 deletions(-) create mode 100644 lite_plugins diff --git a/.github/workflows/pr-integration-tests.yaml b/.github/workflows/pr-integration-tests.yaml index 63e87210f..c8bc9ba82 100644 --- a/.github/workflows/pr-integration-tests.yaml +++ b/.github/workflows/pr-integration-tests.yaml @@ -54,6 +54,35 @@ jobs: done done + # Remove any lite plugins from the changed_dirs array + readarray -t lite_plugin_array < lite_plugins + + echo "${changed_dirs[@]}" + echo "${lite_plugin_array[@]}" + # Function to remove items in array2 from array1 + remove_items() { + local -n source_array=$1 + local -n remove_array=$2 + local temp_array=() + + for item in "${source_array[@]}"; do + skip=false + for remove_item in "${remove_array[@]}"; do + if [[ "$item" == "$remove_item" ]]; then + skip=true + break + fi + done + if ! $skip; then + temp_array+=("$item") + fi + done + + source_array=("${temp_array[@]}") + } + + remove_items changed_dirs lite_plugin_array + echo "changed-plugins=${changed_dirs[*]}" >> $GITHUB_OUTPUT #---------------------------------------------- diff --git a/lite_plugins b/lite_plugins new file mode 100644 index 000000000..19aa00dc4 --- /dev/null +++ b/lite_plugins @@ -0,0 +1 @@ +basicmessage_storage \ No newline at end of file diff --git a/repo_manager.py b/repo_manager.py index 8e992e18d..c4aa06d5a 100644 --- a/repo_manager.py +++ b/repo_manager.py @@ -5,6 +5,7 @@ import sys from copy import deepcopy from enum import Enum +from pathlib import Path from typing import Optional GLOBAL_PLUGIN_DIR = "plugin_globals" @@ -289,10 +290,12 @@ def replace_global_sections(name: str) -> None: def is_plugin_directory(plugin_name: str) -> bool: # If there is a directory which is not a plugin it should be ignored here + lite_plugins = Path('lite_plugins').read_text().splitlines() return ( os.path.isdir(plugin_name) and plugin_name != GLOBAL_PLUGIN_DIR and not plugin_name.startswith(".") + and plugin_name not in lite_plugins ) @@ -302,10 +305,7 @@ def update_all_poetry_locks(): print(f"Updating poetry.lock in {root}") subprocess.run(["poetry", "lock"], cwd=root) -def upgrade_library_in_all_plugins(library: str = None): - if library is None: - library = input("Enter the library to upgrade: ") - +def upgrade_library_in_all_plugins(library: str = None): for root, _, files in os.walk("."): if "poetry.lock" in files: with open(f"{root}/poetry.lock", "r") as file: From cbafc6b1ae4d72307aecf681bf615ebd45dc4fb5 Mon Sep 17 00:00:00 2001 From: jamshale Date: Fri, 19 Jul 2024 08:44:01 -0700 Subject: [PATCH 02/11] Add upcoming lite plugins Signed-off-by: jamshale --- lite_plugins | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lite_plugins b/lite_plugins index 19aa00dc4..08d90c91d 100644 --- a/lite_plugins +++ b/lite_plugins @@ -1 +1,2 @@ -basicmessage_storage \ No newline at end of file +jwt_vc_json +mso_mdoc \ No newline at end of file From 2371ac7f1be4c135f48e77d3a7a52a6d6fd034d0 Mon Sep 17 00:00:00 2001 From: jamshale Date: Fri, 19 Jul 2024 08:45:01 -0700 Subject: [PATCH 03/11] Install all extras for unit tests workflow Signed-off-by: jamshale --- .github/workflows/pr-linting-and-unit-tests.yaml | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-linting-and-unit-tests.yaml b/.github/workflows/pr-linting-and-unit-tests.yaml index 6c4aa28ff..75834765f 100644 --- a/.github/workflows/pr-linting-and-unit-tests.yaml +++ b/.github/workflows/pr-linting-and-unit-tests.yaml @@ -71,7 +71,7 @@ jobs: run: | for dir in ${{ steps.changed-plugins.outputs.changed-plugins }}; do cd $dir - poetry install --no-interaction --no-root --extras "aca-py" + poetry install --no-interaction --no-root --all-extras cd .. done #---------------------------------------------- From fb752db91adcab5fcf35f4f38c75a3d395a6db43 Mon Sep 17 00:00:00 2001 From: jamshale Date: Mon, 22 Jul 2024 13:08:15 -0700 Subject: [PATCH 04/11] README update and another exclude condition in repo_manager Signed-off-by: jamshale --- README.md | 10 ++++++++++ repo_manager.py | 27 ++++++++++++++++++--------- 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 22c432398..dcca51e74 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,16 @@ A script was developed to help with maitenance of the repo called `repo_manager. Run `python repo_manager.py` and you will be met with 2 options. - (1) Is used for starting or adding a new plugin. It will generate all the common scaffolding for a plugin which has the expected format. - (2) Is used for updating and changing common poetry dependencies and configurations. It takes the poetry sections in the `pyproject.toml` files from the `plugin_globals` directory and combines them with the local plugin poetry sections. For the dependencies the common will be overridden by the globals. The other config sections will be replaced by the global configs. Then the lock files will be removed and re-installed. + - (3) Is used for updating the `plugin_globals` directory. It will update the plugin versions in the `plugin_globals` directory to the latest version on the main branch of the plugin repo. + - (4) This option is used by the CI/CD release pipeline. It updates the release notes and the individual plugins with a new version of aries_cloudagent. + - (5) This option is also used by the CI/CD release pipeline. It gets any plugins that have passed the tests after a new version of aries_cloudagent has been released; if their changes were not reverted, the plugin has been updated to the new version of aries_cloudagent. + - (6) This option will run a general update for all poetry lock files in all plugins. + - (7) This option is used for upgrading a particular library for all plugins. It's useful when you don't want to do a general upgrade for every library. + +## Lite plugins + +Sometimes is desirable to have a plugin that doesn't need integration tests or extra scaffholding. However, we need a way to avoid these plugins running integration tests in the CI/CD pipeline. To do this, we can simple add the plugin name to the `lite_plugins` file. Which is a line seperated list of plugin names. 
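The same exclusion rule appears twice in this patch: the `remove_items` bash function filters lite plugins out of `changed_dirs` in the workflow, and `is_plugin_directory` skips them in `repo_manager.py`. For reference, a minimal standalone sketch of that filter in Python, assuming it runs from the repository root where the `lite_plugins` file lives:

```python
import os
from pathlib import Path

GLOBAL_PLUGIN_DIR = "plugin_globals"


def non_lite_plugin_dirs(candidates: list[str]) -> list[str]:
    """Return candidate directories that are real plugins and not lite plugins."""
    # lite_plugins is a line-separated list of plugin names at the repo root
    lite_plugins = Path("lite_plugins").read_text().splitlines()
    return [
        name
        for name in candidates
        if os.path.isdir(name)
        and name != GLOBAL_PLUGIN_DIR
        and not name.startswith(".")
        and name not in lite_plugins
    ]


if __name__ == "__main__":
    # e.g. filter every top-level directory of the repo
    print(non_lite_plugin_dirs(sorted(os.listdir("."))))
```

Keeping the list in one line-separated file means the CI workflow and the repo manager read the same source of truth.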
+``` ## Plugin Documentation diff --git a/repo_manager.py b/repo_manager.py index c4aa06d5a..37ea94803 100644 --- a/repo_manager.py +++ b/repo_manager.py @@ -284,20 +284,27 @@ def replace_global_sections(name: str) -> None: """ global_sections, plugin_sections = get_and_combine_main_poetry_sections(name) process_main_config_sections(name, plugin_sections, global_sections) - global_sections, plugin_sections = get_and_combine_integration_poetry_sections(name) - process_integration_config_sections(name, plugin_sections, global_sections) + if is_plugin_directory(name, True): + global_sections, plugin_sections = get_and_combine_integration_poetry_sections(name) + process_integration_config_sections(name, plugin_sections, global_sections) -def is_plugin_directory(plugin_name: str) -> bool: +def is_plugin_directory(plugin_name: str, exclude_lite_plugins: bool = False) -> bool: # If there is a directory which is not a plugin it should be ignored here - lite_plugins = Path('lite_plugins').read_text().splitlines() + if exclude_lite_plugins: + lite_plugins = Path('lite_plugins').read_text().splitlines() + return ( + os.path.isdir(plugin_name) + and plugin_name != GLOBAL_PLUGIN_DIR + and not plugin_name.startswith(".") + and plugin_name not in lite_plugins + ) return ( os.path.isdir(plugin_name) and plugin_name != GLOBAL_PLUGIN_DIR and not plugin_name.startswith(".") - and plugin_name not in lite_plugins ) - + def update_all_poetry_locks(): for root, _, files in os.walk("."): @@ -367,9 +374,11 @@ def main(arg_1=None, arg_2=None): print(f"Updating common poetry sections in {plugin_name}\n") replace_global_sections(plugin_name) os.system(f"cd {plugin_name} && rm poetry.lock && poetry lock") - os.system( - f"cd {plugin_name}/integration && rm poetry.lock && poetry lock" - ) + # Don't update lite plugin integration files (They don't have any) + if is_plugin_directory(plugin_name, True): + os.system( + f"cd {plugin_name}/integration && rm poetry.lock && poetry lock" + ) elif selection == "3": # Upgrade plugin globals lock file From 5747d0462812fadca155d5d1e3d7adf3031e5c72 Mon Sep 17 00:00:00 2001 From: jamshale Date: Tue, 23 Jul 2024 10:22:15 -0700 Subject: [PATCH 05/11] fix typing Signed-off-by: jamshale --- repo_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/repo_manager.py b/repo_manager.py index 37ea94803..54b132c30 100644 --- a/repo_manager.py +++ b/repo_manager.py @@ -6,7 +6,7 @@ from copy import deepcopy from enum import Enum from pathlib import Path -from typing import Optional +from typing import Optional, Tuple GLOBAL_PLUGIN_DIR = "plugin_globals" @@ -165,7 +165,7 @@ def get_section_output( return j - i -def get_and_combine_main_poetry_sections(name: str) -> (dict, dict): +def get_and_combine_main_poetry_sections(name: str) -> Tuple[dict, dict]: """Get the global main sections and combine them with the plugin specific sections.""" global_sections = deepcopy(sections) plugin_sections = deepcopy(sections) From dce6256869640d57f39794638677d1ff6291099e Mon Sep 17 00:00:00 2001 From: jamshale Date: Tue, 23 Jul 2024 10:30:45 -0700 Subject: [PATCH 06/11] fix spelling Signed-off-by: jamshale --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index dcca51e74..e33cd5478 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ Run `python repo_manager.py` and you will be met with 2 options. ## Lite plugins -Sometimes is desirable to have a plugin that doesn't need integration tests or extra scaffholding. 
However, we need a way to avoid these plugins running integration tests in the CI/CD pipeline. To do this, we can simple add the plugin name to the `lite_plugins` file. Which is a line seperated list of plugin names. +Sometimes is desirable to have a plugin that doesn't need integration tests or extra scaffholding. However, we need a way to avoid these plugins running integration tests in the CI/CD pipeline. To do this, we can simply add the plugin name to the `lite_plugins` file. Which is a line seperated list of plugin names. ``` ## Plugin Documentation From ca3a226cd2b10187eba2c56d6fec5501ddae35ef Mon Sep 17 00:00:00 2001 From: Ivan Wei Date: Wed, 17 Jul 2024 17:53:19 -0400 Subject: [PATCH 07/11] modular credential format support for oid4vci Signed-off-by: Ivan Wei --- .gitignore | 197 +++++++++++++++++- jwt_vc_json/.devcontainer/Dockerfile | 22 ++ jwt_vc_json/.devcontainer/devcontainer.json | 50 +++++ jwt_vc_json/.devcontainer/post-install.sh | 14 ++ jwt_vc_json/README.md | 7 + jwt_vc_json/docker/Dockerfile | 35 ++++ jwt_vc_json/docker/default.yml | 22 ++ jwt_vc_json/docker/integration.yml | 20 ++ .../integration/Dockerfile.test.runner | 20 ++ jwt_vc_json/integration/README.md | 14 ++ jwt_vc_json/integration/docker-compose.yml | 34 +++ jwt_vc_json/integration/pyproject.toml | 17 ++ jwt_vc_json/integration/tests/__init__.py | 108 ++++++++++ jwt_vc_json/integration/tests/test_example.py | 32 +++ jwt_vc_json/jwt_vc_json/__init__.py | 1 + jwt_vc_json/jwt_vc_json/definition.py | 10 + jwt_vc_json/jwt_vc_json/v1_0/__init__.py | 6 + .../jwt_vc_json/v1_0/cred_processor.py | 63 ++++++ .../jwt_vc_json/v1_0/tests/__init__.py | 1 + .../jwt_vc_json/v1_0/tests/conftest.py | 62 ++++++ .../v1_0/tests/test_cred_processor.py | 29 +++ .../jwt_vc_json/v1_0/tests/test_init.py | 12 ++ jwt_vc_json/pyproject.toml | 88 ++++++++ mso_mdoc/.devcontainer/Dockerfile | 22 ++ mso_mdoc/.devcontainer/devcontainer.json | 50 +++++ mso_mdoc/.devcontainer/post-install.sh | 14 ++ mso_mdoc/README.md | 7 + mso_mdoc/docker/Dockerfile | 36 ++++ mso_mdoc/docker/default.yml | 22 ++ mso_mdoc/docker/integration.yml | 20 ++ mso_mdoc/integration/Dockerfile.test.runner | 20 ++ mso_mdoc/integration/README.md | 14 ++ mso_mdoc/integration/docker-compose.yml | 33 +++ mso_mdoc/integration/pyproject.toml | 17 ++ mso_mdoc/integration/tests/__init__.py | 108 ++++++++++ mso_mdoc/integration/tests/test_example.py | 32 +++ mso_mdoc/mso_mdoc/__init__.py | 1 + mso_mdoc/mso_mdoc/definition.py | 10 + mso_mdoc/mso_mdoc/v1_0/__init__.py | 6 + mso_mdoc/mso_mdoc/v1_0/cred_processor.py | 53 +++++ mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py | 6 + mso_mdoc/mso_mdoc/v1_0/mdoc/exceptions.py | 20 ++ mso_mdoc/mso_mdoc/v1_0/mdoc/issuer.py | 144 +++++++++++++ mso_mdoc/mso_mdoc/v1_0/mdoc/verifier.py | 102 +++++++++ mso_mdoc/mso_mdoc/v1_0/mso/__init__.py | 3 + mso_mdoc/mso_mdoc/v1_0/mso/issuer.py | 124 +++++++++++ mso_mdoc/mso_mdoc/v1_0/mso/verifier.py | 62 ++++++ mso_mdoc/mso_mdoc/v1_0/routes.py | 157 ++++++++++++++ mso_mdoc/mso_mdoc/v1_0/tests/__init__.py | 1 + mso_mdoc/mso_mdoc/v1_0/tests/conftest.py | 117 +++++++++++ mso_mdoc/mso_mdoc/v1_0/tests/mdoc/__init__.py | 1 + .../mso_mdoc/v1_0/tests/mdoc/test_issuer.py | 12 ++ .../mso_mdoc/v1_0/tests/mdoc/test_verifier.py | 12 ++ mso_mdoc/mso_mdoc/v1_0/tests/mso/__init__.py | 1 + .../mso_mdoc/v1_0/tests/mso/test_issuer.py | 32 +++ .../mso_mdoc/v1_0/tests/mso/test_verifier.py | 17 ++ mso_mdoc/mso_mdoc/v1_0/x509.py | 30 +++ mso_mdoc/pyproject.toml | 93 +++++++++ oid4vci/.DS_Store | Bin 6148 -> 0 bytes oid4vci/README.md | 2 + 
oid4vci/demo/.DS_Store | Bin 6148 -> 0 bytes oid4vci/demo/docker-compose.yaml | 1 + oid4vci/docker/Dockerfile | 18 +- oid4vci/integration/Dockerfile | 1 + oid4vci/integration/docker-compose.yml | 7 +- oid4vci/integration/poetry.lock | 2 +- oid4vci/oid4vci/.DS_Store | Bin 6148 -> 0 bytes oid4vci/oid4vci/config.py | 9 +- oid4vci/oid4vci/models/exchange.py | 2 +- oid4vci/oid4vci/models/supported_cred.py | 3 +- oid4vci/oid4vci/public_routes.py | 94 +++++---- oid4vci/oid4vci/routes.py | 12 +- oid4vci/oid4vci/tests/routes/conftest.py | 1 + oid4vci/poetry.lock | 146 +++++++------ oid4vci/pyproject.toml | 3 + 75 files changed, 2435 insertions(+), 129 deletions(-) create mode 100644 jwt_vc_json/.devcontainer/Dockerfile create mode 100644 jwt_vc_json/.devcontainer/devcontainer.json create mode 100644 jwt_vc_json/.devcontainer/post-install.sh create mode 100644 jwt_vc_json/README.md create mode 100644 jwt_vc_json/docker/Dockerfile create mode 100644 jwt_vc_json/docker/default.yml create mode 100644 jwt_vc_json/docker/integration.yml create mode 100644 jwt_vc_json/integration/Dockerfile.test.runner create mode 100644 jwt_vc_json/integration/README.md create mode 100644 jwt_vc_json/integration/docker-compose.yml create mode 100644 jwt_vc_json/integration/pyproject.toml create mode 100644 jwt_vc_json/integration/tests/__init__.py create mode 100644 jwt_vc_json/integration/tests/test_example.py create mode 100644 jwt_vc_json/jwt_vc_json/__init__.py create mode 100644 jwt_vc_json/jwt_vc_json/definition.py create mode 100644 jwt_vc_json/jwt_vc_json/v1_0/__init__.py create mode 100644 jwt_vc_json/jwt_vc_json/v1_0/cred_processor.py create mode 100644 jwt_vc_json/jwt_vc_json/v1_0/tests/__init__.py create mode 100644 jwt_vc_json/jwt_vc_json/v1_0/tests/conftest.py create mode 100644 jwt_vc_json/jwt_vc_json/v1_0/tests/test_cred_processor.py create mode 100644 jwt_vc_json/jwt_vc_json/v1_0/tests/test_init.py create mode 100644 jwt_vc_json/pyproject.toml create mode 100644 mso_mdoc/.devcontainer/Dockerfile create mode 100644 mso_mdoc/.devcontainer/devcontainer.json create mode 100644 mso_mdoc/.devcontainer/post-install.sh create mode 100644 mso_mdoc/README.md create mode 100644 mso_mdoc/docker/Dockerfile create mode 100644 mso_mdoc/docker/default.yml create mode 100644 mso_mdoc/docker/integration.yml create mode 100644 mso_mdoc/integration/Dockerfile.test.runner create mode 100644 mso_mdoc/integration/README.md create mode 100644 mso_mdoc/integration/docker-compose.yml create mode 100644 mso_mdoc/integration/pyproject.toml create mode 100644 mso_mdoc/integration/tests/__init__.py create mode 100644 mso_mdoc/integration/tests/test_example.py create mode 100644 mso_mdoc/mso_mdoc/__init__.py create mode 100644 mso_mdoc/mso_mdoc/definition.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/__init__.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/cred_processor.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/mdoc/exceptions.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/mdoc/issuer.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/mdoc/verifier.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/mso/__init__.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/mso/issuer.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/mso/verifier.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/routes.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/tests/__init__.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/tests/conftest.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/tests/mdoc/__init__.py create mode 100644 
mso_mdoc/mso_mdoc/v1_0/tests/mdoc/test_issuer.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/tests/mdoc/test_verifier.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/tests/mso/__init__.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/tests/mso/test_issuer.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/tests/mso/test_verifier.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/x509.py create mode 100644 mso_mdoc/pyproject.toml delete mode 100644 oid4vci/.DS_Store delete mode 100644 oid4vci/demo/.DS_Store delete mode 100644 oid4vci/oid4vci/.DS_Store diff --git a/.gitignore b/.gitignore index 0dd219fc6..4c995e74e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,200 @@ +### +### Python +### + +# Byte-compiled / optimized / DLL files __pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ .pytest_cache/ +test-reports/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +*.lock +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +Pipfile +Pipfile.lock + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +### +### Visual Studio Code +### + +.vscode/ + +### +### MacOS +### + +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### +### IntelliJ IDEs +### + +.idea/* +**/.idea/* + +### +### Windows +### + +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# Docs build +_build/ +**/*.iml + +# Open API build +open-api/.build + +# devcontainer +.pytest.ini + +# project specific .ruff_cache/ .test-reports/ **/test-reports/ -.coverage -coverage.xml settings.json -.env \ No newline at end of file diff --git a/jwt_vc_json/.devcontainer/Dockerfile b/jwt_vc_json/.devcontainer/Dockerfile new file mode 100644 index 000000000..28cc62697 --- /dev/null +++ b/jwt_vc_json/.devcontainer/Dockerfile @@ -0,0 +1,22 @@ +# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.134.0/containers/python-3/.devcontainer/base.Dockerfile +ARG VARIANT="3.9-bullseye" +FROM 
mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} + +ARG POETRY_VERSION="1.7.1" +ENV POETRY_HOME="/opt/poetry" \ + POETRY_VERSION=${POETRY_VERSION} + +RUN curl -sSL https://install.python-poetry.org | python3 - \ + && update-alternatives --install /usr/local/bin/poetry poetry /opt/poetry/bin/poetry 900 \ + # Enable tab completion for bash + && poetry completions bash >> /home/vscode/.bash_completion \ + # Enable tab completion for Zsh + && mkdir -p /home/vscode/.zfunc/ \ + && poetry completions zsh > /home/vscode/.zfunc/_poetry \ + && echo "fpath+=~/.zfunc\nautoload -Uz compinit && compinit" >> /home/vscode/.zshrc + +COPY pyproject.toml ./ +# COPY pyproject.toml poetry.lock ./ +RUN poetry config virtualenvs.create false \ + && poetry install --no-root --no-interaction --with integration --extras "aca-py" \ + && rm -rf /root/.cache/pypoetry \ No newline at end of file diff --git a/jwt_vc_json/.devcontainer/devcontainer.json b/jwt_vc_json/.devcontainer/devcontainer.json new file mode 100644 index 000000000..0d0eaf410 --- /dev/null +++ b/jwt_vc_json/.devcontainer/devcontainer.json @@ -0,0 +1,50 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/python +{ + "name": "jwt_vc_json", + "build": { + "dockerfile": "Dockerfile", + "context": "..", + "args": { + "VARIANT": "3.9-bullseye", + "POETRY_VERSION": "1.7.1" + } + }, + "customizations": { + "vscode": { + "extensions": ["ms-python.python", "ms-python.vscode-pylance"], + "settings": { + "python.testing.pytestArgs": ["./jwt_vc_json", "--no-cov"], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.testing.pytestPath": "pytest", + "editor.defaultFormatter": null, + "editor.formatOnSave": false, // enable per language + "[python]": { + "editor.formatOnSave": true + }, + "python.formatting.provider": "black", + "python.formatting.blackPath": "/usr/local/py-utils/bin/black", + "python.formatting.blackArgs": [] + } + } + }, + + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": { + "moby": false + } + }, + + // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. + "remoteUser": "vscode", + + "remoteEnv": { + "RUST_LOG": "aries-askar::log::target=error" + }, + + "mounts": [], + // Use 'forwardPorts' to make a list of ports inside the container available locally. 
+ "forwardPorts": [3000, 3001], + "postCreateCommand": "bash ./.devcontainer/post-install.sh" +} diff --git a/jwt_vc_json/.devcontainer/post-install.sh b/jwt_vc_json/.devcontainer/post-install.sh new file mode 100644 index 000000000..83bc8c94c --- /dev/null +++ b/jwt_vc_json/.devcontainer/post-install.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -ex + +# Convenience workspace directory for later use +WORKSPACE_DIR=$(pwd) + +# install all ACA-Py requirements +python -m pip install --upgrade pip + +# install black for formatting +pip3 install black + +# Generate Poetry Lock file +poetry lock --no-update \ No newline at end of file diff --git a/jwt_vc_json/README.md b/jwt_vc_json/README.md new file mode 100644 index 000000000..e3598fc68 --- /dev/null +++ b/jwt_vc_json/README.md @@ -0,0 +1,7 @@ +### Description: + +< Replace with information about the reason this plugin was produced and a brief overview of the features > + +### Configuration: + +< Replace this section with an outline of configuation options and basic defaults for deploying the plugin > diff --git a/jwt_vc_json/docker/Dockerfile b/jwt_vc_json/docker/Dockerfile new file mode 100644 index 000000000..5b410ce9d --- /dev/null +++ b/jwt_vc_json/docker/Dockerfile @@ -0,0 +1,35 @@ +FROM python:3.9-slim AS base +USER root + +# Install oid4vci plugin +WORKDIR /usr/src +RUN mkdir oid4vci +COPY oid4vci oid4vci + +# Install and configure poetry +WORKDIR /usr/src/app +ENV POETRY_VERSION=1.7.1 +ENV POETRY_HOME=/opt/poetry +RUN apt-get update && apt-get install -y curl && apt-get clean +RUN curl -sSL https://install.python-poetry.org | python - + +ENV PATH="/opt/poetry/bin:$PATH" +RUN poetry config virtualenvs.in-project true + +# Setup project +RUN mkdir jwt_vc_json && touch jwt_vc_json/__init__.py +COPY jwt_vc_json/pyproject.toml jwt_vc_json/poetry.lock jwt_vc_json/README.md ./ +ARG install_flags='--with integration --all-extras' +RUN poetry install ${install_flags} +USER $user + +FROM python:3.9-bullseye +WORKDIR /usr/src/app +COPY --from=base /usr/src/app/.venv /usr/src/app/.venv +ENV PATH="/usr/src/app/.venv/bin:$PATH" + +COPY jwt_vc_json/jwt_vc_json/ jwt_vc_json/ +COPY jwt_vc_json/docker/*.yml ./ + +ENTRYPOINT ["/bin/bash", "-c", "aca-py \"$@\"", "--"] +CMD ["start", "--arg-file", "default.yml"] \ No newline at end of file diff --git a/jwt_vc_json/docker/default.yml b/jwt_vc_json/docker/default.yml new file mode 100644 index 000000000..c6e4f0f3b --- /dev/null +++ b/jwt_vc_json/docker/default.yml @@ -0,0 +1,22 @@ +label: jwt_vc_json + +admin: [0.0.0.0, 3001] +admin-insecure-mode: false +admin-api-key: change-me + +inbound-transport: + - [http, 0.0.0.0, 3000] + - [ws, 0.0.0.0, 3002] +outbound-transport: http +endpoint: + - http://host.docker.internal:3000 + +plugin: + - jwt_vc_json.v1_0 + +genesis-url: http://test.bcovrin.vonx.io/genesis + +log-level: info + +auto-accept-invites: true +auto-respond-messages: true diff --git a/jwt_vc_json/docker/integration.yml b/jwt_vc_json/docker/integration.yml new file mode 100644 index 000000000..c07aaefde --- /dev/null +++ b/jwt_vc_json/docker/integration.yml @@ -0,0 +1,20 @@ +label: jwt_vc_json + +admin: [0.0.0.0, 3001] +admin-insecure-mode: true + +inbound-transport: + - [http, 0.0.0.0, 3000] +outbound-transport: http +endpoint: + - http://host.docker.internal:3000 + +plugin: + - jwt_vc_json.v1_0 + +genesis-url: http://test.bcovrin.vonx.io/genesis + +log-level: info + +auto-accept-invites: true +auto-respond-messages: true diff --git a/jwt_vc_json/integration/Dockerfile.test.runner 
b/jwt_vc_json/integration/Dockerfile.test.runner new file mode 100644 index 000000000..95f0d2461 --- /dev/null +++ b/jwt_vc_json/integration/Dockerfile.test.runner @@ -0,0 +1,20 @@ +FROM python:3.9-slim +WORKDIR /usr/src/app + +# install poetry +RUN pip3 install --no-cache-dir poetry + +# Add docker-compose-wait tool +ENV WAIT_VERSION 2.7.2 +ADD https://github.com/ufoscout/docker-compose-wait/releases/download/$WAIT_VERSION/wait /wait +RUN chmod +x /wait + +# install dependencies +COPY pyproject.toml . +COPY poetry.lock . +RUN poetry install --only main + +# add tests to image +COPY tests/* tests/ + +ENTRYPOINT ["/bin/sh", "-c", "/wait && poetry run pytest \"$@\"", "--"] \ No newline at end of file diff --git a/jwt_vc_json/integration/README.md b/jwt_vc_json/integration/README.md new file mode 100644 index 000000000..60cfe0f17 --- /dev/null +++ b/jwt_vc_json/integration/README.md @@ -0,0 +1,14 @@ +# Integration Tests + +All plugins should have a suite of integration tests. We use `docker compose` to set up the environment, and make use of the [Dockerfile](../docker/Dockerfile) to produce our ACA-Py/Plugin image. To simplify, we have another [Dockerfile](Dockerfile.test.runner) for running those [tests](/tests/). + +## Build and run Tests + +The integration tests will start 2 agents - bob and alice - and a juggernaut container that will execute the tests. Test results will be found in the juggernaut container output. The juggernaut container should close itself down, the logs can be reviewed in the `Docker` view, open `Containers`, open `integration`, right-click the `integration-tests` container and select `View Logs` + +```sh +# open a terminal in vs code +cd integration +docker compose build +docker compose up +``` diff --git a/jwt_vc_json/integration/docker-compose.yml b/jwt_vc_json/integration/docker-compose.yml new file mode 100644 index 000000000..aedb233ff --- /dev/null +++ b/jwt_vc_json/integration/docker-compose.yml @@ -0,0 +1,34 @@ +version: '3' +#*************************************************************** +# integration level test agents * +#*************************************************************** + +services: + bob: + image: plugin-image + build: + context: ../.. + dockerfile: jwt_vc_json/docker/Dockerfile + args: + - install_flags=--no-interaction --with integration --all-extras + - plugins_path= + command: start --arg-file integration.yml --label bob -e http://bob:3000 --log-level debug + + alice: + image: plugin-image + command: start --arg-file integration.yml --label alice -e http://alice:3000 --log-level debug + + tests: + container_name: juggernaut + build: + context: . 
+ dockerfile: Dockerfile.test.runner + environment: + - WAIT_BEFORE_HOSTS=3 + - WAIT_HOSTS=bob:3000, alice:3000 + - WAIT_HOSTS_TIMEOUT=60 + - WAIT_SLEEP_INTERVAL=1 + - WAIT_HOST_CONNECT_TIMEOUT=30 + depends_on: + - bob + - alice \ No newline at end of file diff --git a/jwt_vc_json/integration/pyproject.toml b/jwt_vc_json/integration/pyproject.toml new file mode 100644 index 000000000..4ff3f74eb --- /dev/null +++ b/jwt_vc_json/integration/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "plugin-globals-tests" +version = "0.1.0" +description = "" +authors = [] + +[tool.poetry.dependencies] +python = "^3.9" +pytest = "^8.2.0" +pytest-asyncio = "~0.23.7" +requests = "^2.31.0" + +[tool.poetry.dev-dependencies] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/jwt_vc_json/integration/tests/__init__.py b/jwt_vc_json/integration/tests/__init__.py new file mode 100644 index 000000000..6224ca13d --- /dev/null +++ b/jwt_vc_json/integration/tests/__init__.py @@ -0,0 +1,108 @@ +from functools import wraps + +import pytest +import requests + +AUTO_ACCEPT = "false" + +BOB = "http://bob:3001" +ALICE = "http://alice:3001" + + +def get(agent: str, path: str, **kwargs): + """Get.""" + return requests.get(f"{agent}{path}", **kwargs) + + +def post(agent: str, path: str, **kwargs): + """Post.""" + return requests.post(f"{agent}{path}", **kwargs) + + +def fail_if_not_ok(message: str): + """Fail the current test if wrapped call fails with message.""" + + def _fail_if_not_ok(func): + @wraps(func) + def _wrapper(*args, **kwargs): + response = func(*args, **kwargs) + if not response.ok: + pytest.fail(f"{message}: {response.content}") + return response + + return _wrapper + + return _fail_if_not_ok + + +def unwrap_json_response(func): + """Unwrap a requests response object to json.""" + + @wraps(func) + def _wrapper(*args, **kwargs) -> dict: + response = func(*args, **kwargs) + return response.json() + + return _wrapper + + +class Agent: + """Class for interacting with Agent over Admin API""" + + def __init__(self, url: str): + self.url = url + + @unwrap_json_response + @fail_if_not_ok("Create invitation failed") + def create_invitation(self, **kwargs): + """Create invitation.""" + return post(self.url, "/connections/create-invitation", params=kwargs) + + @unwrap_json_response + @fail_if_not_ok("Receive invitation failed") + def receive_invite(self, invite: dict, **kwargs): + """Receive invitation.""" + return post( + self.url, "/connections/receive-invitation", params=kwargs, json=invite + ) + + @unwrap_json_response + @fail_if_not_ok("Accept invitation failed") + def accept_invite(self, connection_id: str): + """Accept invitation.""" + return post( + self.url, + f"/connections/{connection_id}/accept-invitation", + ) + + @unwrap_json_response + @fail_if_not_ok("Failed to send basic message") + def send_message(self, connection_id, content): + """Set connection metadata.""" + return post( + self.url, + f"/connections/{connection_id}/send-message", + json={"content": content}, + ) + + def get(self, path: str, return_json: bool = True, fail_with: str = None, **kwargs): + """Do get to agent endpoint.""" + wrapped_get = get + if fail_with: + wrapped_get = fail_if_not_ok(fail_with)(wrapped_get) + if return_json: + wrapped_get = unwrap_json_response(wrapped_get) + + return wrapped_get(self.url, path, **kwargs) + + def post( + self, path: str, return_json: bool = True, fail_with: str = None, **kwargs + ): + """Do get to agent 
endpoint.""" + wrapped_post = post + if fail_with: + wrapped_post = fail_if_not_ok(fail_with)(wrapped_post) + if return_json: + wrapped_post = unwrap_json_response(wrapped_post) + + return wrapped_post(self.url, path, **kwargs) diff --git a/jwt_vc_json/integration/tests/test_example.py b/jwt_vc_json/integration/tests/test_example.py new file mode 100644 index 000000000..64a59b3e0 --- /dev/null +++ b/jwt_vc_json/integration/tests/test_example.py @@ -0,0 +1,32 @@ +import time + +import pytest + +from . import ALICE, BOB, Agent + + +@pytest.fixture(scope="session") +def bob(): + """bob agent fixture.""" + yield Agent(BOB) + + +@pytest.fixture(scope="session") +def alice(): + """resolver agent fixture.""" + yield Agent(ALICE) + + +@pytest.fixture(scope="session", autouse=True) +def established_connection(bob, alice): + """Established connection filter.""" + invite = bob.create_invitation(auto_accept="true")["invitation"] + resp = alice.receive_invite(invite, auto_accept="true") + yield resp["connection_id"] + + +def test_send_message(bob, alice, established_connection): + # make sure connection is active... + time.sleep(1) + + alice.send_message(established_connection, "hello bob") diff --git a/jwt_vc_json/jwt_vc_json/__init__.py b/jwt_vc_json/jwt_vc_json/__init__.py new file mode 100644 index 000000000..63e11e986 --- /dev/null +++ b/jwt_vc_json/jwt_vc_json/__init__.py @@ -0,0 +1 @@ +"""jwt_vc_json credential handler plugin.""" \ No newline at end of file diff --git a/jwt_vc_json/jwt_vc_json/definition.py b/jwt_vc_json/jwt_vc_json/definition.py new file mode 100644 index 000000000..e5ece9273 --- /dev/null +++ b/jwt_vc_json/jwt_vc_json/definition.py @@ -0,0 +1,10 @@ +"""Version definitions for this plugin.""" + +versions = [ + { + "major_version": 1, + "minimum_minor_version": 0, + "current_minor_version": 0, + "path": "v1_0", + } +] diff --git a/jwt_vc_json/jwt_vc_json/v1_0/__init__.py b/jwt_vc_json/jwt_vc_json/v1_0/__init__.py new file mode 100644 index 000000000..a3c103ed6 --- /dev/null +++ b/jwt_vc_json/jwt_vc_json/v1_0/__init__.py @@ -0,0 +1,6 @@ +"""Initialize processor.""" + +from .cred_processor import CredProcessor + + +cred_processor = CredProcessor() diff --git a/jwt_vc_json/jwt_vc_json/v1_0/cred_processor.py b/jwt_vc_json/jwt_vc_json/v1_0/cred_processor.py new file mode 100644 index 000000000..884fe91b9 --- /dev/null +++ b/jwt_vc_json/jwt_vc_json/v1_0/cred_processor.py @@ -0,0 +1,63 @@ +"""Issue a jwt_vc_json credential.""" + +import datetime +import logging +import uuid + +from aiohttp import web +from aries_cloudagent.admin.request_context import AdminRequestContext +from aries_cloudagent.wallet.jwt import jwt_sign + +from oid4vci.models.exchange import OID4VCIExchangeRecord +from oid4vci.models.supported_cred import SupportedCredential +from oid4vci.public_routes import types_are_subset, PopResult, ICredProcessor + +LOGGER = logging.getLogger(__name__) + + +class CredProcessor(ICredProcessor): + """Credential processor class for jwt_vc_json format.""" + + async def issue_cred( + self, + body: any, + supported: SupportedCredential, + ex_record: OID4VCIExchangeRecord, + pop: PopResult, + context: AdminRequestContext, + ): + """Return signed credential in JWT format.""" + if not types_are_subset(body.get("types"), supported.format_data.get("types")): + raise web.HTTPBadRequest(reason="Requested types does not match offer.") + + current_time = datetime.datetime.now(datetime.timezone.utc) + current_time_unix_timestamp = int(current_time.timestamp()) + formatted_time = 
current_time.strftime("%Y-%m-%dT%H:%M:%SZ") + cred_id = str(uuid.uuid4()) + + # note: Some wallets require that the "jti" and "id" are a uri + payload = { + "vc": { + **(supported.vc_additional_data or {}), + "id": f"urn:uuid:{cred_id}", + "issuer": ex_record.issuer_id, + "issuanceDate": formatted_time, + "credentialSubject": { + **(ex_record.credential_subject or {}), + "id": pop.holder_kid, + }, + }, + "iss": ex_record.issuer_id, + "nbf": current_time_unix_timestamp, + "jti": f"urn:uuid:{cred_id}", + "sub": pop.holder_kid, + } + + jws = await jwt_sign( + context.profile, + {}, + payload, + verification_method=ex_record.verification_method, + ) + + return jws diff --git a/jwt_vc_json/jwt_vc_json/v1_0/tests/__init__.py b/jwt_vc_json/jwt_vc_json/v1_0/tests/__init__.py new file mode 100644 index 000000000..5a595d474 --- /dev/null +++ b/jwt_vc_json/jwt_vc_json/v1_0/tests/__init__.py @@ -0,0 +1 @@ +"""CredentialProcessor test.""" diff --git a/jwt_vc_json/jwt_vc_json/v1_0/tests/conftest.py b/jwt_vc_json/jwt_vc_json/v1_0/tests/conftest.py new file mode 100644 index 000000000..5bb52df65 --- /dev/null +++ b/jwt_vc_json/jwt_vc_json/v1_0/tests/conftest.py @@ -0,0 +1,62 @@ +import pytest +from unittest.mock import MagicMock + +from aries_cloudagent.admin.request_context import AdminRequestContext + +from oid4vci.models.exchange import OID4VCIExchangeRecord +from oid4vci.models.supported_cred import SupportedCredential +from oid4vci.public_routes import PopResult + + +@pytest.fixture +def body(): + items = {"format": "jwt_vc_json", "types": ["OntarioTestPhotoCard"], "proof": {}} + mock = MagicMock() + mock.__getitem__ = lambda _, k: items[k] + yield mock + + +@pytest.fixture +def supported(): + yield SupportedCredential( + format_data={"types": ["VerifiableCredential", "PhotoCard"]}, + vc_additional_data={ + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://issuer-controller1.stg.ngrok.io/url/schema/photo-card.jsonld", + ], + "type": ["VerifiableCredential", "PhotoCard"], + }, + ) + + +@pytest.fixture +def ex_record(): + yield OID4VCIExchangeRecord( + state=OID4VCIExchangeRecord.STATE_OFFER_CREATED, + verification_method="did:example:123#key-1", + issuer_id="did:example:123", + supported_cred_id="456", + credential_subject={"name": "alice"}, + nonce="789", + pin="000", + code="111", + token="222", + ) + + +@pytest.fixture +def pop(): + yield PopResult( + headers=None, + payload=None, + verified=True, + holder_kid="did:key:example-kid#0", + holder_jwk=None, + ) + + +@pytest.fixture +def context(): + """Test AdminRequestContext.""" + yield AdminRequestContext.test_context() diff --git a/jwt_vc_json/jwt_vc_json/v1_0/tests/test_cred_processor.py b/jwt_vc_json/jwt_vc_json/v1_0/tests/test_cred_processor.py new file mode 100644 index 000000000..4c699cbd4 --- /dev/null +++ b/jwt_vc_json/jwt_vc_json/v1_0/tests/test_cred_processor.py @@ -0,0 +1,29 @@ +import pytest +from aries_cloudagent.admin.request_context import AdminRequestContext + +from oid4vci.models.exchange import OID4VCIExchangeRecord +from oid4vci.models.supported_cred import SupportedCredential +from oid4vci.public_routes import PopResult + +from ..cred_processor import CredProcessor + + +class TestCredentialProcessor: + """Tests for CredentialProcessor.""" + + @pytest.mark.asyncio + async def test_issue_credential( + self, + body: any, + supported: SupportedCredential, + ex_record: OID4VCIExchangeRecord, + pop: PopResult, + context: AdminRequestContext, + ): + """Test issue_credential method.""" + + cred_processor = 
CredProcessor() + + jws = cred_processor.issue_cred(body, supported, ex_record, pop, context) + + assert jws diff --git a/jwt_vc_json/jwt_vc_json/v1_0/tests/test_init.py b/jwt_vc_json/jwt_vc_json/v1_0/tests/test_init.py new file mode 100644 index 000000000..1a8020b42 --- /dev/null +++ b/jwt_vc_json/jwt_vc_json/v1_0/tests/test_init.py @@ -0,0 +1,12 @@ +import pytest + +from ..cred_processor import CredProcessor + + +@pytest.mark.asyncio +async def test__init__(): + """Test __init.""" + + cred_processor = CredProcessor() + + assert cred_processor diff --git a/jwt_vc_json/pyproject.toml b/jwt_vc_json/pyproject.toml new file mode 100644 index 000000000..1230d9389 --- /dev/null +++ b/jwt_vc_json/pyproject.toml @@ -0,0 +1,88 @@ +[tool.poetry] +name = "jwt_vc_json" +version = "0.1.0" +description = "jwt_vc_json credential handler plugin" +authors = [] + +[tool.poetry.dependencies] +python = "^3.9" + +# Define ACA-Py as an optional/extra dependancy so it can be +# explicitly installed with the plugin if desired. +aries-cloudagent = { version = ">=0.10.3, < 1.0.0", optional = true } +oid4vci = { path = "../oid4vci", optional = true, develop = true } + +[tool.poetry.extras] +aca-py = ["aries-cloudagent"] +oid4vci = ["oid4vci"] + +[tool.poetry.dev-dependencies] +ruff = "^0.5.0" +black = "~24.4.2" +pytest = "^8.2.0" +pytest-asyncio = "~0.23.7" +pytest-cov = "^5.0.0" +pytest-ruff = "^0.3.2" +setuptools = "^70.3.0" + +[tool.poetry.group.integration.dependencies] +aries-askar = { version = "~0.3.0" } +indy-credx = { version = "~1.1.1" } +indy-vdr = { version = "~0.4.1" } +ursa-bbs-signatures = { version = "~1.0.1" } +python3-indy = { version = "^1.11.1" } +anoncreds = { version = "0.2.0" } + +[tool.ruff] +line-length = 90 + +[tool.ruff.lint] +select = ["E", "F", "C", "D"] +ignore = [ + # Google Python Doc Style + "D203", "D204", "D213", "D215", "D400", "D401", "D404", "D406", "D407", + "D408", "D409", "D413", + "D202", # Allow blank line after docstring + "D104", # Don't require docstring in public package + # Things that we should fix, but are too much work right now + "D417", "C901", +] + +[tool.ruff.per-file-ignores] +"**/{tests}/*" = ["F841", "D", "E501"] + +[tool.pytest.ini_options] +testpaths = "jwt_vc_json" +addopts = """ + -p no:warnings + --quiet --junitxml=./.test-reports/junit.xml + --cov-config .coveragerc --cov=jwt_vc_json --cov-report term --cov-report xml +""" +markers = [] +junit_family = "xunit1" +asyncio_mode = "auto" + +[tool.coverage.run] +omit = [ + "*/tests/*", + "docker/*", + "integration/*", + "*/definition.py" +] +data_file = ".test-reports/.coverage" + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "@abstract" +] +precision = 2 +skip_covered = true +show_missing = true + +[tool.coverage.xml] +output = ".test-reports/coverage.xml" + +[build-system] +requires = ["setuptools", "poetry-core>=1.2"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/mso_mdoc/.devcontainer/Dockerfile b/mso_mdoc/.devcontainer/Dockerfile new file mode 100644 index 000000000..28cc62697 --- /dev/null +++ b/mso_mdoc/.devcontainer/Dockerfile @@ -0,0 +1,22 @@ +# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.134.0/containers/python-3/.devcontainer/base.Dockerfile +ARG VARIANT="3.9-bullseye" +FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} + +ARG POETRY_VERSION="1.7.1" +ENV POETRY_HOME="/opt/poetry" \ + POETRY_VERSION=${POETRY_VERSION} + +RUN curl -sSL https://install.python-poetry.org | python3 - \ + 
&& update-alternatives --install /usr/local/bin/poetry poetry /opt/poetry/bin/poetry 900 \ + # Enable tab completion for bash + && poetry completions bash >> /home/vscode/.bash_completion \ + # Enable tab completion for Zsh + && mkdir -p /home/vscode/.zfunc/ \ + && poetry completions zsh > /home/vscode/.zfunc/_poetry \ + && echo "fpath+=~/.zfunc\nautoload -Uz compinit && compinit" >> /home/vscode/.zshrc + +COPY pyproject.toml ./ +# COPY pyproject.toml poetry.lock ./ +RUN poetry config virtualenvs.create false \ + && poetry install --no-root --no-interaction --with integration --extras "aca-py" \ + && rm -rf /root/.cache/pypoetry \ No newline at end of file diff --git a/mso_mdoc/.devcontainer/devcontainer.json b/mso_mdoc/.devcontainer/devcontainer.json new file mode 100644 index 000000000..2c0f216d4 --- /dev/null +++ b/mso_mdoc/.devcontainer/devcontainer.json @@ -0,0 +1,50 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/python +{ + "name": "mso_mdoc", + "build": { + "dockerfile": "Dockerfile", + "context": "..", + "args": { + "VARIANT": "3.9-bullseye", + "POETRY_VERSION": "1.7.1" + } + }, + "customizations": { + "vscode": { + "extensions": ["ms-python.python", "ms-python.vscode-pylance"], + "settings": { + "python.testing.pytestArgs": ["./mso_mdoc", "--no-cov"], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.testing.pytestPath": "pytest", + "editor.defaultFormatter": null, + "editor.formatOnSave": false, // enable per language + "[python]": { + "editor.formatOnSave": true + }, + "python.formatting.provider": "black", + "python.formatting.blackPath": "/usr/local/py-utils/bin/black", + "python.formatting.blackArgs": [] + } + } + }, + + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": { + "moby": false + } + }, + + // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. + "remoteUser": "vscode", + + "remoteEnv": { + "RUST_LOG": "aries-askar::log::target=error" + }, + + "mounts": [], + // Use 'forwardPorts' to make a list of ports inside the container available locally. 
+ "forwardPorts": [3000, 3001], + "postCreateCommand": "bash ./.devcontainer/post-install.sh" +} diff --git a/mso_mdoc/.devcontainer/post-install.sh b/mso_mdoc/.devcontainer/post-install.sh new file mode 100644 index 000000000..83bc8c94c --- /dev/null +++ b/mso_mdoc/.devcontainer/post-install.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -ex + +# Convenience workspace directory for later use +WORKSPACE_DIR=$(pwd) + +# install all ACA-Py requirements +python -m pip install --upgrade pip + +# install black for formatting +pip3 install black + +# Generate Poetry Lock file +poetry lock --no-update \ No newline at end of file diff --git a/mso_mdoc/README.md b/mso_mdoc/README.md new file mode 100644 index 000000000..e3598fc68 --- /dev/null +++ b/mso_mdoc/README.md @@ -0,0 +1,7 @@ +### Description: + +< Replace with information about the reason this plugin was produced and a brief overview of the features > + +### Configuration: + +< Replace this section with an outline of configuation options and basic defaults for deploying the plugin > diff --git a/mso_mdoc/docker/Dockerfile b/mso_mdoc/docker/Dockerfile new file mode 100644 index 000000000..c98db10d9 --- /dev/null +++ b/mso_mdoc/docker/Dockerfile @@ -0,0 +1,36 @@ +FROM python:3.9-slim AS base +USER root + +# Install oid4vci plugin +WORKDIR /usr/src +RUN mkdir oid4vci +COPY oid4vci oid4vci + +# Install and configure poetry +WORKDIR /usr/src/app + +ENV POETRY_VERSION=1.7.1 +ENV POETRY_HOME=/opt/poetry +RUN apt-get update && apt-get install -y curl && apt-get clean +RUN curl -sSL https://install.python-poetry.org | python - + +ENV PATH="/opt/poetry/bin:$PATH" +RUN poetry config virtualenvs.in-project true + +# Setup project +RUN mkdir mso_mdoc && touch mso_mdoc/__init__.py +COPY mso_mdoc/pyproject.toml mso_mdoc/poetry.lock mso_mdoc/README.md ./ +ARG install_flags='--with integration --all-extras' +RUN poetry install ${install_flags} +USER $user + +FROM python:3.9-bullseye +WORKDIR /usr/src/app +COPY --from=base /usr/src/app/.venv /usr/src/app/.venv +ENV PATH="/usr/src/app/.venv/bin:$PATH" + +COPY mso_mdoc/mso_mdoc/ mso_mdoc/ +COPY mso_mdoc/docker/*.yml ./ + +ENTRYPOINT ["/bin/bash", "-c", "aca-py \"$@\"", "--"] +CMD ["start", "--arg-file", "default.yml"] \ No newline at end of file diff --git a/mso_mdoc/docker/default.yml b/mso_mdoc/docker/default.yml new file mode 100644 index 000000000..0991775c2 --- /dev/null +++ b/mso_mdoc/docker/default.yml @@ -0,0 +1,22 @@ +label: mso_mdoc + +admin: [0.0.0.0, 3001] +admin-insecure-mode: false +admin-api-key: change-me + +inbound-transport: + - [http, 0.0.0.0, 3000] + - [ws, 0.0.0.0, 3002] +outbound-transport: http +endpoint: + - http://host.docker.internal:3000 + +plugin: + - mso_mdoc.v1_0 + +genesis-url: http://test.bcovrin.vonx.io/genesis + +log-level: info + +auto-accept-invites: true +auto-respond-messages: true diff --git a/mso_mdoc/docker/integration.yml b/mso_mdoc/docker/integration.yml new file mode 100644 index 000000000..ac7ba980b --- /dev/null +++ b/mso_mdoc/docker/integration.yml @@ -0,0 +1,20 @@ +label: mso_mdoc + +admin: [0.0.0.0, 3001] +admin-insecure-mode: true + +inbound-transport: + - [http, 0.0.0.0, 3000] +outbound-transport: http +endpoint: + - http://host.docker.internal:3000 + +plugin: + - mso_mdoc.v1_0 + +genesis-url: http://test.bcovrin.vonx.io/genesis + +log-level: info + +auto-accept-invites: true +auto-respond-messages: true diff --git a/mso_mdoc/integration/Dockerfile.test.runner b/mso_mdoc/integration/Dockerfile.test.runner new file mode 100644 index 000000000..95f0d2461 --- /dev/null 
+++ b/mso_mdoc/integration/Dockerfile.test.runner @@ -0,0 +1,20 @@ +FROM python:3.9-slim +WORKDIR /usr/src/app + +# install poetry +RUN pip3 install --no-cache-dir poetry + +# Add docker-compose-wait tool +ENV WAIT_VERSION 2.7.2 +ADD https://github.com/ufoscout/docker-compose-wait/releases/download/$WAIT_VERSION/wait /wait +RUN chmod +x /wait + +# install dependencies +COPY pyproject.toml . +COPY poetry.lock . +RUN poetry install --only main + +# add tests to image +COPY tests/* tests/ + +ENTRYPOINT ["/bin/sh", "-c", "/wait && poetry run pytest \"$@\"", "--"] \ No newline at end of file diff --git a/mso_mdoc/integration/README.md b/mso_mdoc/integration/README.md new file mode 100644 index 000000000..60cfe0f17 --- /dev/null +++ b/mso_mdoc/integration/README.md @@ -0,0 +1,14 @@ +# Integration Tests + +All plugins should have a suite of integration tests. We use `docker compose` to set up the environment, and make use of the [Dockerfile](../docker/Dockerfile) to produce our ACA-Py/Plugin image. To simplify, we have another [Dockerfile](Dockerfile.test.runner) for running those [tests](/tests/). + +## Build and run Tests + +The integration tests will start 2 agents - bob and alice - and a juggernaut container that will execute the tests. Test results will be found in the juggernaut container output. The juggernaut container should close itself down, the logs can be reviewed in the `Docker` view, open `Containers`, open `integration`, right-click the `integration-tests` container and select `View Logs` + +```sh +# open a terminal in vs code +cd integration +docker compose build +docker compose up +``` diff --git a/mso_mdoc/integration/docker-compose.yml b/mso_mdoc/integration/docker-compose.yml new file mode 100644 index 000000000..34c03f859 --- /dev/null +++ b/mso_mdoc/integration/docker-compose.yml @@ -0,0 +1,33 @@ +version: '3' +#*************************************************************** +# integration level test agents * +#*************************************************************** + +services: + bob: + image: plugin-image + build: + context: ../.. + dockerfile: mso_mdoc/docker/Dockerfile + args: + - install_flags=--no-interaction --with integration --all-extras + command: start --arg-file integration.yml --label bob -e http://bob:3000 --log-level debug + + alice: + image: plugin-image + command: start --arg-file integration.yml --label alice -e http://alice:3000 --log-level debug + + tests: + container_name: juggernaut + build: + context: . 
+ dockerfile: Dockerfile.test.runner + environment: + - WAIT_BEFORE_HOSTS=3 + - WAIT_HOSTS=bob:3000, alice:3000 + - WAIT_HOSTS_TIMEOUT=60 + - WAIT_SLEEP_INTERVAL=1 + - WAIT_HOST_CONNECT_TIMEOUT=30 + depends_on: + - bob + - alice \ No newline at end of file diff --git a/mso_mdoc/integration/pyproject.toml b/mso_mdoc/integration/pyproject.toml new file mode 100644 index 000000000..4ff3f74eb --- /dev/null +++ b/mso_mdoc/integration/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "plugin-globals-tests" +version = "0.1.0" +description = "" +authors = [] + +[tool.poetry.dependencies] +python = "^3.9" +pytest = "^8.2.0" +pytest-asyncio = "~0.23.7" +requests = "^2.31.0" + +[tool.poetry.dev-dependencies] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/mso_mdoc/integration/tests/__init__.py b/mso_mdoc/integration/tests/__init__.py new file mode 100644 index 000000000..6224ca13d --- /dev/null +++ b/mso_mdoc/integration/tests/__init__.py @@ -0,0 +1,108 @@ +from functools import wraps + +import pytest +import requests + +AUTO_ACCEPT = "false" + +BOB = "http://bob:3001" +ALICE = "http://alice:3001" + + +def get(agent: str, path: str, **kwargs): + """Get.""" + return requests.get(f"{agent}{path}", **kwargs) + + +def post(agent: str, path: str, **kwargs): + """Post.""" + return requests.post(f"{agent}{path}", **kwargs) + + +def fail_if_not_ok(message: str): + """Fail the current test if wrapped call fails with message.""" + + def _fail_if_not_ok(func): + @wraps(func) + def _wrapper(*args, **kwargs): + response = func(*args, **kwargs) + if not response.ok: + pytest.fail(f"{message}: {response.content}") + return response + + return _wrapper + + return _fail_if_not_ok + + +def unwrap_json_response(func): + """Unwrap a requests response object to json.""" + + @wraps(func) + def _wrapper(*args, **kwargs) -> dict: + response = func(*args, **kwargs) + return response.json() + + return _wrapper + + +class Agent: + """Class for interacting with Agent over Admin API""" + + def __init__(self, url: str): + self.url = url + + @unwrap_json_response + @fail_if_not_ok("Create invitation failed") + def create_invitation(self, **kwargs): + """Create invitation.""" + return post(self.url, "/connections/create-invitation", params=kwargs) + + @unwrap_json_response + @fail_if_not_ok("Receive invitation failed") + def receive_invite(self, invite: dict, **kwargs): + """Receive invitation.""" + return post( + self.url, "/connections/receive-invitation", params=kwargs, json=invite + ) + + @unwrap_json_response + @fail_if_not_ok("Accept invitation failed") + def accept_invite(self, connection_id: str): + """Accept invitation.""" + return post( + self.url, + f"/connections/{connection_id}/accept-invitation", + ) + + @unwrap_json_response + @fail_if_not_ok("Failed to send basic message") + def send_message(self, connection_id, content): + """Set connection metadata.""" + return post( + self.url, + f"/connections/{connection_id}/send-message", + json={"content": content}, + ) + + def get(self, path: str, return_json: bool = True, fail_with: str = None, **kwargs): + """Do get to agent endpoint.""" + wrapped_get = get + if fail_with: + wrapped_get = fail_if_not_ok(fail_with)(wrapped_get) + if return_json: + wrapped_get = unwrap_json_response(wrapped_get) + + return wrapped_get(self.url, path, **kwargs) + + def post( + self, path: str, return_json: bool = True, fail_with: str = None, **kwargs + ): + """Do get to agent endpoint.""" + 
wrapped_post = post + if fail_with: + wrapped_post = fail_if_not_ok(fail_with)(wrapped_post) + if return_json: + wrapped_post = unwrap_json_response(wrapped_post) + + return wrapped_post(self.url, path, **kwargs) diff --git a/mso_mdoc/integration/tests/test_example.py b/mso_mdoc/integration/tests/test_example.py new file mode 100644 index 000000000..64a59b3e0 --- /dev/null +++ b/mso_mdoc/integration/tests/test_example.py @@ -0,0 +1,32 @@ +import time + +import pytest + +from . import ALICE, BOB, Agent + + +@pytest.fixture(scope="session") +def bob(): + """bob agent fixture.""" + yield Agent(BOB) + + +@pytest.fixture(scope="session") +def alice(): + """resolver agent fixture.""" + yield Agent(ALICE) + + +@pytest.fixture(scope="session", autouse=True) +def established_connection(bob, alice): + """Established connection filter.""" + invite = bob.create_invitation(auto_accept="true")["invitation"] + resp = alice.receive_invite(invite, auto_accept="true") + yield resp["connection_id"] + + +def test_send_message(bob, alice, established_connection): + # make sure connection is active... + time.sleep(1) + + alice.send_message(established_connection, "hello bob") diff --git a/mso_mdoc/mso_mdoc/__init__.py b/mso_mdoc/mso_mdoc/__init__.py new file mode 100644 index 000000000..410ec3dd1 --- /dev/null +++ b/mso_mdoc/mso_mdoc/__init__.py @@ -0,0 +1 @@ +"""MSO_MDOC Crendential Handler Plugin.""" \ No newline at end of file diff --git a/mso_mdoc/mso_mdoc/definition.py b/mso_mdoc/mso_mdoc/definition.py new file mode 100644 index 000000000..e5ece9273 --- /dev/null +++ b/mso_mdoc/mso_mdoc/definition.py @@ -0,0 +1,10 @@ +"""Version definitions for this plugin.""" + +versions = [ + { + "major_version": 1, + "minimum_minor_version": 0, + "current_minor_version": 0, + "path": "v1_0", + } +] diff --git a/mso_mdoc/mso_mdoc/v1_0/__init__.py b/mso_mdoc/mso_mdoc/v1_0/__init__.py new file mode 100644 index 000000000..a3c103ed6 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/__init__.py @@ -0,0 +1,6 @@ +"""Initialize processor.""" + +from .cred_processor import CredProcessor + + +cred_processor = CredProcessor() diff --git a/mso_mdoc/mso_mdoc/v1_0/cred_processor.py b/mso_mdoc/mso_mdoc/v1_0/cred_processor.py new file mode 100644 index 000000000..34f1ee91e --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/cred_processor.py @@ -0,0 +1,53 @@ +"""Issue a mso_mdoc credential.""" + +import logging +import json +import re + +from aiohttp import web +from aries_cloudagent.admin.request_context import AdminRequestContext + +from oid4vci.models.exchange import OID4VCIExchangeRecord +from oid4vci.models.supported_cred import SupportedCredential +from oid4vci.public_routes import PopResult, ICredProcessor + +from .mdoc import mso_mdoc_sign + +LOGGER = logging.getLogger(__name__) + + +class CredProcessor(ICredProcessor): + """Credential processor class for mso_mdoc credential format.""" + + async def issue_cred( + self, + body: any, + supported: SupportedCredential, + ex_record: OID4VCIExchangeRecord, + pop: PopResult, + context: AdminRequestContext, + ): + """Return signed credential in COBR format.""" + if body.get("doctype") != supported.format_data.get("doctype"): + raise web.HTTPBadRequest(reason="Requested types does not match offer.") + + try: + headers = { + "doctype": supported.format_data.get("doctype"), + "deviceKey": re.sub( + "did:(.+?):(.+?)#(.*)", + "\\2", + json.dumps(pop.holder_jwk or pop.holder_kid), + ), + } + did = None + verification_method = ex_record.verification_method + payload = ex_record.credential_subject + mso_mdoc 
= await mso_mdoc_sign( + context.profile, headers, payload, did, verification_method + ) + mso_mdoc = mso_mdoc[2:-1] if mso_mdoc.startswith("b'") else None + except ValueError as err: + raise web.HTTPBadRequest(reason="Failed to issue credential") from err + + return mso_mdoc diff --git a/mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py b/mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py new file mode 100644 index 000000000..27387538b --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py @@ -0,0 +1,6 @@ +"""MDoc module.""" + +from .issuer import mso_mdoc_sign, mdoc_sign +from .verifier import mso_mdoc_verify, mdoc_verify, MdocVerifyResult +from .exceptions import MissingPrivateKey, MissingIssuerAuth +from .exceptions import NoDocumentTypeProvided, NoSignedDocumentProvided diff --git a/mso_mdoc/mso_mdoc/v1_0/mdoc/exceptions.py b/mso_mdoc/mso_mdoc/v1_0/mdoc/exceptions.py new file mode 100644 index 000000000..fd5f8fed4 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/mdoc/exceptions.py @@ -0,0 +1,20 @@ +"""Exceptions module.""" + +class MissingPrivateKey(Exception): + """Missing private key error.""" + pass + + +class NoDocumentTypeProvided(Exception): + """No document type error.""" + pass + + +class NoSignedDocumentProvided(Exception): + """No signed document provider error.""" + pass + + +class MissingIssuerAuth(Exception): + """Missing issuer authentication error.""" + pass diff --git a/mso_mdoc/mso_mdoc/v1_0/mdoc/issuer.py b/mso_mdoc/mso_mdoc/v1_0/mdoc/issuer.py new file mode 100644 index 000000000..519e55085 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/mdoc/issuer.py @@ -0,0 +1,144 @@ +"""Operations supporting mso_mdoc issuance.""" + +import os +import json +import logging +import cbor2 +from binascii import hexlify +from pycose.keys import CoseKey +from pydid import DIDUrl +from typing import Any, Mapping, Optional + +from aries_cloudagent.core.profile import Profile +from aries_cloudagent.wallet.default_verification_key_strategy import ( + BaseVerificationKeyStrategy, +) +from aries_cloudagent.wallet.base import BaseWallet +from aries_cloudagent.wallet.util import b64_to_bytes, bytes_to_b64 + +from ..mso import MsoIssuer +from ..x509 import selfsigned_x509cert + +LOGGER = logging.getLogger(__name__) + + +def dict_to_b64(value: Mapping[str, Any]) -> str: + """Encode a dictionary as a b64 string.""" + return bytes_to_b64(json.dumps(value).encode(), urlsafe=True, pad=False) + + +def b64_to_dict(value: str) -> Mapping[str, Any]: + """Decode a dictionary from a b64 encoded value.""" + return json.loads(b64_to_bytes(value, urlsafe=True)) + + +def nym_to_did(value: str) -> str: + """Return a did from nym if passed value is nym, else return value.""" + return value if value.startswith("did:") else f"did:sov:{value}" + + +def did_lookup_name(value: str) -> str: + """Return the value used to lookup a DID in the wallet. + + If value is did:sov, return the unqualified value. Else, return value. 
+ """ + return value.split(":", 3)[2] if value.startswith("did:sov:") else value + + +async def mso_mdoc_sign( + profile: Profile, + headers: Mapping[str, Any], + payload: Mapping[str, Any], + did: Optional[str] = None, + verification_method: Optional[str] = None, +) -> str: + """Create a signed mso_mdoc given headers, payload, and signing DID or DID URL.""" + if verification_method is None: + if did is None: + raise ValueError("did or verificationMethod required.") + + did = nym_to_did(did) + + verkey_strat = profile.inject(BaseVerificationKeyStrategy) + verification_method = await verkey_strat.get_verification_method_id_for_did( + did, profile + ) + if not verification_method: + raise ValueError("Could not determine verification method from DID") + else: + # We look up keys by did for now + did = DIDUrl.parse(verification_method).did + if not did: + raise ValueError("DID URL must be absolute") + + async with profile.session() as session: + wallet = session.inject(BaseWallet) + LOGGER.info(f"mso_mdoc sign: {did}") + + did_info = await wallet.get_local_did(did_lookup_name(did)) + key_pair = await wallet._session.handle.fetch_key(did_info.verkey) + jwk_bytes = key_pair.key.get_jwk_secret() + jwk = json.loads(jwk_bytes) + + return mdoc_sign(jwk, headers, payload) + + +def mdoc_sign( + jwk: dict, headers: Mapping[str, Any], payload: Mapping[str, Any] +) -> str: + """Create a signed mso_mdoc given headers, payload, and private key.""" + pk_dict = { + "KTY": jwk.get("kty") or "", # OKP, EC + "CURVE": jwk.get("crv") or "", # ED25519, P_256 + "ALG": "EdDSA" if jwk.get("kty") == "OKP" else "ES256", + "D": b64_to_bytes(jwk.get("d") or "", True), # EdDSA + "X": b64_to_bytes(jwk.get("x") or "", True), # EdDSA, EcDSA + "Y": b64_to_bytes(jwk.get("y") or "", True), # EcDSA + "KID": os.urandom(32), + } + cose_key = CoseKey.from_dict(pk_dict) + + if isinstance(headers, dict): + doctype = headers.get("doctype") or "" + device_key = headers.get("deviceKey") or "" + else: + raise ValueError("missing headers.") + + if isinstance(payload, dict): + doctype = headers.get("doctype") + data = [{"doctype": doctype, "data": payload}] + else: + raise ValueError("missing payload.") + + documents = [] + for doc in data: + _cert = selfsigned_x509cert(private_key=cose_key) + msoi = MsoIssuer(data=doc["data"], private_key=cose_key, x509_cert=_cert) + mso = msoi.sign(device_key=device_key, doctype=doctype) + issuer_auth = mso.encode() + issuer_auth = cbor2.loads(issuer_auth).value + issuer_auth[2] = cbor2.dumps(cbor2.CBORTag(24, issuer_auth[2])) + document = { + "docType": doctype, + "issuerSigned": { + "nameSpaces": { + ns: [cbor2.CBORTag(24, cbor2.dumps(v)) for k, v in dgst.items()] + for ns, dgst in msoi.disclosure_map.items() + }, + "issuerAuth": issuer_auth, + }, + # this is required during the presentation. 
+ # 'deviceSigned': { + # # TODO + # } + } + documents.append(document) + + signed = { + "version": "1.0", + "documents": documents, + "status": 0, + } + signed_hex = hexlify(cbor2.dumps(signed)) + + return f"{signed_hex}" diff --git a/mso_mdoc/mso_mdoc/v1_0/mdoc/verifier.py b/mso_mdoc/mso_mdoc/v1_0/mdoc/verifier.py new file mode 100644 index 000000000..003594014 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/mdoc/verifier.py @@ -0,0 +1,102 @@ +"""Operations supporting mso_mdoc creation and verification.""" + +import logging +import re +from binascii import unhexlify +from typing import Any, Mapping +from marshmallow import fields +from aries_cloudagent.core.profile import Profile +from aries_cloudagent.messaging.models.base import BaseModel, BaseModelSchema +from aries_cloudagent.wallet.error import WalletNotFoundError +from aries_cloudagent.wallet.base import BaseWallet +from aries_cloudagent.wallet.util import bytes_to_b58 +import cbor2 +from cbor_diag import cbor2diag +from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat +from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PublicKey +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey + +from ..mso import MsoVerifier + +LOGGER = logging.getLogger(__name__) + + +class MdocVerifyResult(BaseModel): + """Result from verify.""" + + class Meta: + """MdocVerifyResult metadata.""" + + schema_class = "MdocVerifyResultSchema" + + def __init__( + self, + headers: Mapping[str, Any], + payload: Mapping[str, Any], + valid: bool, + kid: str, + ): + """Initialize a MdocVerifyResult instance.""" + self.headers = headers + self.payload = payload + self.valid = valid + self.kid = kid + + +class MdocVerifyResultSchema(BaseModelSchema): + """MdocVerifyResult schema.""" + + class Meta: + """MdocVerifyResultSchema metadata.""" + + model_class = MdocVerifyResult + + headers = fields.Dict( + required=False, metadata={"description": "Headers from verified mso_mdoc."} + ) + payload = fields.Dict( + required=True, metadata={"description": "Payload from verified mso_mdoc"} + ) + valid = fields.Bool(required=True) + kid = fields.Str(required=False, metadata={"description": "kid of signer"}) + error = fields.Str(required=False, metadata={"description": "Error text"}) + + +async def mso_mdoc_verify(profile: Profile, mdoc_str: str) -> MdocVerifyResult: + """Verify a mso_mdoc CBOR string.""" + result = mdoc_verify(mdoc_str) + verkey = result.kid + + async with profile.session() as session: + wallet = session.inject(BaseWallet) + try: + did_info = await wallet.get_local_did_for_verkey(verkey) + except WalletNotFoundError: + did_info = None + verification_method = did_info.did if did_info else "" + result.kid = verification_method + + return result + + +def mdoc_verify(mdoc_str: str) -> MdocVerifyResult: + """Verify a mso_mdoc CBOR string.""" + mdoc_bytes = unhexlify(mdoc_str) + mso_mdoc = cbor2.loads(mdoc_bytes) + mso_verifier = MsoVerifier(mso_mdoc["documents"][0]["issuerSigned"]["issuerAuth"]) + valid = mso_verifier.verify_signature() + + headers = {} + mdoc_str = str(cbor2diag(mdoc_bytes)).replace("\n", "").replace("h'", "'") + mdoc_str = re.sub(r'\s+(?=(?:[^"]*"[^"]*")*[^"]*$)', "", mdoc_str) + payload = {"mso_mdoc": mdoc_str} + + if isinstance(mso_verifier.public_key, Ed25519PublicKey): + public_bytes = mso_verifier.public_key.public_bytes_raw() + elif isinstance(mso_verifier.public_key, EllipticCurvePublicKey): + public_bytes = mso_verifier.public_key.public_bytes( + Encoding.DER, 
PublicFormat.SubjectPublicKeyInfo + ) + verkey = bytes_to_b58(public_bytes) + + return MdocVerifyResult(headers, payload, valid, verkey) diff --git a/mso_mdoc/mso_mdoc/v1_0/mso/__init__.py b/mso_mdoc/mso_mdoc/v1_0/mso/__init__.py new file mode 100644 index 000000000..0b356db01 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/mso/__init__.py @@ -0,0 +1,3 @@ +"""MSO module.""" +from .issuer import MsoIssuer +from .verifier import MsoVerifier diff --git a/mso_mdoc/mso_mdoc/v1_0/mso/issuer.py b/mso_mdoc/mso_mdoc/v1_0/mso/issuer.py new file mode 100644 index 000000000..b6c0deb40 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/mso/issuer.py @@ -0,0 +1,124 @@ +"""MsoIssuer helper class to issue a mso.""" + +from typing import Union +import logging +from datetime import datetime, timedelta, timezone +import random +import hashlib +import os +import cbor2 +from pycose.headers import Algorithm, KID +from pycose.keys import CoseKey +from pycose.messages import Sign1Message + +LOGGER = logging.getLogger(__name__) +DIGEST_SALT_LENGTH = 32 +CBORTAGS_ATTR_MAP = {"birth_date": 1004, "expiry_date": 1004, "issue_date": 1004} + + +def shuffle_dict(d: dict): + """Shuffle a dictionary.""" + keys = list(d.keys()) + for i in range(random.randint(3, 27)): # nosec: B311 + random.shuffle(keys) + return {key: d[key] for key in keys} + + +class MsoIssuer: + """MsoIssuer helper class to issue a mso.""" + + def __init__( + self, + data: dict, + private_key: CoseKey, + x509_cert: str, + digest_alg: str = "sha256", + ): + """Constructor.""" + + self.data: dict = data + self.hash_map: dict = {} + self.disclosure_map: dict = {} + self.digest_alg: str = digest_alg + self.private_key: CoseKey = private_key + self.x509_cert = x509_cert + + hashfunc = getattr(hashlib, self.digest_alg) + + digest_cnt = 0 + for ns, values in data.items(): + if not isinstance(values, dict): + continue + self.disclosure_map[ns] = {} + self.hash_map[ns] = {} + + for k, v in shuffle_dict(values).items(): + _rnd_salt = os.urandom(32) + _value_cbortag = CBORTAGS_ATTR_MAP.get(k, None) + + if _value_cbortag: + v = cbor2.CBORTag(_value_cbortag, v) + + self.disclosure_map[ns][digest_cnt] = { + "digestID": digest_cnt, + "random": _rnd_salt, + "elementIdentifier": k, + "elementValue": v, + } + self.hash_map[ns][digest_cnt] = hashfunc( + cbor2.dumps( + cbor2.CBORTag( + 24, self.disclosure_map[ns][digest_cnt] + ) + ) + ).digest() + + digest_cnt += 1 + + def format_datetime_repr(self, dt: datetime) -> str: + """Format a datetime object to a string representation.""" + return dt.isoformat().split(".")[0] + "Z" + + def sign( + self, + device_key: Union[dict, None] = None, + valid_from: Union[None, datetime] = None, + doctype: str = None, + ) -> Sign1Message: + """Sign a mso and returns it in Sign1Message type.""" + utcnow = datetime.now(timezone.utc) + exp = utcnow + timedelta(hours=(24 * 365)) + + payload = { + "version": "1.0", + "digestAlgorithm": self.digest_alg, + "valueDigests": self.hash_map, + "deviceKeyInfo": {"deviceKey": device_key}, + "docType": doctype or list(self.hash_map)[0], + "validityInfo": { + "signed": cbor2.dumps( + cbor2.CBORTag(0, self.format_datetime_repr(utcnow)) + ), + "validFrom": cbor2.dumps( + cbor2.CBORTag(0, self.format_datetime_repr(valid_from or utcnow)) + ), + "validUntil": cbor2.dumps( + cbor2.CBORTag(0, self.format_datetime_repr(exp)) + ), + }, + } + mso = Sign1Message( + phdr={ + Algorithm: self.private_key.alg, + KID: self.private_key.kid, + 33: self.x509_cert, + }, + # TODO: x509 (cbor2.CBORTag(33)) and federation trust_chain support 
+ # (cbor2.CBORTag(27?)) here + # 33 means x509chain standing to rfc9360 + # in both protected and unprotected for interop purpose .. for now. + uhdr={33: self.x509_cert}, + payload=cbor2.dumps(payload), + ) + mso.key = self.private_key + return mso diff --git a/mso_mdoc/mso_mdoc/v1_0/mso/verifier.py b/mso_mdoc/mso_mdoc/v1_0/mso/verifier.py new file mode 100644 index 000000000..49106930e --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/mso/verifier.py @@ -0,0 +1,62 @@ +"""MsoVerifier helper class to verify a mso.""" + +import logging +from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat +from pycose.keys import CoseKey +from pycose.messages import Sign1Message +import cryptography +import cbor2 + +LOGGER = logging.getLogger(__name__) + + +class MsoVerifier: + """MsoVerifier helper class to verify a mso.""" + + def __init__(self, data: cbor2.CBORTag) -> None: + """Create a new MsoVerifier instance.""" + if isinstance(data, list): + data = cbor2.dumps(cbor2.CBORTag(18, value=data)) + + self.object: Sign1Message = Sign1Message.decode(data) + self.public_key: ( + cryptography.hazmat.backends.openssl.ec._EllipticCurvePublicKey + ) = None + self.x509_certificates: list = [] + + @property + def raw_public_keys(self) -> bytes: + """Extract public key from x509 certificates.""" + _mixed_heads = list(self.object.phdr.items()) + list(self.object.uhdr.items()) + for h, v in _mixed_heads: + if h.identifier == 33: + return list(self.object.uhdr.values()) + + def attest_public_key(self) -> None: + LOGGER.warning( + "TODO: in next releases. " + "The certificate is to be considered as untrusted, this release " + "doesn't validate x.509 certificate chain. See next releases and " + "python certvalidator or cryptography for that." + ) + + def load_public_key(self) -> None: + """Load the public key from the x509 certificate.""" + self.attest_public_key() + + for i in self.raw_public_keys: + self.x509_certificates.append( + cryptography.x509.load_der_x509_certificate(i) + ) + + self.public_key = self.x509_certificates[0].public_key() + pem_public = self.public_key.public_bytes( + Encoding.PEM, PublicFormat.SubjectPublicKeyInfo + ).decode() + self.object.key = CoseKey.from_pem_public_key(pem_public) + + def verify_signature(self) -> bool: + """Verify the signature.""" + self.load_public_key() + + return self.object.verify_signature() diff --git a/mso_mdoc/mso_mdoc/v1_0/routes.py b/mso_mdoc/mso_mdoc/v1_0/routes.py new file mode 100644 index 000000000..28c7da1f1 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/routes.py @@ -0,0 +1,157 @@ +"""mso_mdoc admin routes.""" + +import logging +from aiohttp import web +from aiohttp_apispec import docs, request_schema, response_schema +from marshmallow import fields + +from aries_cloudagent.admin.request_context import AdminRequestContext +from aries_cloudagent.messaging.jsonld.error import ( + BadJWSHeaderError, + InvalidVerificationMethod, +) +from aries_cloudagent.messaging.models.openapi import OpenAPISchema +from aries_cloudagent.messaging.valid import ( + GENERIC_DID_EXAMPLE, + GENERIC_DID_VALIDATE, + Uri, +) +from aries_cloudagent.resolver.base import ResolverError + +from .mdoc import mso_mdoc_sign, mso_mdoc_verify + +SPEC_URI = "https://www.iso.org/obp/ui/#iso:std:iso-iec:18013:-5:dis:ed-1:v1:en" +LOGGER = logging.getLogger(__name__) + + +class MdocPluginResponseSchema(OpenAPISchema): + """Response schema for mso_mdoc Plugin.""" + + +class MdocCreateSchema(OpenAPISchema): + """Request schema to create a mso_mdoc with a particular DID.""" + + headers =
fields.Dict() + payload = fields.Dict(required=True) + did = fields.Str( + required=False, + validate=GENERIC_DID_VALIDATE, + metadata={"description": "DID of interest", "example": GENERIC_DID_EXAMPLE}, + ) + verification_method = fields.Str( + data_key="verificationMethod", + required=False, + validate=Uri(), + metadata={ + "description": "Information used for proof verification", + "example": ( + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg34" + "2Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" + ), + }, + ) + + +class MdocVerifySchema(OpenAPISchema): + """Request schema to verify a mso_mdoc.""" + + mso_mdoc = fields.Str( + validate=None, metadata={"example": "a36776657273696f6e63312e..."} + ) + + +class MdocVerifyResponseSchema(OpenAPISchema): + """Response schema for mso_mdoc verification result.""" + + valid = fields.Bool(required=True) + error = fields.Str(required=False, metadata={"description": "Error text"}) + kid = fields.Str(required=True, metadata={"description": "kid of signer"}) + headers = fields.Dict( + required=True, metadata={"description": "Headers from verified mso_mdoc."} + ) + payload = fields.Dict( + required=True, metadata={"description": "Payload from verified mso_mdoc"} + ) + + +@docs( + tags=["mso_mdoc"], + summary="Creates mso_mdoc CBOR encoded binaries according to ISO 18013-5", +) +@request_schema(MdocCreateSchema) +@response_schema(MdocPluginResponseSchema(), description="") +async def mdoc_sign(request: web.BaseRequest): + """Request handler for mso_mdoc creation using did. + + Args: + "headers": { ... }, + "payload": { ... }, + "did": "did:example:123", + "verificationMethod": "did:example:123#keys-1" + with did and verificationMethod being mutually exclusive. + """ + context: AdminRequestContext = request["context"] + body = await request.json() + did = body.get("did") + verification_method = body.get("verificationMethod") + headers = body.get("headers", {}) + payload = body.get("payload", {}) + + try: + mso_mdoc = await mso_mdoc_sign( + context.profile, headers, payload, did, verification_method + ) + except ValueError as err: + raise web.HTTPBadRequest(reason="Bad did or verification method") from err + + return web.json_response(mso_mdoc) + + +@docs( + tags=["mso_mdoc"], + summary="Verify mso_mdoc CBOR encoded binaries according to ISO 18013-5", +) +@request_schema(MdocVerifySchema()) +@response_schema(MdocVerifyResponseSchema(), 200, description="") +async def mdoc_verify(request: web.BaseRequest): + """Request handler for mso_mdoc validation. + + Args: + "mso_mdoc": { ...
} + """ + context: AdminRequestContext = request["context"] + body = await request.json() + mso_mdoc = body["mso_mdoc"] + try: + result = await mso_mdoc_verify(context.profile, mso_mdoc) + except (BadJWSHeaderError, InvalidVerificationMethod) as err: + raise web.HTTPBadRequest(reason=err.roll_up) from err + except ResolverError as err: + raise web.HTTPNotFound(reason=err.roll_up) from err + + return web.json_response(result.serialize()) + + +async def register(app: web.Application): + """Register routes.""" + app.add_routes( + [ + web.post("/mso_mdoc/sign", mdoc_sign), + web.post("/mso_mdoc/verify", mdoc_verify), + ] + ) + + +def post_process_routes(app: web.Application): + """Amend swagger API.""" + + # Add top-level tags description + if "tags" not in app._state["swagger_dict"]: + app._state["swagger_dict"]["tags"] = [] + app._state["swagger_dict"]["tags"].append( + { + "name": "mso_mdoc", + "description": "mso_mdoc plugin", + "externalDocs": {"description": "Specification", "url": SPEC_URI}, + } + ) diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/__init__.py b/mso_mdoc/mso_mdoc/v1_0/tests/__init__.py new file mode 100644 index 000000000..203414061 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/tests/__init__.py @@ -0,0 +1 @@ +"""Test cases.""" \ No newline at end of file diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/conftest.py b/mso_mdoc/mso_mdoc/v1_0/tests/conftest.py new file mode 100644 index 000000000..6d4a5f2f2 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/tests/conftest.py @@ -0,0 +1,117 @@ +import pytest +from unittest.mock import MagicMock + +from aries_cloudagent.admin.request_context import AdminRequestContext + +from oid4vci.models.exchange import OID4VCIExchangeRecord +from oid4vci.models.supported_cred import SupportedCredential +from oid4vci.public_routes import PopResult + + +@pytest.fixture +def body(): + yield { + "format": "mso_mdoc", + "doctype": "org.iso.18013.5.1.mDL", + "proof": { + "proof_type": "jwt", + "jwt": "eyJhbGciOiJFUzI1NiIsImp3ayI6eyJjcnYiOiJQLTI1NiIsImt0eSI6IkVDIiwieCI6IkJHVU5OTlpFSm9Cd05UU25NOW93WGVCdTBOTFJEVjR4d1llTm9kMVpxQUEiLCJ5IjoiZjlJTVhQS2xlU0FGb2tRdTc1Qlk3Nkl0QWpjVUxHWDlCeVZ0ZFVINEs0YyJ9LCJ0eXAiOiJvcGVuaWQ0dmNpLXByb29mK2p3dCJ9.eyJpYXQiOjE3MjA3MzMxMTAsImV4cCI6MTcyMDczNDkxMCwiYXVkIjoiaHR0cHM6Ly9vaWQ0dmNpLnN0Zy5uZ3Jvay5pbyIsIm5vbmNlIjoiWWM4em9odk9XREFTVzh6QnA5Y1MxZyIsImlzcyI6ImRpZDprZXk6NjZhVVVobzhQdjNVaU16ZHBxdUFGVHJWa01DeEpocUJTN3BVdjFqQzhleHdFZ2FndVRNUEppa3NlV2N1U0RqYUtlMzZKanM3cnlVWnZKQVp4UGZZVUVKIn0.1ozjqUDtYzBecSEln9dANpSNBXNxEkws2ZWWaYim5B07QmlELi0nvoh3ooUUeu4Q_7ru_FXjQCIM7xgAVCrbxw", + } + } + + +@pytest.fixture +def supported(): + yield SupportedCredential(format_data={"doctype": "org.iso.18013.5.1.mDL"}) + + +@pytest.fixture +def ex_record(): + yield OID4VCIExchangeRecord( + state=OID4VCIExchangeRecord.STATE_OFFER_CREATED, + verification_method="did:key:z6Mkn6z3Eg2mrgQmripNPGDybZYYojwZw1VPjRkCzbNV7JfN#0", + issuer_id="did:key:z6Mkn6z3Eg2mrgQmripNPGDybZYYojwZw1VPjRkCzbNV7JfN", + supported_cred_id="456", + credential_subject={"name": "alice"}, + nonce="789", + pin="000", + code="111", + token="222", + ) + + +@pytest.fixture +def pop(): + yield PopResult( + headers=None, + payload=None, + verified=True, + holder_kid="did:key:example-kid#0", + holder_jwk=None, + ) + + +@pytest.fixture +def context(): + """Test AdminRequestContext.""" + yield AdminRequestContext.test_context() + + +@pytest.fixture +def jwk(): + yield { + "kty": "OKP", + "crv": "ED25519", + "x": "cavH81X96jQL8vj3gbLQBkeE7p9cyVu8MJcC5N6lXOU=", + "d": 
"NsSTmfmS-D15umO64Ongi22HYcHBr7l1nl7OGurQReA", + } + + +@pytest.fixture +def did(): + yield { + "did": "did:key:z6Mkn6z3Eg2mrgQmripNPGDybZYYojwZw1VPjRkCzbNV7JfN", + "verkey": "8eizeRnLX8vJkDyfhhG8kTzYzAfiX8F33QqHAKQUC5sz", + "private_key": "NsSTmfmS-D15umO64Ongi22HYcHBr7l1nl7OGurQReA", + "public_key": "cavH81X96jQL8vj3gbLQBkeE7p9cyVu8MJcC5N6lXOU=", + } + + +@pytest.fixture +def headers(): + yield { + "doctype": "org.iso.18013.5.1.mDL", + "deviceKey": "12345678123456781234567812345678", + } + + +@pytest.fixture +def payload(): + yield { + "did": "did:key:z6Mkn6z3Eg2mrgQmripNPGDybZYYojwZw1VPjRkCzbNV7JfN", + "headers": {"deviceKey": "12345678123456781234567812345678"}, + "payload": { + "org.iso.18013.5.1": { + "expiry_date": "2029-03-31", + "issue_date": "2024-04-01", + "issuing_country": "CA", + "issuing_authority": "Ontario Ministry of Transportation", + "family_name": "Doe", + "given_name": "John", + "birth_date": "1990-03-31", + "document_number": "DJ123-45678-90123", + "un_distinguishing_sign": "CDN" + } + } + } + + +@pytest.fixture +def issuer_auth(): + """mso.encode()""" + yield "5904c7d28459012da301270458206196787ec61cf41d9f4cfa97dc4413907e8b8ff6c55694bc5ebd07c0d9b7950318215901023081ff3081b2a003020102021412978ff28a5d42d94382c1cfdcac025b9fc49e8d300506032b65703020310b300906035504061302434e3111300f06035504030c084c6f63616c204341301e170d3234303731373033343331335a170d3234303732373033343331335a3020310b300906035504061302434e3111300f06035504030c084c6f63616c204341302a300506032b657003210071abc7f355fdea340bf2f8f781b2d0064784ee9f5cc95bbc309702e4dea55ce5300506032b657003410080fe1045fc0ef68af9c3ddf53de8934826c78fb45f4c8d82e79b1f2673bb1e485ce2e7b482be6f398497ca56d2c2e192a8f8b39b05bb21fe7aa2d61cc5655506a118215901023081ff3081b2a003020102021412978ff28a5d42d94382c1cfdcac025b9fc49e8d300506032b65703020310b300906035504061302434e3111300f06035504030c084c6f63616c204341301e170d3234303731373033343331335a170d3234303732373033343331335a3020310b300906035504061302434e3111300f06035504030c084c6f63616c204341302a300506032b657003210071abc7f355fdea340bf2f8f781b2d0064784ee9f5cc95bbc309702e4dea55ce5300506032b657003410080fe1045fc0ef68af9c3ddf53de8934826c78fb45f4c8d82e79b1f2673bb1e485ce2e7b482be6f398497ca56d2c2e192a8f8b39b05bb21fe7aa2d61cc5655506590248a66776657273696f6e63312e306f646967657374416c676f726974686d667368613235366c76616c756544696765737473a1716f72672e69736f2e31383031332e352e31a90058200f80559d7f614f73cb8feb11d6fa6889c6cb3cce2e6116f2762e6bb18fe98686015820e7c276c74760d3004bb227627cf6bafb7d8260e8cdee7dd1e7417a1e5e4565a4025820d6701ca377cfd49b16c662abba87610e458e95163093d46004de3bc072976880035820c060377bc483de60cfc5a19ef0c61b5485127af944355d1eb64617972b9cf7c604582030d6f95910e800d2849992b0eba7de32998e2de1e91036fd3498c472a583c9a2055820155c35da62e635ab1b2ba78c7eea82c93436696643efe4ec86b9854711131602065820bdec6c1e2afea89273eaed5319379e89f04f816c647cdfe0dd50128fb69802a907582016e7d7f6d2c59d30851d8b9444456500790ddda6a2d9206c0081a5cad8087637085820cb52b000d1086b14f97f760f9c3ecc73c128db19579841f12a9b7c4e865ab7736d6465766963654b6579496e666fa1696465766963654b65797820313233343536373831323334353637383132333435363738313233343536373867646f6354797065756f72672e69736f2e31383031332e352e312e6d444c6c76616c6964697479496e666fa3667369676e656456c074323032342d30372d31375430333a34333a31335a6976616c696446726f6d56c074323032342d30372d31375430333a34333a31335a6a76616c6964556e74696c56c074323032352d30372d31375430333a34333a31335a58409de675d2fd0f64de7fd4ed6900344b3e04561324b616961b61e0caeb4d39d581226ae6131c87f6713af599f20183d777e1f260b56fb0f42212bd7f188e5c760c" 
+ + +@pytest.fixture +def mso_mdoc(): + yield "a36776657273696f6e63312e3069646f63756d656e747381a267646f6354797065756f72672e69736f2e31383031332e352e312e6d444c6c6973737565725369676e6564a26a6e616d65537061636573a1716f72672e69736f2e31383031332e352e3189d818586ea4686469676573744944006672616e646f6d582061f2f331ac88ad719976a6cc9f0940f23851a601c001430511424ceee35afbc171656c656d656e744964656e74696669657276756e5f64697374696e6775697368696e675f7369676e6c656c656d656e7456616c75656343444ed8185866a4686469676573744944016672616e646f6d582099ce495059e7e0ae8a044774a8596247d5b33a02b9d35133e2dff8b49839d88e71656c656d656e744964656e7469666965726f69737375696e675f636f756e7472796c656c656d656e7456616c7565624341d818586ca4686469676573744944026672616e646f6d5820a43e5279c96bc9864f0ee21048d8d46ef5ad553be3c8d41ef95161f736f9cc3071656c656d656e744964656e7469666965726a69737375655f646174656c656c656d656e7456616c7565d903ec6a323032342d30342d3031d8185889a4686469676573744944036672616e646f6d5820bb9f9145a1aa4d4a7a984893908ccc6e3db77b9de80db82d55c96028bc24ffa671656c656d656e744964656e7469666965727169737375696e675f617574686f726974796c656c656d656e7456616c756578224f6e746172696f204d696e6973747279206f66205472616e73706f72746174696f6ed818586ca4686469676573744944046672616e646f6d5820f4e468dd304e1ca775d3ca2398983bbad56671bc54547b38d04b61bd9d0edc6271656c656d656e744964656e7469666965726a62697274685f646174656c656c656d656e7456616c7565d903ec6a313939302d30332d3331d8185875a4686469676573744944056672616e646f6d58200b7412d206bc6e92e10bdf5f9c1b93a52d5d42c5052423bccaa595bea8e46e1a71656c656d656e744964656e7469666965726f646f63756d656e745f6e756d6265726c656c656d656e7456616c756571444a3132332d34353637382d3930313233d8185863a4686469676573744944066672616e646f6d5820c5901315a7a97b9af60e78965ce0fd0e3465e7dbb5d1f60b5ddb7f4bd1b783c871656c656d656e744964656e7469666965726b66616d696c795f6e616d656c656c656d656e7456616c756563446f65d818586da4686469676573744944076672616e646f6d582060c7538805bfee9fbdb4ece8cb1e83dbdb17b99ca6fdc51dc3806ae791e6dbb171656c656d656e744964656e7469666965726b6578706972795f646174656c656c656d656e7456616c7565d903ec6a323032392d30332d3331d8185863a4686469676573744944086672616e646f6d5820db795a0aefad87042012dbc8adb7cad0c734cf66049570666f0b42555364cb5e71656c656d656e744964656e7469666965726a676976656e5f6e616d656c656c656d656e7456616c7565644a6f686e6a697373756572417574688459012da3012704582078872c0f24908935938c69960b05bab2766904db2ac26ed9928a08d232662ab818215901023081ff3081b2a00302010202147a498062fa06687807d711a26af37ef36811d5a9300506032b65703020310b300906035504061302434e3111300f06035504030c084c6f63616c204341301e170d3234303731373031323334315a170d3234303732373031323334315a3020310b300906035504061302434e3111300f06035504030c084c6f63616c204341302a300506032b657003210071abc7f355fdea340bf2f8f781b2d0064784ee9f5cc95bbc309702e4dea55ce5300506032b65700341001538625bdd0f1ded7b80ce7aed09ec00ec666283811b58c1034f735bd6d92d68b218ad91065ce36af8eacbd8ec9cd185c0ae77620af777b27b784af0af399d0ea118215901023081ff3081b2a00302010202147a498062fa06687807d711a26af37ef36811d5a9300506032b65703020310b300906035504061302434e3111300f06035504030c084c6f63616c204341301e170d3234303731373031323334315a170d3234303732373031323334315a3020310b300906035504061302434e3111300f06035504030c084c6f63616c204341302a300506032b657003210071abc7f355fdea340bf2f8f781b2d0064784ee9f5cc95bbc309702e4dea55ce5300506032b65700341001538625bdd0f1ded7b80ce7aed09ec00ec666283811b58c1034f735bd6d92d68b218ad91065ce36af8eacbd8ec9cd185c0ae77620af777b27b784af0af399d0e59024dd818590248a66776657273696f6e63312e306f646967657374416c676f726974686d667368613235366c76616c75
6544696765737473a1716f72672e69736f2e31383031332e352e31a90058208cd10d0dccfa82ae19f69d9fae862bd96fe9ada4408eca7a9b0bac23aa76c35801582052095b1cc5a77eb8c1a424c9b0800b3ca928eb4199cf2d27237076aaa3c410d402582071fbf717bf874cef36cdee8c50edb686e8f9eca3618634298f1dbc99cd590094035820b493bef6da0728d971243012ab9bd8514f910c5787dd899c2eadecda7c846d3704582032aeab097b60bf5698dd31e44349a9af03c968cc28b6f9ce35812846224b2c780558203c3bde3dd6499fad865079e968fabc547666014eaa5301bbdf194774017cb0380658202a493ea48cb7b6112a75e0f97988da30e161469071f2e2537b96931352201c230758203c747db61a07c1049738ddf8d4d493920c9ff712a7cb87b6f60f9ef3734b6c100858205feb7eae0e91f2959a633de186a933beac7efdf4effb2fe02aa27724e04c15686d6465766963654b6579496e666fa1696465766963654b65797820313233343536373831323334353637383132333435363738313233343536373867646f6354797065756f72672e69736f2e31383031332e352e312e6d444c6c76616c6964697479496e666fa3667369676e656456c074323032342d30372d31375430313a32333a34315a6976616c696446726f6d56c074323032342d30372d31375430313a32333a34315a6a76616c6964556e74696c56c074323032352d30372d31375430313a32333a34315a5840de150a918590a131a9188e0a2cb49d0a7eaae28447c322441512cd7cb77a77ede5d58f21a99c7fe7199b965b7a8b94e46960d898e0a880dd492a0786fad032036673746174757300" diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/mdoc/__init__.py b/mso_mdoc/mso_mdoc/v1_0/tests/mdoc/__init__.py new file mode 100644 index 000000000..1661884d9 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/tests/mdoc/__init__.py @@ -0,0 +1 @@ +"""MDOC test cases.""" \ No newline at end of file diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/mdoc/test_issuer.py b/mso_mdoc/mso_mdoc/v1_0/tests/mdoc/test_issuer.py new file mode 100644 index 000000000..7b381ca49 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/tests/mdoc/test_issuer.py @@ -0,0 +1,12 @@ +import pytest + +from ...mdoc import mdoc_sign + + +@pytest.mark.asyncio +def test_mdoc_sign(jwk, headers, payload): + """Test mdoc_sign() method.""" + + mso_mdoc = mdoc_sign(jwk, headers, payload) + + assert mso_mdoc diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/mdoc/test_verifier.py b/mso_mdoc/mso_mdoc/v1_0/tests/mdoc/test_verifier.py new file mode 100644 index 000000000..405bba0ff --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/tests/mdoc/test_verifier.py @@ -0,0 +1,12 @@ +import pytest + +from ...mdoc import mdoc_verify, MdocVerifyResult + + +@pytest.mark.asyncio +def test_mdoc_verify(mso_mdoc): + """Test mdoc_sign() method.""" + + result: MdocVerifyResult = mdoc_verify(mso_mdoc) + + assert result diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/mso/__init__.py b/mso_mdoc/mso_mdoc/v1_0/tests/mso/__init__.py new file mode 100644 index 000000000..ad6888016 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/tests/mso/__init__.py @@ -0,0 +1 @@ +"""MSO test cases.""" \ No newline at end of file diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/mso/test_issuer.py b/mso_mdoc/mso_mdoc/v1_0/tests/mso/test_issuer.py new file mode 100644 index 000000000..7745d3004 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/tests/mso/test_issuer.py @@ -0,0 +1,32 @@ +import os +from pycose.keys import CoseKey +from binascii import hexlify + +from aries_cloudagent.wallet.util import b64_to_bytes + +from ...mso import MsoIssuer +from ...x509 import selfsigned_x509cert + +MDOC_TYPE = "org.iso.18013.5.1.mDL" + + +def test_mso_sign(jwk, headers, payload): + """Test mso_sign() method.""" + + pk_dict = { + "KTY": jwk.get("kty") or "", # OKP, EC + "CURVE": jwk.get("crv") or "", # ED25519, P_256 + "ALG": "EdDSA" if jwk.get("kty") == "OKP" else "ES256", + "D": b64_to_bytes(jwk.get("d") or "", True), # EdDSA + "X": 
b64_to_bytes(jwk.get("x") or "", True), # EdDSA, EcDSA + "Y": b64_to_bytes(jwk.get("y") or "", True), # EcDSA + "KID": os.urandom(32), + } + cose_key = CoseKey.from_dict(pk_dict) + x509_cert = selfsigned_x509cert(private_key=cose_key) + + msoi = MsoIssuer(data=payload, private_key=cose_key, x509_cert=x509_cert) + mso = msoi.sign(device_key=(headers.get("deviceKey") or ""), doctype=MDOC_TYPE) + mso_signature = hexlify(mso.encode()) + + assert mso_signature diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/mso/test_verifier.py b/mso_mdoc/mso_mdoc/v1_0/tests/mso/test_verifier.py new file mode 100644 index 000000000..68f3e744a --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/tests/mso/test_verifier.py @@ -0,0 +1,17 @@ +import pytest +import cbor2 +from binascii import unhexlify + +from ...mso import MsoVerifier + + +@pytest.mark.asyncio +async def test_mso_verify(issuer_auth): + """Test verify_signature() method.""" + + issuer_auth_bytes = unhexlify(issuer_auth) + issuer_auth_obj = cbor2.loads(issuer_auth_bytes) + mso_verifier = MsoVerifier(issuer_auth_obj) + valid = mso_verifier.verify_signature() + + assert valid diff --git a/mso_mdoc/mso_mdoc/v1_0/x509.py b/mso_mdoc/mso_mdoc/v1_0/x509.py new file mode 100644 index 000000000..f194b3962 --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/x509.py @@ -0,0 +1,30 @@ +"""X.509 certificate utilities.""" +from datetime import datetime, timezone, timedelta +from cryptography import x509 +from cryptography.x509.oid import NameOID +from cryptography.hazmat.primitives import hashes, serialization +from cwt import COSEKey +from pycose.keys import CoseKey +from pycose.keys.keytype import KtyOKP + +def selfsigned_x509cert(private_key: CoseKey): + """Generate a self-signed X.509 certificate from a COSE key.""" + ckey = COSEKey.from_bytes(private_key.encode()) + subject = issuer = x509.Name( + [ + x509.NameAttribute(NameOID.COUNTRY_NAME, "CN"), + x509.NameAttribute(NameOID.COMMON_NAME, "Local CA"), + ] + ) + utcnow = datetime.now(timezone.utc) + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(ckey.key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(utcnow) + .not_valid_after(utcnow + timedelta(days=10)) + .sign(ckey.key, None if private_key.kty == KtyOKP else hashes.SHA256()) + ) + return cert.public_bytes(getattr(serialization.Encoding, "DER")) diff --git a/mso_mdoc/pyproject.toml b/mso_mdoc/pyproject.toml new file mode 100644 index 000000000..a03a1bfb5 --- /dev/null +++ b/mso_mdoc/pyproject.toml @@ -0,0 +1,93 @@ +[tool.poetry] +name = "mso_mdoc" +version = "0.1.0" +description = "" +authors = [] + +[tool.poetry.dependencies] +python = "^3.9" +cbor2 = "~5" +cbor-diag = "*" +cwt = "~2" +pycose = "~1" + +# Define ACA-Py as an optional/extra dependency so it can be +# explicitly installed with the plugin if desired.
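+# (e.g. `poetry install --all-extras` for local development; the extras are declared below.)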
+aries-cloudagent = { version = ">=0.10.3, < 1.0.0", optional = true } +oid4vci = {path = "../oid4vci", optional = true, develop = true} + +[tool.poetry.extras] +aca-py = ["aries-cloudagent"] +oid4vci = ["oid4vci"] + +[tool.poetry.dev-dependencies] +ruff = "^0.5.0" +black = "~24.4.2" +pytest = "^8.2.0" +pytest-asyncio = "~0.23.7" +pytest-cov = "^5.0.0" +pytest-ruff = "^0.3.2" +asynctest = "0.13.0" +setuptools = "^70.3.0" + +[tool.poetry.group.integration.dependencies] +aries-askar = { version = "~0.3.0" } +indy-credx = { version = "~1.1.1" } +indy-vdr = { version = "~0.4.1" } +ursa-bbs-signatures = { version = "~1.0.1" } +python3-indy = { version = "^1.11.1" } +anoncreds = { version = "0.2.0" } + +[tool.ruff] +line-length = 90 + +[tool.ruff.lint] +select = ["E", "F", "C", "D"] +ignore = [ + # Google Python Doc Style + "D203", "D204", "D213", "D215", "D400", "D401", "D404", "D406", "D407", + "D408", "D409", "D413", + "D202", # Allow blank line after docstring + "D104", # Don't require docstring in public package + # Things that we should fix, but are too much work right now + "D417", "C901", +] + +[tool.ruff.per-file-ignores] +"**/{tests}/*" = ["F841", "D", "E501"] + +[tool.pytest.ini_options] +testpaths = "mso_mdoc" +addopts = """ + -p no:warnings + --quiet --junitxml=./.test-reports/junit.xml + --cov-config .coveragerc --cov=mso_mdoc --cov-report term --cov-report xml +""" +markers = [] +junit_family = "xunit1" +asyncio_mode = "auto" + +[tool.coverage.run] +omit = [ + "*/tests/*", + "docker/*", + "integration/*", + "*/definition.py" +] +data_file = ".test-reports/.coverage" + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "@abstract" +] +precision = 2 +skip_covered = true +show_missing = true + +[tool.coverage.xml] +output = ".test-reports/coverage.xml" + +[build-system] +requires = ["setuptools", "poetry-core>=1.2"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/oid4vci/.DS_Store b/oid4vci/.DS_Store deleted file mode 100644 index d7e56fa676ba7cebaed7ab71fe65ebc26cc14e0c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHK%}T>S5T0$LO({YT3LY1{7HqMo;3d@h0!H+pQX3OAm}X0V52cWszL2lt^Ek7+ zEw<6CNS%S%Z+CVkVZMZ&9RLumao7N;0)UH3SSaK0jnF=6P72oIAPRa$Fdg=TL8Km0(Le{pVT3MsH*p%OX-kdMaHwN_v*DKAa;LgCn>B0lcwUpU zlloCjHtLPj`Mm7z9UPurc3zTRs$LC)0)I9wI~Eu4hLdG76Mv82Z>7PS`ih6`#kM zBny`A!IOxcf$TS#pCt3a=7%xHgIU~T%wvoRXowt@3PE$FYr_R2ax+H8Moh6d%BSOs zf&QWi*Dlxtn=lP4|F-@p)e-nUqH&r`vu^j3Z`A6WTMf|=EpZn<={(Ga*=!mNCpXx; z)H;nyeHdLOqkQ1)oN1K}6E&Kcgd`q8$nABaVx0$irsAo|4Xr~oMRVY_=kvpkJX&<* z{J4A2k-cv3WU*+9_TK*K#o#%8QToj?$$@ku+YU>32W6$G*KndztscQw=9Te;!~iis z3=jir#eh2oL~E_8recW!V&EqX;QkbTm zFH`x*UrwPOF+dFbGX{9QANB(%%AKt%<>6T?q1{76!MFky5YU${0novHWY;*YU!o3i YzQe*Ij)HcR4oDXPLkN9{fnQ+Y3y-Z#YybcN diff --git a/oid4vci/demo/docker-compose.yaml b/oid4vci/demo/docker-compose.yaml index cc2d835e2..065881d7b 100644 --- a/oid4vci/demo/docker-compose.yaml +++ b/oid4vci/demo/docker-compose.yaml @@ -20,6 +20,7 @@ services: TUNNEL_ENDPOINT: http://ngrok:4040 OID4VCI_HOST: 0.0.0.0 OID4VCI_PORT: 8081 + OID4VCI_CRED_HANDLER: '{"jwt_vc_json": "jwt_vc_json.v1_0"}' entrypoint: > /bin/sh -c '/entrypoint.sh aca-py "$$@"' -- command: > diff --git a/oid4vci/docker/Dockerfile b/oid4vci/docker/Dockerfile index ae305b437..fa9a6dffa 100644 --- a/oid4vci/docker/Dockerfile +++ b/oid4vci/docker/Dockerfile @@ -1,9 +1,13 @@ FROM python:3.9-slim-bullseye AS base -WORKDIR /usr/src/app - -# Install and configure poetry USER root +# Install jwt_vc_json plugin 
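+# (assumes the image is built with the repository root as context, e.g. `context: ../..` +# in integration/docker-compose.yml, so the sibling jwt_vc_json/ directory is available to COPY)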
+WORKDIR /usr/src +RUN mkdir jwt_vc_json +COPY jwt_vc_json jwt_vc_json + +# Install and configure poetry +WORKDIR /usr/src/app ENV POETRY_VERSION=1.7.1 ENV POETRY_HOME=/opt/poetry RUN apt-get update && apt-get install -y curl jq && apt-get clean @@ -14,8 +18,8 @@ RUN poetry config virtualenvs.in-project true # Setup project RUN mkdir oid4vci && touch oid4vci/__init__.py -COPY pyproject.toml poetry.lock README.md ./ -RUN poetry install --without dev --extras "aca-py" +COPY oid4vci/pyproject.toml oid4vci/poetry.lock oid4vci/README.md ./ +RUN poetry install --without dev --all-extras USER $user FROM python:3.9-bullseye @@ -24,8 +28,8 @@ WORKDIR /usr/src/app COPY --from=base /usr/src/app/.venv /usr/src/app/.venv ENV PATH="/usr/src/app/.venv/bin:$PATH" RUN apt-get update && apt-get install -y curl jq && apt-get clean -COPY oid4vci/ oid4vci/ -COPY docker/*.yml ./ +COPY oid4vci/oid4vci/ oid4vci/ +COPY oid4vci/docker/*.yml ./ ENTRYPOINT ["/bin/bash", "-c", "aca-py \"$@\"", "--"] CMD ["start", "--arg-file", "default.yml"] diff --git a/oid4vci/integration/Dockerfile b/oid4vci/integration/Dockerfile index 13dab987c..8712a7a8c 100644 --- a/oid4vci/integration/Dockerfile +++ b/oid4vci/integration/Dockerfile @@ -17,5 +17,6 @@ RUN poetry install --only main COPY sphereon_wrapper/* sphereon_wrapper/ COPY credo_wrapper/* credo_wrapper/ COPY tests/* tests/ +COPY oid4vci_client/* oid4vci_client/ ENTRYPOINT ["poetry", "run", "pytest"] diff --git a/oid4vci/integration/docker-compose.yml b/oid4vci/integration/docker-compose.yml index 675b61c69..a419c81b6 100644 --- a/oid4vci/integration/docker-compose.yml +++ b/oid4vci/integration/docker-compose.yml @@ -3,8 +3,8 @@ services: issuer: image: oid4vci build: - dockerfile: docker/Dockerfile - context: .. + dockerfile: oid4vci/docker/Dockerfile + context: ../.. ports: - "3000:3000" - "3001:3001" @@ -17,6 +17,7 @@ services: OID4VCI_HOST: 0.0.0.0 OID4VCI_PORT: 8081 OID4VCI_ENDPOINT: "http://issuer:8081" + OID4VCI_CRED_HANDLER: '{"jwt_vc_json": "jwt_vc_json.v1_0"}' command: > start --inbound-transport http 0.0.0.0 3000 @@ -33,6 +34,7 @@ services: --log-level debug --debug-webhooks --plugin oid4vci + --plugin jwt_vc_json healthcheck: test: curl -s -o /dev/null -w '%{http_code}' "http://localhost:3001/status/live" | grep "200" > /dev/null start_period: 30s @@ -52,3 +54,4 @@ services: depends_on: issuer: condition: service_healthy + \ No newline at end of file diff --git a/oid4vci/integration/poetry.lock b/oid4vci/integration/poetry.lock index 35d62a0dc..3a3f1e6a8 100644 --- a/oid4vci/integration/poetry.lock +++ b/oid4vci/integration/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
[[package]] name = "acapy-controller" diff --git a/oid4vci/oid4vci/.DS_Store b/oid4vci/oid4vci/.DS_Store deleted file mode 100644 index 5008ddfcf53c02e82d7eee2e57c38e5672ef89f6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeH~Jr2S!425mzP>H1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0 "Config": @@ -34,6 +36,7 @@ def from_settings(cls, settings: BaseSettings) -> "Config": host = plugin_settings.get("host") or getenv("OID4VCI_HOST") port = int(plugin_settings.get("port") or getenv("OID4VCI_PORT", "0")) endpoint = plugin_settings.get("endpoint") or getenv("OID4VCI_ENDPOINT") + cred_handler = plugin_settings.get("cred_handler") or getenv("OID4VCI_CRED_HANDLER") if not host: raise ConfigError("host", "OID4VCI_HOST") @@ -41,5 +44,9 @@ def from_settings(cls, settings: BaseSettings) -> "Config": raise ConfigError("port", "OID4VCI_PORT") if not endpoint: raise ConfigError("endpoint", "OID4VCI_ENDPOINT") + if not cred_handler: + raise ConfigError("cred_handler", "OID4VCI_CRED_HANDLER") - return cls(host, port, endpoint) + cred_handler = json.loads(cred_handler) + + return cls(host, port, endpoint, cred_handler) diff --git a/oid4vci/oid4vci/models/exchange.py b/oid4vci/oid4vci/models/exchange.py index ab0620824..6bb69896b 100644 --- a/oid4vci/oid4vci/models/exchange.py +++ b/oid4vci/oid4vci/models/exchange.py @@ -20,7 +20,7 @@ class Meta: schema_class = "OID4VCIExchangeRecordSchema" RECORD_TYPE = "oid4vci" - EVENT_NAMESPACE = "oid4vci" + # EVENT_NAMESPACE = "oid4vci" RECORD_TOPIC = "oid4vci" RECORD_ID_NAME = "exchange_id" STATE_CREATED = "created" diff --git a/oid4vci/oid4vci/models/supported_cred.py b/oid4vci/oid4vci/models/supported_cred.py index 2bf1d30da..5ef3891e4 100644 --- a/oid4vci/oid4vci/models/supported_cred.py +++ b/oid4vci/oid4vci/models/supported_cred.py @@ -14,7 +14,8 @@ class Meta: schema_class = "SupportedCredentialSchema" - EVENT_NAMESPACE = "oid4vci" + #EVENT_NAMESPACE = "oid4vci" + RECORD_TOPIC = "oid4vci" RECORD_ID_NAME = "supported_cred_id" RECORD_TYPE = "supported_cred" TAG_NAMES = {"identifier", "format"} diff --git a/oid4vci/oid4vci/public_routes.py b/oid4vci/oid4vci/public_routes.py index 891a69a27..04a5cca12 100644 --- a/oid4vci/oid4vci/public_routes.py +++ b/oid4vci/oid4vci/public_routes.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from secrets import token_urlsafe from typing import Any, Dict, List, Mapping, Optional -import uuid +from abc import ABC, abstractmethod import jwt from aiohttp import web @@ -17,6 +17,7 @@ from aries_cloudagent.messaging.models.openapi import OpenAPISchema from aries_cloudagent.resolver.did_resolver import DIDResolver from aries_cloudagent.storage.error import StorageError, StorageNotFoundError +from aries_cloudagent.utils.classloader import ClassLoader, ModuleLoadError from aries_cloudagent.wallet.base import WalletError from aries_cloudagent.wallet.error import WalletNotFoundError from aries_cloudagent.wallet.jwt import ( @@ -297,6 +298,31 @@ class IssueCredentialRequestSchema(OpenAPISchema): proof = fields.Dict(metadata={"description": ""}) +class ICredProcessor(ABC): + """Returns singed credential payload.""" + + @abstractmethod + def issue_cred( + self, + body: any, + supported: SupportedCredential, + ex_record: OID4VCIExchangeRecord, + pop: PopResult, + context: AdminRequestContext, + ): + """Method signature. 
+ + Args: + body: any + supported: SupportedCredential + ex_record: OID4VCIExchangeRecord + pop: PopResult + context: AdminRequestContext + Returns: + encoded: signed credential payload. + """ + + @docs(tags=["oid4vci"], summary="Issue a credential") @request_schema(IssueCredentialRequestSchema()) async def issue_cred(request: web.Request): @@ -317,31 +343,28 @@ async def issue_cred(request: web.Request): supported = await SupportedCredential.retrieve_by_id( session, ex_record.supported_cred_id ) + config = Config.from_settings(context.settings) + handler_name = config.cred_handler.get(supported.format) except (StorageError, BaseModelError, StorageNotFoundError) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err - if supported.format != "jwt_vc_json": - raise web.HTTPUnprocessableEntity(reason="Only jwt_vc_json is supported.") - if supported.format_data is None: - LOGGER.error("No format_data for supported credential of format jwt_vc_json") - raise web.HTTPInternalServerError() + if ex_record.nonce is None: + raise web.HTTPBadRequest( + reason="Invalid exchange; no offer created for this request" + ) if supported.format != body.get("format"): raise web.HTTPBadRequest(reason="Requested format does not match offer.") - if not types_are_subset(body.get("types"), supported.format_data.get("types")): - raise web.HTTPBadRequest(reason="Requested types does not match offer.") - current_time = datetime.datetime.now(datetime.timezone.utc) - current_time_unix_timestamp = int(current_time.timestamp()) - formatted_time = current_time.strftime("%Y-%m-%dT%H:%M:%SZ") + if handler_name is None: + raise web.HTTPUnprocessableEntity(reason=f"{supported.format} is not supported.") + + if supported.format_data is None: + LOGGER.error(f"No format_data for supported credential {supported.format}.") + raise web.HTTPInternalServerError() - cred_id = f"urn:uuid:{str(uuid.uuid4())}" if "proof" not in body: - raise web.HTTPBadRequest(reason="proof is required for jwt_vc_json") - if ex_record.nonce is None: - raise web.HTTPBadRequest( - reason="Invalid exchange; no offer created for this request" - ) + raise web.HTTPBadRequest(reason=f"proof is required for {supported.format}") pop = await handle_proof_of_posession( context.profile, body["proof"], ex_record.nonce @@ -349,29 +372,16 @@ async def issue_cred(request: web.Request): if not pop.verified: raise web.HTTPBadRequest(reason="Invalid proof") - if not pop.holder_kid: - raise web.HTTPBadRequest(reason="No kid in proof; required for jwt_vc_json") - - # note: Some wallets require that the "jti" and "id" are a uri - payload = { - "vc": { - **(supported.vc_additional_data or {}), - "id": cred_id, - "issuer": ex_record.issuer_id, - "issuanceDate": formatted_time, - "credentialSubject": { - **(ex_record.credential_subject or {}), - "id": pop.holder_kid, - }, - }, - "iss": ex_record.issuer_id, - "nbf": current_time_unix_timestamp, - "jti": cred_id, - "sub": pop.holder_kid, - } - - jws = await jwt_sign( - context.profile, {}, payload, verification_method=ex_record.verification_method + try: + handler = ClassLoader.load_module(handler_name) + LOGGER.debug(f"Loaded module: {handler_name}") + except ModuleLoadError as e: + LOGGER.error(f"Error loading handler module: {e}") + raise web.HTTPInternalServerError( + reason=f"No handler to process {supported.format} credential."
+ ) + credential = await handler.cred_processor.issue_cred( + body, supported, ex_record, pop, context ) async with context.session() as session: @@ -384,8 +394,8 @@ async def issue_cred(request: web.Request): return web.json_response( { - "format": "jwt_vc_json", - "credential": jws, + "format": supported.format, + "credential": credential, } ) diff --git a/oid4vci/oid4vci/routes.py b/oid4vci/oid4vci/routes.py index 8438bc06b..7f36a815b 100644 --- a/oid4vci/oid4vci/routes.py +++ b/oid4vci/oid4vci/routes.py @@ -131,13 +131,11 @@ class ExchangeRecordCreateRequestSchema(OpenAPISchema): "description": "Identifier used to identify credential supported record", }, ) - credential_subject = ( - fields.Dict( - required=True, - metadata={ - "description": "desired claim and value in credential", - }, - ), + credential_subject = fields.Dict( + required=True, + metadata={ + "description": "desired claim and value in credential", + }, ) pin = fields.Str( required=False, diff --git a/oid4vci/oid4vci/tests/routes/conftest.py b/oid4vci/oid4vci/tests/routes/conftest.py index 890e844d1..45a1657a8 100644 --- a/oid4vci/oid4vci/tests/routes/conftest.py +++ b/oid4vci/oid4vci/tests/routes/conftest.py @@ -19,6 +19,7 @@ def context(): "endpoint": "http://localhost:8020", "host": "0.0.0.0", "port": 8020, + "cred_handler": '{"jwt_vc_json": "jwt_vc_json.v1_0"}', } } } diff --git a/oid4vci/poetry.lock b/oid4vci/poetry.lock index 6acd7c51a..300894b12 100644 --- a/oid4vci/poetry.lock +++ b/oid4vci/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aiohttp" @@ -341,13 +341,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.3" +version = "5.4.0" description = "Extensible memoizing collections and decorators" optional = true python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, + {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, ] [[package]] @@ -566,63 +566,63 @@ yaml = ["PyYAML"] [[package]] name = "coverage" -version = "7.5.4" +version = "7.6.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, - {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, - 
{file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, - {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, - {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, - {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, - {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, - {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, - {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, - {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, - {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, - {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, - {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, - {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, - {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, + {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, + {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, + {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, + {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, + {file = 
"coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, + {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, + {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, + {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, + {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, + {file = 
"coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, + {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, + {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, + {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, + {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, ] [package.dependencies] @@ -881,13 +881,13 @@ test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-typing" -version = "4.3.1" +version = "4.4.0" description = "eth-typing: Common type annotations for ethereum python packages" optional = true python-versions = "<4,>=3.8" files = [ - {file = "eth_typing-4.3.1-py3-none-any.whl", hash = "sha256:b4d7cee912c7779da75da4b42fa61475c1089d35a4df5081a786eaa29d5f6865"}, - {file = "eth_typing-4.3.1.tar.gz", hash = "sha256:4504559c87a9f71f4b99aa5a1e0549adaa7f192cbf8e37a295acfcddb1b5412d"}, + {file = "eth_typing-4.4.0-py3-none-any.whl", hash = "sha256:a5e30a6e69edda7b1d1e96e9d71bab48b9bb988a77909d8d1666242c5562f841"}, + {file = "eth_typing-4.4.0.tar.gz", hash = "sha256:93848083ac6bb4c20cc209ea9153a08b0a528be23337c889f89e1e5ffbe9807d"}, ] [package.dependencies] @@ -922,13 +922,13 @@ test = ["hypothesis (>=4.43.0)", "mypy (==1.5.1)", "pytest (>=7.0.0)", "pytest-x [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -1184,6 +1184,23 @@ files = [ cryptography = ">=3.4" typing-extensions = ">=4.5.0" +[[package]] +name = "jwt-vc-json" +version = "0.1.0" +description = "jwt_vc_json credential handler plugin" +optional = true +python-versions = "^3.9" +files = [] +develop = false + +[package.extras] +aca-py = ["aries-cloudagent (>=0.10.3,<1.0.0)"] +oid4vci = ["oid4vci @ file:///Users/weiiv/Workspace/di/vc/aries-acapy-plugins/oid4vci"] + +[package.source] +type = "directory" +url = "../jwt_vc_json" + [[package]] name = "lxml" version = "5.2.2" @@ -2580,8 
+2597,9 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [extras] aca-py = ["aries-cloudagent"] +plugins = ["jwt-vc-json"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "59efd45324f2b297b6b8384a1fd7b572d6c2d7bc78038a717344c456b6123e5c" +content-hash = "a8c5a43ca5956d15aa57c8adbe6deb3d36244315017edda182fda46e3e3528be" diff --git a/oid4vci/pyproject.toml b/oid4vci/pyproject.toml index 1599fd93c..6207784ca 100644 --- a/oid4vci/pyproject.toml +++ b/oid4vci/pyproject.toml @@ -15,9 +15,12 @@ aiohttp = "^3.9.5" aries-askar = "~0.3.0" aiohttp-cors = "^0.7.0" marshmallow = "^3.20.1" +jwt-vc-json = {path = "../jwt_vc_json", optional = true} [tool.poetry.extras] aca-py = ["aries-cloudagent"] +# Credential format handler plugins +plugins = ["jwt-vc-json"] [tool.poetry.dev-dependencies] ruff = "^0.5.0" From 138a1f3a19513f831fea81777334781e035c2d68 Mon Sep 17 00:00:00 2001 From: Ivan Wei Date: Thu, 18 Jul 2024 10:43:02 -0400 Subject: [PATCH 08/11] github checks Signed-off-by: Ivan Wei --- jwt_vc_json/integration/pyproject.toml | 2 +- jwt_vc_json/pyproject.toml | 2 +- mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py | 12 ++++++++++++ mso_mdoc/mso_mdoc/v1_0/mso/__init__.py | 3 +++ mso_mdoc/mso_mdoc/v1_0/mso/verifier.py | 1 + mso_mdoc/mso_mdoc/v1_0/tests/conftest.py | 7 +++---- mso_mdoc/pyproject.toml | 2 +- 7 files changed, 22 insertions(+), 7 deletions(-) diff --git a/jwt_vc_json/integration/pyproject.toml b/jwt_vc_json/integration/pyproject.toml index 4ff3f74eb..d857f529d 100644 --- a/jwt_vc_json/integration/pyproject.toml +++ b/jwt_vc_json/integration/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "plugin-globals-tests" +name = "jwt-vc-json" version = "0.1.0" description = "" authors = [] diff --git a/jwt_vc_json/pyproject.toml b/jwt_vc_json/pyproject.toml index 1230d9389..e793824af 100644 --- a/jwt_vc_json/pyproject.toml +++ b/jwt_vc_json/pyproject.toml @@ -48,7 +48,7 @@ ignore = [ "D417", "C901", ] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "**/{tests}/*" = ["F841", "D", "E501"] [tool.pytest.ini_options] diff --git a/mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py b/mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py index 27387538b..9ad48b8cf 100644 --- a/mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py +++ b/mso_mdoc/mso_mdoc/v1_0/mdoc/__init__.py @@ -4,3 +4,15 @@ from .verifier import mso_mdoc_verify, mdoc_verify, MdocVerifyResult from .exceptions import MissingPrivateKey, MissingIssuerAuth from .exceptions import NoDocumentTypeProvided, NoSignedDocumentProvided + +__all__ = [ + mso_mdoc_sign, + mdoc_sign, + mso_mdoc_verify, + mdoc_verify, + MdocVerifyResult, + MissingPrivateKey, + MissingIssuerAuth, + NoDocumentTypeProvided, + NoSignedDocumentProvided, +] diff --git a/mso_mdoc/mso_mdoc/v1_0/mso/__init__.py b/mso_mdoc/mso_mdoc/v1_0/mso/__init__.py index 0b356db01..c19f909dd 100644 --- a/mso_mdoc/mso_mdoc/v1_0/mso/__init__.py +++ b/mso_mdoc/mso_mdoc/v1_0/mso/__init__.py @@ -1,3 +1,6 @@ """MSO module.""" + from .issuer import MsoIssuer from .verifier import MsoVerifier + +__all__ = [MsoIssuer, MsoVerifier] diff --git a/mso_mdoc/mso_mdoc/v1_0/mso/verifier.py b/mso_mdoc/mso_mdoc/v1_0/mso/verifier.py index 49106930e..5e9b8161e 100644 --- a/mso_mdoc/mso_mdoc/v1_0/mso/verifier.py +++ b/mso_mdoc/mso_mdoc/v1_0/mso/verifier.py @@ -33,6 +33,7 @@ def raw_public_keys(self) -> bytes: return list(self.object.uhdr.values()) def attest_public_key(self) -> None: + """Asstest public key.""" LOGGER.warning( "TODO: in next releases. 
" "The certificate is to be considered as untrusted, this release " diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/conftest.py b/mso_mdoc/mso_mdoc/v1_0/tests/conftest.py index 6d4a5f2f2..7f634b4aa 100644 --- a/mso_mdoc/mso_mdoc/v1_0/tests/conftest.py +++ b/mso_mdoc/mso_mdoc/v1_0/tests/conftest.py @@ -1,5 +1,4 @@ import pytest -from unittest.mock import MagicMock from aries_cloudagent.admin.request_context import AdminRequestContext @@ -16,7 +15,7 @@ def body(): "proof": { "proof_type": "jwt", "jwt": "eyJhbGciOiJFUzI1NiIsImp3ayI6eyJjcnYiOiJQLTI1NiIsImt0eSI6IkVDIiwieCI6IkJHVU5OTlpFSm9Cd05UU25NOW93WGVCdTBOTFJEVjR4d1llTm9kMVpxQUEiLCJ5IjoiZjlJTVhQS2xlU0FGb2tRdTc1Qlk3Nkl0QWpjVUxHWDlCeVZ0ZFVINEs0YyJ9LCJ0eXAiOiJvcGVuaWQ0dmNpLXByb29mK2p3dCJ9.eyJpYXQiOjE3MjA3MzMxMTAsImV4cCI6MTcyMDczNDkxMCwiYXVkIjoiaHR0cHM6Ly9vaWQ0dmNpLnN0Zy5uZ3Jvay5pbyIsIm5vbmNlIjoiWWM4em9odk9XREFTVzh6QnA5Y1MxZyIsImlzcyI6ImRpZDprZXk6NjZhVVVobzhQdjNVaU16ZHBxdUFGVHJWa01DeEpocUJTN3BVdjFqQzhleHdFZ2FndVRNUEppa3NlV2N1U0RqYUtlMzZKanM3cnlVWnZKQVp4UGZZVUVKIn0.1ozjqUDtYzBecSEln9dANpSNBXNxEkws2ZWWaYim5B07QmlELi0nvoh3ooUUeu4Q_7ru_FXjQCIM7xgAVCrbxw", - } + }, } @@ -100,9 +99,9 @@ def payload(): "given_name": "John", "birth_date": "1990-03-31", "document_number": "DJ123-45678-90123", - "un_distinguishing_sign": "CDN" + "un_distinguishing_sign": "CDN", } - } + }, } diff --git a/mso_mdoc/pyproject.toml b/mso_mdoc/pyproject.toml index a03a1bfb5..fdd91ed72 100644 --- a/mso_mdoc/pyproject.toml +++ b/mso_mdoc/pyproject.toml @@ -53,7 +53,7 @@ ignore = [ "D417", "C901", ] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "**/{tests}/*" = ["F841", "D", "E501"] [tool.pytest.ini_options] From 29c79362b57261f2f9d94c25070e11fe1097d025 Mon Sep 17 00:00:00 2001 From: Ivan Wei Date: Thu, 18 Jul 2024 11:26:17 -0400 Subject: [PATCH 09/11] ruff linting fixes Signed-off-by: Ivan Wei --- oid4vci/oid4vci/config.py | 4 +++- oid4vci/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/oid4vci/oid4vci/config.py b/oid4vci/oid4vci/config.py index 0eb596b51..b79e9b2c9 100644 --- a/oid4vci/oid4vci/config.py +++ b/oid4vci/oid4vci/config.py @@ -36,7 +36,9 @@ def from_settings(cls, settings: BaseSettings) -> "Config": host = plugin_settings.get("host") or getenv("OID4VCI_HOST") port = int(plugin_settings.get("port") or getenv("OID4VCI_PORT", "0")) endpoint = plugin_settings.get("endpoint") or getenv("OID4VCI_ENDPOINT") - cred_handler = plugin_settings.get("cred_handler") or getenv("OID4VCI_CRED_HANDLER") + cred_handler = plugin_settings.get("cred_handler") or getenv( + "OID4VCI_CRED_HANDLER" + ) if not host: raise ConfigError("host", "OID4VCI_HOST") diff --git a/oid4vci/pyproject.toml b/oid4vci/pyproject.toml index 6207784ca..b2b53a310 100644 --- a/oid4vci/pyproject.toml +++ b/oid4vci/pyproject.toml @@ -53,7 +53,7 @@ ignore = [ "D417", "C901", ] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "**/{tests}/*" = ["F841", "D", "E501"] [tool.pytest.ini_options] From c0bf7d889c49dfa012477fe939955aadcd40e8b9 Mon Sep 17 00:00:00 2001 From: Ivan Wei Date: Thu, 18 Jul 2024 22:44:15 -0400 Subject: [PATCH 10/11] "lite" version of credential format plugins Signed-off-by: Ivan Wei --- jwt_vc_json/.devcontainer/Dockerfile | 22 ---- jwt_vc_json/.devcontainer/devcontainer.json | 50 -------- jwt_vc_json/.devcontainer/post-install.sh | 14 --- jwt_vc_json/README.md | 8 +- jwt_vc_json/docker/Dockerfile | 35 ------ jwt_vc_json/docker/default.yml | 22 ---- jwt_vc_json/docker/integration.yml | 20 ---- .../integration/Dockerfile.test.runner | 20 
---- jwt_vc_json/integration/README.md | 14 --- jwt_vc_json/integration/docker-compose.yml | 34 ------ jwt_vc_json/integration/pyproject.toml | 17 --- jwt_vc_json/integration/tests/__init__.py | 108 ------------------ jwt_vc_json/integration/tests/test_example.py | 32 ------ jwt_vc_json/jwt_vc_json/definition.py | 10 -- mso_mdoc/.devcontainer/Dockerfile | 22 ---- mso_mdoc/.devcontainer/devcontainer.json | 50 -------- mso_mdoc/.devcontainer/post-install.sh | 14 --- mso_mdoc/README.md | 8 +- mso_mdoc/docker/Dockerfile | 36 ------ mso_mdoc/docker/default.yml | 22 ---- mso_mdoc/docker/integration.yml | 20 ---- mso_mdoc/integration/Dockerfile.test.runner | 20 ---- mso_mdoc/integration/README.md | 14 --- mso_mdoc/integration/docker-compose.yml | 33 ------ mso_mdoc/integration/pyproject.toml | 17 --- mso_mdoc/integration/tests/__init__.py | 108 ------------------ mso_mdoc/integration/tests/test_example.py | 32 ------ mso_mdoc/mso_mdoc/definition.py | 10 -- mso_mdoc/mso_mdoc/v1_0/tests/test_x509.py | 28 +++++ oid4vci/poetry.lock | 44 +++---- 30 files changed, 58 insertions(+), 826 deletions(-) delete mode 100644 jwt_vc_json/.devcontainer/Dockerfile delete mode 100644 jwt_vc_json/.devcontainer/devcontainer.json delete mode 100644 jwt_vc_json/.devcontainer/post-install.sh delete mode 100644 jwt_vc_json/docker/Dockerfile delete mode 100644 jwt_vc_json/docker/default.yml delete mode 100644 jwt_vc_json/docker/integration.yml delete mode 100644 jwt_vc_json/integration/Dockerfile.test.runner delete mode 100644 jwt_vc_json/integration/README.md delete mode 100644 jwt_vc_json/integration/docker-compose.yml delete mode 100644 jwt_vc_json/integration/pyproject.toml delete mode 100644 jwt_vc_json/integration/tests/__init__.py delete mode 100644 jwt_vc_json/integration/tests/test_example.py delete mode 100644 jwt_vc_json/jwt_vc_json/definition.py delete mode 100644 mso_mdoc/.devcontainer/Dockerfile delete mode 100644 mso_mdoc/.devcontainer/devcontainer.json delete mode 100644 mso_mdoc/.devcontainer/post-install.sh delete mode 100644 mso_mdoc/docker/Dockerfile delete mode 100644 mso_mdoc/docker/default.yml delete mode 100644 mso_mdoc/docker/integration.yml delete mode 100644 mso_mdoc/integration/Dockerfile.test.runner delete mode 100644 mso_mdoc/integration/README.md delete mode 100644 mso_mdoc/integration/docker-compose.yml delete mode 100644 mso_mdoc/integration/pyproject.toml delete mode 100644 mso_mdoc/integration/tests/__init__.py delete mode 100644 mso_mdoc/integration/tests/test_example.py delete mode 100644 mso_mdoc/mso_mdoc/definition.py create mode 100644 mso_mdoc/mso_mdoc/v1_0/tests/test_x509.py diff --git a/jwt_vc_json/.devcontainer/Dockerfile b/jwt_vc_json/.devcontainer/Dockerfile deleted file mode 100644 index 28cc62697..000000000 --- a/jwt_vc_json/.devcontainer/Dockerfile +++ /dev/null @@ -1,22 +0,0 @@ -# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.134.0/containers/python-3/.devcontainer/base.Dockerfile -ARG VARIANT="3.9-bullseye" -FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} - -ARG POETRY_VERSION="1.7.1" -ENV POETRY_HOME="/opt/poetry" \ - POETRY_VERSION=${POETRY_VERSION} - -RUN curl -sSL https://install.python-poetry.org | python3 - \ - && update-alternatives --install /usr/local/bin/poetry poetry /opt/poetry/bin/poetry 900 \ - # Enable tab completion for bash - && poetry completions bash >> /home/vscode/.bash_completion \ - # Enable tab completion for Zsh - && mkdir -p /home/vscode/.zfunc/ \ - && poetry completions zsh > 
/home/vscode/.zfunc/_poetry \ - && echo "fpath+=~/.zfunc\nautoload -Uz compinit && compinit" >> /home/vscode/.zshrc - -COPY pyproject.toml ./ -# COPY pyproject.toml poetry.lock ./ -RUN poetry config virtualenvs.create false \ - && poetry install --no-root --no-interaction --with integration --extras "aca-py" \ - && rm -rf /root/.cache/pypoetry \ No newline at end of file diff --git a/jwt_vc_json/.devcontainer/devcontainer.json b/jwt_vc_json/.devcontainer/devcontainer.json deleted file mode 100644 index 0d0eaf410..000000000 --- a/jwt_vc_json/.devcontainer/devcontainer.json +++ /dev/null @@ -1,50 +0,0 @@ -// For format details, see https://aka.ms/devcontainer.json. For config options, see the -// README at: https://github.com/devcontainers/templates/tree/main/src/python -{ - "name": "jwt_vc_json", - "build": { - "dockerfile": "Dockerfile", - "context": "..", - "args": { - "VARIANT": "3.9-bullseye", - "POETRY_VERSION": "1.7.1" - } - }, - "customizations": { - "vscode": { - "extensions": ["ms-python.python", "ms-python.vscode-pylance"], - "settings": { - "python.testing.pytestArgs": ["./jwt_vc_json", "--no-cov"], - "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true, - "python.testing.pytestPath": "pytest", - "editor.defaultFormatter": null, - "editor.formatOnSave": false, // enable per language - "[python]": { - "editor.formatOnSave": true - }, - "python.formatting.provider": "black", - "python.formatting.blackPath": "/usr/local/py-utils/bin/black", - "python.formatting.blackArgs": [] - } - } - }, - - "features": { - "ghcr.io/devcontainers/features/docker-in-docker:2": { - "moby": false - } - }, - - // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. - "remoteUser": "vscode", - - "remoteEnv": { - "RUST_LOG": "aries-askar::log::target=error" - }, - - "mounts": [], - // Use 'forwardPorts' to make a list of ports inside the container available locally. - "forwardPorts": [3000, 3001], - "postCreateCommand": "bash ./.devcontainer/post-install.sh" -} diff --git a/jwt_vc_json/.devcontainer/post-install.sh b/jwt_vc_json/.devcontainer/post-install.sh deleted file mode 100644 index 83bc8c94c..000000000 --- a/jwt_vc_json/.devcontainer/post-install.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -set -ex - -# Convenience workspace directory for later use -WORKSPACE_DIR=$(pwd) - -# install all ACA-Py requirements -python -m pip install --upgrade pip - -# install black for formatting -pip3 install black - -# Generate Poetry Lock file -poetry lock --no-update \ No newline at end of file diff --git a/jwt_vc_json/README.md b/jwt_vc_json/README.md index e3598fc68..0db5ae895 100644 --- a/jwt_vc_json/README.md +++ b/jwt_vc_json/README.md @@ -1,7 +1,7 @@ -### Description: +# JWT_VC_JSON credential format plugin -< Replace with information about the reason this plugin was produced and a brief overview of the features > +This plugin provides `jwt_vc_json` credential support for the OID4VCI plugin. It acts as a module, dynamically loaded by the OID4VCI plugin, takes input parameters, and constructs and signs `jwt_vc_json` credentials. -### Configuration: +## Configuration: -< Replace this section with an outline of configuation options and basic defaults for deploying the plugin > +No configuration is required for this plugin. 
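To make the README's claim concrete, the sketch below shows roughly how a `jwt_vc_json` credential is assembled and signed with ACA-Py's `jwt_sign` helper (which this handler imports, as seen later in this patch series). It is an illustrative sketch only, not the plugin's implementation: the claim values, the `ExampleDegreeCredential` type, and the exact arguments passed to `jwt_sign` (empty headers, issuer DID) are assumptions made for the example.

```python
import datetime
import uuid

from aries_cloudagent.core.profile import Profile
from aries_cloudagent.wallet.jwt import jwt_sign


async def sign_example_jwt_vc(profile: Profile, issuer_did: str, holder_did: str) -> str:
    """Assemble a minimal W3C VC payload and sign it as a compact JWT (sketch only)."""
    now = int(datetime.datetime.now(datetime.timezone.utc).timestamp())
    payload = {
        # JWT claims mirroring the credential's issuer, subject, issuance time, and id.
        "iss": issuer_did,
        "sub": holder_did,
        "iat": now,
        "jti": f"urn:uuid:{uuid.uuid4()}",
        # For jwt_vc_json, the W3C credential itself travels in the "vc" claim.
        "vc": {
            "@context": ["https://www.w3.org/2018/credentials/v1"],
            "type": ["VerifiableCredential", "ExampleDegreeCredential"],
            "credentialSubject": {"id": holder_did, "name": "Example Holder"},
        },
    }
    # jwt_sign resolves the DID's signing key from the wallet and returns the compact JWT.
    return await jwt_sign(profile, {}, payload, did=issuer_did)
```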
diff --git a/jwt_vc_json/docker/Dockerfile b/jwt_vc_json/docker/Dockerfile deleted file mode 100644 index 5b410ce9d..000000000 --- a/jwt_vc_json/docker/Dockerfile +++ /dev/null @@ -1,35 +0,0 @@ -FROM python:3.9-slim AS base -USER root - -# Install oid4vci plugin -WORKDIR /usr/src -RUN mkdir oid4vci -COPY oid4vci oid4vci - -# Install and configure poetry -WORKDIR /usr/src/app -ENV POETRY_VERSION=1.7.1 -ENV POETRY_HOME=/opt/poetry -RUN apt-get update && apt-get install -y curl && apt-get clean -RUN curl -sSL https://install.python-poetry.org | python - - -ENV PATH="/opt/poetry/bin:$PATH" -RUN poetry config virtualenvs.in-project true - -# Setup project -RUN mkdir jwt_vc_json && touch jwt_vc_json/__init__.py -COPY jwt_vc_json/pyproject.toml jwt_vc_json/poetry.lock jwt_vc_json/README.md ./ -ARG install_flags='--with integration --all-extras' -RUN poetry install ${install_flags} -USER $user - -FROM python:3.9-bullseye -WORKDIR /usr/src/app -COPY --from=base /usr/src/app/.venv /usr/src/app/.venv -ENV PATH="/usr/src/app/.venv/bin:$PATH" - -COPY jwt_vc_json/jwt_vc_json/ jwt_vc_json/ -COPY jwt_vc_json/docker/*.yml ./ - -ENTRYPOINT ["/bin/bash", "-c", "aca-py \"$@\"", "--"] -CMD ["start", "--arg-file", "default.yml"] \ No newline at end of file diff --git a/jwt_vc_json/docker/default.yml b/jwt_vc_json/docker/default.yml deleted file mode 100644 index c6e4f0f3b..000000000 --- a/jwt_vc_json/docker/default.yml +++ /dev/null @@ -1,22 +0,0 @@ -label: jwt_vc_json - -admin: [0.0.0.0, 3001] -admin-insecure-mode: false -admin-api-key: change-me - -inbound-transport: - - [http, 0.0.0.0, 3000] - - [ws, 0.0.0.0, 3002] -outbound-transport: http -endpoint: - - http://host.docker.internal:3000 - -plugin: - - jwt_vc_json.v1_0 - -genesis-url: http://test.bcovrin.vonx.io/genesis - -log-level: info - -auto-accept-invites: true -auto-respond-messages: true diff --git a/jwt_vc_json/docker/integration.yml b/jwt_vc_json/docker/integration.yml deleted file mode 100644 index c07aaefde..000000000 --- a/jwt_vc_json/docker/integration.yml +++ /dev/null @@ -1,20 +0,0 @@ -label: jwt_vc_json - -admin: [0.0.0.0, 3001] -admin-insecure-mode: true - -inbound-transport: - - [http, 0.0.0.0, 3000] -outbound-transport: http -endpoint: - - http://host.docker.internal:3000 - -plugin: - - jwt_vc_json.v1_0 - -genesis-url: http://test.bcovrin.vonx.io/genesis - -log-level: info - -auto-accept-invites: true -auto-respond-messages: true diff --git a/jwt_vc_json/integration/Dockerfile.test.runner b/jwt_vc_json/integration/Dockerfile.test.runner deleted file mode 100644 index 95f0d2461..000000000 --- a/jwt_vc_json/integration/Dockerfile.test.runner +++ /dev/null @@ -1,20 +0,0 @@ -FROM python:3.9-slim -WORKDIR /usr/src/app - -# install poetry -RUN pip3 install --no-cache-dir poetry - -# Add docker-compose-wait tool -ENV WAIT_VERSION 2.7.2 -ADD https://github.com/ufoscout/docker-compose-wait/releases/download/$WAIT_VERSION/wait /wait -RUN chmod +x /wait - -# install dependencies -COPY pyproject.toml . -COPY poetry.lock . -RUN poetry install --only main - -# add tests to image -COPY tests/* tests/ - -ENTRYPOINT ["/bin/sh", "-c", "/wait && poetry run pytest \"$@\"", "--"] \ No newline at end of file diff --git a/jwt_vc_json/integration/README.md b/jwt_vc_json/integration/README.md deleted file mode 100644 index 60cfe0f17..000000000 --- a/jwt_vc_json/integration/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Integration Tests - -All plugins should have a suite of integration tests. 
We use `docker compose` to set up the environment, and make use of the [Dockerfile](../docker/Dockerfile) to produce our ACA-Py/Plugin image. To simplify, we have another [Dockerfile](Dockerfile.test.runner) for running those [tests](/tests/). - -## Build and run Tests - -The integration tests will start 2 agents - bob and alice - and a juggernaut container that will execute the tests. Test results will be found in the juggernaut container output. The juggernaut container should close itself down, the logs can be reviewed in the `Docker` view, open `Containers`, open `integration`, right-click the `integration-tests` container and select `View Logs` - -```sh -# open a terminal in vs code -cd integration -docker compose build -docker compose up -``` diff --git a/jwt_vc_json/integration/docker-compose.yml b/jwt_vc_json/integration/docker-compose.yml deleted file mode 100644 index aedb233ff..000000000 --- a/jwt_vc_json/integration/docker-compose.yml +++ /dev/null @@ -1,34 +0,0 @@ -version: '3' -#*************************************************************** -# integration level test agents * -#*************************************************************** - -services: - bob: - image: plugin-image - build: - context: ../.. - dockerfile: jwt_vc_json/docker/Dockerfile - args: - - install_flags=--no-interaction --with integration --all-extras - - plugins_path= - command: start --arg-file integration.yml --label bob -e http://bob:3000 --log-level debug - - alice: - image: plugin-image - command: start --arg-file integration.yml --label alice -e http://alice:3000 --log-level debug - - tests: - container_name: juggernaut - build: - context: . - dockerfile: Dockerfile.test.runner - environment: - - WAIT_BEFORE_HOSTS=3 - - WAIT_HOSTS=bob:3000, alice:3000 - - WAIT_HOSTS_TIMEOUT=60 - - WAIT_SLEEP_INTERVAL=1 - - WAIT_HOST_CONNECT_TIMEOUT=30 - depends_on: - - bob - - alice \ No newline at end of file diff --git a/jwt_vc_json/integration/pyproject.toml b/jwt_vc_json/integration/pyproject.toml deleted file mode 100644 index d857f529d..000000000 --- a/jwt_vc_json/integration/pyproject.toml +++ /dev/null @@ -1,17 +0,0 @@ -[tool.poetry] -name = "jwt-vc-json" -version = "0.1.0" -description = "" -authors = [] - -[tool.poetry.dependencies] -python = "^3.9" -pytest = "^8.2.0" -pytest-asyncio = "~0.23.7" -requests = "^2.31.0" - -[tool.poetry.dev-dependencies] - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/jwt_vc_json/integration/tests/__init__.py b/jwt_vc_json/integration/tests/__init__.py deleted file mode 100644 index 6224ca13d..000000000 --- a/jwt_vc_json/integration/tests/__init__.py +++ /dev/null @@ -1,108 +0,0 @@ -from functools import wraps - -import pytest -import requests - -AUTO_ACCEPT = "false" - -BOB = "http://bob:3001" -ALICE = "http://alice:3001" - - -def get(agent: str, path: str, **kwargs): - """Get.""" - return requests.get(f"{agent}{path}", **kwargs) - - -def post(agent: str, path: str, **kwargs): - """Post.""" - return requests.post(f"{agent}{path}", **kwargs) - - -def fail_if_not_ok(message: str): - """Fail the current test if wrapped call fails with message.""" - - def _fail_if_not_ok(func): - @wraps(func) - def _wrapper(*args, **kwargs): - response = func(*args, **kwargs) - if not response.ok: - pytest.fail(f"{message}: {response.content}") - return response - - return _wrapper - - return _fail_if_not_ok - - -def unwrap_json_response(func): - """Unwrap a requests response object to json.""" - - 
@wraps(func) - def _wrapper(*args, **kwargs) -> dict: - response = func(*args, **kwargs) - return response.json() - - return _wrapper - - -class Agent: - """Class for interacting with Agent over Admin API""" - - def __init__(self, url: str): - self.url = url - - @unwrap_json_response - @fail_if_not_ok("Create invitation failed") - def create_invitation(self, **kwargs): - """Create invitation.""" - return post(self.url, "/connections/create-invitation", params=kwargs) - - @unwrap_json_response - @fail_if_not_ok("Receive invitation failed") - def receive_invite(self, invite: dict, **kwargs): - """Receive invitation.""" - return post( - self.url, "/connections/receive-invitation", params=kwargs, json=invite - ) - - @unwrap_json_response - @fail_if_not_ok("Accept invitation failed") - def accept_invite(self, connection_id: str): - """Accept invitation.""" - return post( - self.url, - f"/connections/{connection_id}/accept-invitation", - ) - - @unwrap_json_response - @fail_if_not_ok("Failed to send basic message") - def send_message(self, connection_id, content): - """Set connection metadata.""" - return post( - self.url, - f"/connections/{connection_id}/send-message", - json={"content": content}, - ) - - def get(self, path: str, return_json: bool = True, fail_with: str = None, **kwargs): - """Do get to agent endpoint.""" - wrapped_get = get - if fail_with: - wrapped_get = fail_if_not_ok(fail_with)(wrapped_get) - if return_json: - wrapped_get = unwrap_json_response(wrapped_get) - - return wrapped_get(self.url, path, **kwargs) - - def post( - self, path: str, return_json: bool = True, fail_with: str = None, **kwargs - ): - """Do get to agent endpoint.""" - wrapped_post = post - if fail_with: - wrapped_post = fail_if_not_ok(fail_with)(wrapped_post) - if return_json: - wrapped_post = unwrap_json_response(wrapped_post) - - return wrapped_post(self.url, path, **kwargs) diff --git a/jwt_vc_json/integration/tests/test_example.py b/jwt_vc_json/integration/tests/test_example.py deleted file mode 100644 index 64a59b3e0..000000000 --- a/jwt_vc_json/integration/tests/test_example.py +++ /dev/null @@ -1,32 +0,0 @@ -import time - -import pytest - -from . import ALICE, BOB, Agent - - -@pytest.fixture(scope="session") -def bob(): - """bob agent fixture.""" - yield Agent(BOB) - - -@pytest.fixture(scope="session") -def alice(): - """resolver agent fixture.""" - yield Agent(ALICE) - - -@pytest.fixture(scope="session", autouse=True) -def established_connection(bob, alice): - """Established connection filter.""" - invite = bob.create_invitation(auto_accept="true")["invitation"] - resp = alice.receive_invite(invite, auto_accept="true") - yield resp["connection_id"] - - -def test_send_message(bob, alice, established_connection): - # make sure connection is active... 
- time.sleep(1) - - alice.send_message(established_connection, "hello bob") diff --git a/jwt_vc_json/jwt_vc_json/definition.py b/jwt_vc_json/jwt_vc_json/definition.py deleted file mode 100644 index e5ece9273..000000000 --- a/jwt_vc_json/jwt_vc_json/definition.py +++ /dev/null @@ -1,10 +0,0 @@ -"""Version definitions for this plugin.""" - -versions = [ - { - "major_version": 1, - "minimum_minor_version": 0, - "current_minor_version": 0, - "path": "v1_0", - } -] diff --git a/mso_mdoc/.devcontainer/Dockerfile b/mso_mdoc/.devcontainer/Dockerfile deleted file mode 100644 index 28cc62697..000000000 --- a/mso_mdoc/.devcontainer/Dockerfile +++ /dev/null @@ -1,22 +0,0 @@ -# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.134.0/containers/python-3/.devcontainer/base.Dockerfile -ARG VARIANT="3.9-bullseye" -FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} - -ARG POETRY_VERSION="1.7.1" -ENV POETRY_HOME="/opt/poetry" \ - POETRY_VERSION=${POETRY_VERSION} - -RUN curl -sSL https://install.python-poetry.org | python3 - \ - && update-alternatives --install /usr/local/bin/poetry poetry /opt/poetry/bin/poetry 900 \ - # Enable tab completion for bash - && poetry completions bash >> /home/vscode/.bash_completion \ - # Enable tab completion for Zsh - && mkdir -p /home/vscode/.zfunc/ \ - && poetry completions zsh > /home/vscode/.zfunc/_poetry \ - && echo "fpath+=~/.zfunc\nautoload -Uz compinit && compinit" >> /home/vscode/.zshrc - -COPY pyproject.toml ./ -# COPY pyproject.toml poetry.lock ./ -RUN poetry config virtualenvs.create false \ - && poetry install --no-root --no-interaction --with integration --extras "aca-py" \ - && rm -rf /root/.cache/pypoetry \ No newline at end of file diff --git a/mso_mdoc/.devcontainer/devcontainer.json b/mso_mdoc/.devcontainer/devcontainer.json deleted file mode 100644 index 2c0f216d4..000000000 --- a/mso_mdoc/.devcontainer/devcontainer.json +++ /dev/null @@ -1,50 +0,0 @@ -// For format details, see https://aka.ms/devcontainer.json. For config options, see the -// README at: https://github.com/devcontainers/templates/tree/main/src/python -{ - "name": "mso_mdoc", - "build": { - "dockerfile": "Dockerfile", - "context": "..", - "args": { - "VARIANT": "3.9-bullseye", - "POETRY_VERSION": "1.7.1" - } - }, - "customizations": { - "vscode": { - "extensions": ["ms-python.python", "ms-python.vscode-pylance"], - "settings": { - "python.testing.pytestArgs": ["./mso_mdoc", "--no-cov"], - "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true, - "python.testing.pytestPath": "pytest", - "editor.defaultFormatter": null, - "editor.formatOnSave": false, // enable per language - "[python]": { - "editor.formatOnSave": true - }, - "python.formatting.provider": "black", - "python.formatting.blackPath": "/usr/local/py-utils/bin/black", - "python.formatting.blackArgs": [] - } - } - }, - - "features": { - "ghcr.io/devcontainers/features/docker-in-docker:2": { - "moby": false - } - }, - - // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. - "remoteUser": "vscode", - - "remoteEnv": { - "RUST_LOG": "aries-askar::log::target=error" - }, - - "mounts": [], - // Use 'forwardPorts' to make a list of ports inside the container available locally. 
- "forwardPorts": [3000, 3001], - "postCreateCommand": "bash ./.devcontainer/post-install.sh" -} diff --git a/mso_mdoc/.devcontainer/post-install.sh b/mso_mdoc/.devcontainer/post-install.sh deleted file mode 100644 index 83bc8c94c..000000000 --- a/mso_mdoc/.devcontainer/post-install.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -set -ex - -# Convenience workspace directory for later use -WORKSPACE_DIR=$(pwd) - -# install all ACA-Py requirements -python -m pip install --upgrade pip - -# install black for formatting -pip3 install black - -# Generate Poetry Lock file -poetry lock --no-update \ No newline at end of file diff --git a/mso_mdoc/README.md b/mso_mdoc/README.md index e3598fc68..7f12545aa 100644 --- a/mso_mdoc/README.md +++ b/mso_mdoc/README.md @@ -1,7 +1,7 @@ -### Description: +# MSO_MDOC credential format plugin -< Replace with information about the reason this plugin was produced and a brief overview of the features > +This plugin provides `mso_mdoc` credential support for the OID4VCI plugin. It acts as a module, dynamically loaded by the OID4VCI plugin, takes input parameters, and constructs and signs `mso_mdoc` credentials. -### Configuration: +## Configuration: -< Replace this section with an outline of configuation options and basic defaults for deploying the plugin > +No configuration is required for this plugin. diff --git a/mso_mdoc/docker/Dockerfile b/mso_mdoc/docker/Dockerfile deleted file mode 100644 index c98db10d9..000000000 --- a/mso_mdoc/docker/Dockerfile +++ /dev/null @@ -1,36 +0,0 @@ -FROM python:3.9-slim AS base -USER root - -# Install oid4vci plugin -WORKDIR /usr/src -RUN mkdir oid4vci -COPY oid4vci oid4vci - -# Install and configure poetry -WORKDIR /usr/src/app - -ENV POETRY_VERSION=1.7.1 -ENV POETRY_HOME=/opt/poetry -RUN apt-get update && apt-get install -y curl && apt-get clean -RUN curl -sSL https://install.python-poetry.org | python - - -ENV PATH="/opt/poetry/bin:$PATH" -RUN poetry config virtualenvs.in-project true - -# Setup project -RUN mkdir mso_mdoc && touch mso_mdoc/__init__.py -COPY mso_mdoc/pyproject.toml mso_mdoc/poetry.lock mso_mdoc/README.md ./ -ARG install_flags='--with integration --all-extras' -RUN poetry install ${install_flags} -USER $user - -FROM python:3.9-bullseye -WORKDIR /usr/src/app -COPY --from=base /usr/src/app/.venv /usr/src/app/.venv -ENV PATH="/usr/src/app/.venv/bin:$PATH" - -COPY mso_mdoc/mso_mdoc/ mso_mdoc/ -COPY mso_mdoc/docker/*.yml ./ - -ENTRYPOINT ["/bin/bash", "-c", "aca-py \"$@\"", "--"] -CMD ["start", "--arg-file", "default.yml"] \ No newline at end of file diff --git a/mso_mdoc/docker/default.yml b/mso_mdoc/docker/default.yml deleted file mode 100644 index 0991775c2..000000000 --- a/mso_mdoc/docker/default.yml +++ /dev/null @@ -1,22 +0,0 @@ -label: mso_mdoc - -admin: [0.0.0.0, 3001] -admin-insecure-mode: false -admin-api-key: change-me - -inbound-transport: - - [http, 0.0.0.0, 3000] - - [ws, 0.0.0.0, 3002] -outbound-transport: http -endpoint: - - http://host.docker.internal:3000 - -plugin: - - mso_mdoc.v1_0 - -genesis-url: http://test.bcovrin.vonx.io/genesis - -log-level: info - -auto-accept-invites: true -auto-respond-messages: true diff --git a/mso_mdoc/docker/integration.yml b/mso_mdoc/docker/integration.yml deleted file mode 100644 index ac7ba980b..000000000 --- a/mso_mdoc/docker/integration.yml +++ /dev/null @@ -1,20 +0,0 @@ -label: mso_mdoc - -admin: [0.0.0.0, 3001] -admin-insecure-mode: true - -inbound-transport: - - [http, 0.0.0.0, 3000] -outbound-transport: http -endpoint: - - http://host.docker.internal:3000 - 
-plugin: - - mso_mdoc.v1_0 - -genesis-url: http://test.bcovrin.vonx.io/genesis - -log-level: info - -auto-accept-invites: true -auto-respond-messages: true diff --git a/mso_mdoc/integration/Dockerfile.test.runner b/mso_mdoc/integration/Dockerfile.test.runner deleted file mode 100644 index 95f0d2461..000000000 --- a/mso_mdoc/integration/Dockerfile.test.runner +++ /dev/null @@ -1,20 +0,0 @@ -FROM python:3.9-slim -WORKDIR /usr/src/app - -# install poetry -RUN pip3 install --no-cache-dir poetry - -# Add docker-compose-wait tool -ENV WAIT_VERSION 2.7.2 -ADD https://github.com/ufoscout/docker-compose-wait/releases/download/$WAIT_VERSION/wait /wait -RUN chmod +x /wait - -# install dependencies -COPY pyproject.toml . -COPY poetry.lock . -RUN poetry install --only main - -# add tests to image -COPY tests/* tests/ - -ENTRYPOINT ["/bin/sh", "-c", "/wait && poetry run pytest \"$@\"", "--"] \ No newline at end of file diff --git a/mso_mdoc/integration/README.md b/mso_mdoc/integration/README.md deleted file mode 100644 index 60cfe0f17..000000000 --- a/mso_mdoc/integration/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Integration Tests - -All plugins should have a suite of integration tests. We use `docker compose` to set up the environment, and make use of the [Dockerfile](../docker/Dockerfile) to produce our ACA-Py/Plugin image. To simplify, we have another [Dockerfile](Dockerfile.test.runner) for running those [tests](/tests/). - -## Build and run Tests - -The integration tests will start 2 agents - bob and alice - and a juggernaut container that will execute the tests. Test results will be found in the juggernaut container output. The juggernaut container should close itself down, the logs can be reviewed in the `Docker` view, open `Containers`, open `integration`, right-click the `integration-tests` container and select `View Logs` - -```sh -# open a terminal in vs code -cd integration -docker compose build -docker compose up -``` diff --git a/mso_mdoc/integration/docker-compose.yml b/mso_mdoc/integration/docker-compose.yml deleted file mode 100644 index 34c03f859..000000000 --- a/mso_mdoc/integration/docker-compose.yml +++ /dev/null @@ -1,33 +0,0 @@ -version: '3' -#*************************************************************** -# integration level test agents * -#*************************************************************** - -services: - bob: - image: plugin-image - build: - context: ../.. - dockerfile: mso_mdoc/docker/Dockerfile - args: - - install_flags=--no-interaction --with integration --all-extras - command: start --arg-file integration.yml --label bob -e http://bob:3000 --log-level debug - - alice: - image: plugin-image - command: start --arg-file integration.yml --label alice -e http://alice:3000 --log-level debug - - tests: - container_name: juggernaut - build: - context: . 
- dockerfile: Dockerfile.test.runner - environment: - - WAIT_BEFORE_HOSTS=3 - - WAIT_HOSTS=bob:3000, alice:3000 - - WAIT_HOSTS_TIMEOUT=60 - - WAIT_SLEEP_INTERVAL=1 - - WAIT_HOST_CONNECT_TIMEOUT=30 - depends_on: - - bob - - alice \ No newline at end of file diff --git a/mso_mdoc/integration/pyproject.toml b/mso_mdoc/integration/pyproject.toml deleted file mode 100644 index 4ff3f74eb..000000000 --- a/mso_mdoc/integration/pyproject.toml +++ /dev/null @@ -1,17 +0,0 @@ -[tool.poetry] -name = "plugin-globals-tests" -version = "0.1.0" -description = "" -authors = [] - -[tool.poetry.dependencies] -python = "^3.9" -pytest = "^8.2.0" -pytest-asyncio = "~0.23.7" -requests = "^2.31.0" - -[tool.poetry.dev-dependencies] - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/mso_mdoc/integration/tests/__init__.py b/mso_mdoc/integration/tests/__init__.py deleted file mode 100644 index 6224ca13d..000000000 --- a/mso_mdoc/integration/tests/__init__.py +++ /dev/null @@ -1,108 +0,0 @@ -from functools import wraps - -import pytest -import requests - -AUTO_ACCEPT = "false" - -BOB = "http://bob:3001" -ALICE = "http://alice:3001" - - -def get(agent: str, path: str, **kwargs): - """Get.""" - return requests.get(f"{agent}{path}", **kwargs) - - -def post(agent: str, path: str, **kwargs): - """Post.""" - return requests.post(f"{agent}{path}", **kwargs) - - -def fail_if_not_ok(message: str): - """Fail the current test if wrapped call fails with message.""" - - def _fail_if_not_ok(func): - @wraps(func) - def _wrapper(*args, **kwargs): - response = func(*args, **kwargs) - if not response.ok: - pytest.fail(f"{message}: {response.content}") - return response - - return _wrapper - - return _fail_if_not_ok - - -def unwrap_json_response(func): - """Unwrap a requests response object to json.""" - - @wraps(func) - def _wrapper(*args, **kwargs) -> dict: - response = func(*args, **kwargs) - return response.json() - - return _wrapper - - -class Agent: - """Class for interacting with Agent over Admin API""" - - def __init__(self, url: str): - self.url = url - - @unwrap_json_response - @fail_if_not_ok("Create invitation failed") - def create_invitation(self, **kwargs): - """Create invitation.""" - return post(self.url, "/connections/create-invitation", params=kwargs) - - @unwrap_json_response - @fail_if_not_ok("Receive invitation failed") - def receive_invite(self, invite: dict, **kwargs): - """Receive invitation.""" - return post( - self.url, "/connections/receive-invitation", params=kwargs, json=invite - ) - - @unwrap_json_response - @fail_if_not_ok("Accept invitation failed") - def accept_invite(self, connection_id: str): - """Accept invitation.""" - return post( - self.url, - f"/connections/{connection_id}/accept-invitation", - ) - - @unwrap_json_response - @fail_if_not_ok("Failed to send basic message") - def send_message(self, connection_id, content): - """Set connection metadata.""" - return post( - self.url, - f"/connections/{connection_id}/send-message", - json={"content": content}, - ) - - def get(self, path: str, return_json: bool = True, fail_with: str = None, **kwargs): - """Do get to agent endpoint.""" - wrapped_get = get - if fail_with: - wrapped_get = fail_if_not_ok(fail_with)(wrapped_get) - if return_json: - wrapped_get = unwrap_json_response(wrapped_get) - - return wrapped_get(self.url, path, **kwargs) - - def post( - self, path: str, return_json: bool = True, fail_with: str = None, **kwargs - ): - """Do get to agent endpoint.""" - 
wrapped_post = post - if fail_with: - wrapped_post = fail_if_not_ok(fail_with)(wrapped_post) - if return_json: - wrapped_post = unwrap_json_response(wrapped_post) - - return wrapped_post(self.url, path, **kwargs) diff --git a/mso_mdoc/integration/tests/test_example.py b/mso_mdoc/integration/tests/test_example.py deleted file mode 100644 index 64a59b3e0..000000000 --- a/mso_mdoc/integration/tests/test_example.py +++ /dev/null @@ -1,32 +0,0 @@ -import time - -import pytest - -from . import ALICE, BOB, Agent - - -@pytest.fixture(scope="session") -def bob(): - """bob agent fixture.""" - yield Agent(BOB) - - -@pytest.fixture(scope="session") -def alice(): - """resolver agent fixture.""" - yield Agent(ALICE) - - -@pytest.fixture(scope="session", autouse=True) -def established_connection(bob, alice): - """Established connection filter.""" - invite = bob.create_invitation(auto_accept="true")["invitation"] - resp = alice.receive_invite(invite, auto_accept="true") - yield resp["connection_id"] - - -def test_send_message(bob, alice, established_connection): - # make sure connection is active... - time.sleep(1) - - alice.send_message(established_connection, "hello bob") diff --git a/mso_mdoc/mso_mdoc/definition.py b/mso_mdoc/mso_mdoc/definition.py deleted file mode 100644 index e5ece9273..000000000 --- a/mso_mdoc/mso_mdoc/definition.py +++ /dev/null @@ -1,10 +0,0 @@ -"""Version definitions for this plugin.""" - -versions = [ - { - "major_version": 1, - "minimum_minor_version": 0, - "current_minor_version": 0, - "path": "v1_0", - } -] diff --git a/mso_mdoc/mso_mdoc/v1_0/tests/test_x509.py b/mso_mdoc/mso_mdoc/v1_0/tests/test_x509.py new file mode 100644 index 000000000..26c00d65a --- /dev/null +++ b/mso_mdoc/mso_mdoc/v1_0/tests/test_x509.py @@ -0,0 +1,28 @@ +import pytest + +import os +from pycose.keys import CoseKey + +from aries_cloudagent.wallet.util import b64_to_bytes + +from ..x509 import selfsigned_x509cert + + +@pytest.mark.asyncio +def test_selfsigned_x509cert(jwk, headers, payload): + """Test selfsigned_x509cert() method.""" + + pk_dict = { + "KTY": jwk.get("kty") or "", # OKP, EC + "CURVE": jwk.get("crv") or "", # ED25519, P_256 + "ALG": "EdDSA" if jwk.get("kty") == "OKP" else "ES256", + "D": b64_to_bytes(jwk.get("d") or "", True), # EdDSA + "X": b64_to_bytes(jwk.get("x") or "", True), # EdDSA, EcDSA + "Y": b64_to_bytes(jwk.get("y") or "", True), # EcDSA + "KID": os.urandom(32), + } + cose_key = CoseKey.from_dict(pk_dict) + + x509_cert = selfsigned_x509cert(private_key=cose_key) + + assert x509_cert diff --git a/oid4vci/poetry.lock b/oid4vci/poetry.lock index 300894b12..83cc5e788 100644 --- a/oid4vci/poetry.lock +++ b/oid4vci/poetry.lock @@ -1963,13 +1963,13 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-asyncio" -version = "0.23.7" +version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, - {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, ] [package.dependencies] @@ -2212,29 +2212,29 @@ test = ["hypothesis 
(==5.19.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "ruff" -version = "0.5.2" +version = "0.5.3" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.2-py3-none-linux_armv6l.whl", hash = "sha256:7bab8345df60f9368d5f4594bfb8b71157496b44c30ff035d1d01972e764d3be"}, - {file = "ruff-0.5.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:1aa7acad382ada0189dbe76095cf0a36cd0036779607c397ffdea16517f535b1"}, - {file = "ruff-0.5.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:aec618d5a0cdba5592c60c2dee7d9c865180627f1a4a691257dea14ac1aa264d"}, - {file = "ruff-0.5.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b62adc5ce81780ff04077e88bac0986363e4a3260ad3ef11ae9c14aa0e67ef"}, - {file = "ruff-0.5.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dc42ebf56ede83cb080a50eba35a06e636775649a1ffd03dc986533f878702a3"}, - {file = "ruff-0.5.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c15c6e9f88c67ffa442681365d11df38afb11059fc44238e71a9d9f1fd51de70"}, - {file = "ruff-0.5.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d3de9a5960f72c335ef00763d861fc5005ef0644cb260ba1b5a115a102157251"}, - {file = "ruff-0.5.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe5a968ae933e8f7627a7b2fc8893336ac2be0eb0aace762d3421f6e8f7b7f83"}, - {file = "ruff-0.5.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a04f54a9018f75615ae52f36ea1c5515e356e5d5e214b22609ddb546baef7132"}, - {file = "ruff-0.5.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed02fb52e3741f0738db5f93e10ae0fb5c71eb33a4f2ba87c9a2fa97462a649"}, - {file = "ruff-0.5.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3cf8fe659f6362530435d97d738eb413e9f090e7e993f88711b0377fbdc99f60"}, - {file = "ruff-0.5.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:237a37e673e9f3cbfff0d2243e797c4862a44c93d2f52a52021c1a1b0899f846"}, - {file = "ruff-0.5.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2a2949ce7c1cbd8317432ada80fe32156df825b2fd611688814c8557824ef060"}, - {file = "ruff-0.5.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:481af57c8e99da92ad168924fd82220266043c8255942a1cb87958b108ac9335"}, - {file = "ruff-0.5.2-py3-none-win32.whl", hash = "sha256:f1aea290c56d913e363066d83d3fc26848814a1fed3d72144ff9c930e8c7c718"}, - {file = "ruff-0.5.2-py3-none-win_amd64.whl", hash = "sha256:8532660b72b5d94d2a0a7a27ae7b9b40053662d00357bb2a6864dd7e38819084"}, - {file = "ruff-0.5.2-py3-none-win_arm64.whl", hash = "sha256:73439805c5cb68f364d826a5c5c4b6c798ded6b7ebaa4011f01ce6c94e4d5583"}, - {file = "ruff-0.5.2.tar.gz", hash = "sha256:2c0df2d2de685433794a14d8d2e240df619b748fbe3367346baa519d8e6f1ca2"}, + {file = "ruff-0.5.3-py3-none-linux_armv6l.whl", hash = "sha256:b12424d9db7347fa63c5ed9af010003338c63c629fb9c9c6adb2aa4f5699729b"}, + {file = "ruff-0.5.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8d72c5684bbd4ed304a9a955ee2e67f57b35f6193222ade910cca8a805490e3"}, + {file = "ruff-0.5.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d2fc2cdb85ccac1e816cc9d5d8cedefd93661bd957756d902543af32a6b04a71"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf4bc751240b2fab5d19254571bcacb315c7b0b00bf3c912d52226a82bbec073"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:bc697ec874fdd7c7ba0a85ec76ab38f8595224868d67f097c5ffc21136e72fcd"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e791d34d3557a3819b3704bc1f087293c821083fa206812842fa363f6018a192"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:76bb5a87fd397520b91a83eae8a2f7985236d42dd9459f09eef58e7f5c1d8316"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8cfc7a26422c78e94f1ec78ec02501bbad2df5834907e75afe474cc6b83a8c1"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96066c4328a49fce2dd40e80f7117987369feec30ab771516cf95f1cc2db923c"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfe9ab5bdc0b08470c3b261643ad54ea86edc32b64d1e080892d7953add3ad"}, + {file = "ruff-0.5.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7704582a026fa02cca83efd76671a98ee6eb412c4230209efe5e2a006c06db62"}, + {file = "ruff-0.5.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:08058d077e21b856d32ebf483443390e29dc44d927608dc8f092ff6776519da9"}, + {file = "ruff-0.5.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77d49484429ed7c7e6e2e75a753f153b7b58f875bdb4158ad85af166a1ec1822"}, + {file = "ruff-0.5.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:642cbff6cbfa38d2566d8db086508d6f472edb136cbfcc4ea65997745368c29e"}, + {file = "ruff-0.5.3-py3-none-win32.whl", hash = "sha256:eafc45dd8bdc37a00b28e68cc038daf3ca8c233d73fea276dcd09defb1352841"}, + {file = "ruff-0.5.3-py3-none-win_amd64.whl", hash = "sha256:cbaec2ddf4f78e5e9ecf5456ea0f496991358a1d883862ed0b9e947e2b6aea93"}, + {file = "ruff-0.5.3-py3-none-win_arm64.whl", hash = "sha256:05fbd2cb404775d6cd7f2ff49504e2d20e13ef95fa203bd1ab22413af70d420b"}, + {file = "ruff-0.5.3.tar.gz", hash = "sha256:2a3eb4f1841771fa5b67a56be9c2d16fd3cc88e378bd86aaeaec2f7e6bcdd0a2"}, ] [[package]] From 589e159ac11ab3cceab2be8494131c9d16938f60 Mon Sep 17 00:00:00 2001 From: Ivan Wei Date: Fri, 19 Jul 2024 12:34:07 -0400 Subject: [PATCH 11/11] review fixes Signed-off-by: Ivan Wei --- .../jwt_vc_json/v1_0/cred_processor.py | 7 +-- mso_mdoc/mso_mdoc/v1_0/cred_processor.py | 10 ++-- oid4vci/integration/docker-compose.yml | 1 - oid4vci/oid4vci/cred_processor.py | 39 ++++++++++++++ oid4vci/oid4vci/pop_result.py | 15 ++++++ oid4vci/oid4vci/public_routes.py | 52 ++++--------------- 6 files changed, 73 insertions(+), 51 deletions(-) create mode 100644 oid4vci/oid4vci/cred_processor.py create mode 100644 oid4vci/oid4vci/pop_result.py diff --git a/jwt_vc_json/jwt_vc_json/v1_0/cred_processor.py b/jwt_vc_json/jwt_vc_json/v1_0/cred_processor.py index 884fe91b9..3d94f0d47 100644 --- a/jwt_vc_json/jwt_vc_json/v1_0/cred_processor.py +++ b/jwt_vc_json/jwt_vc_json/v1_0/cred_processor.py @@ -4,13 +4,14 @@ import logging import uuid -from aiohttp import web from aries_cloudagent.admin.request_context import AdminRequestContext from aries_cloudagent.wallet.jwt import jwt_sign from oid4vci.models.exchange import OID4VCIExchangeRecord from oid4vci.models.supported_cred import SupportedCredential -from oid4vci.public_routes import types_are_subset, PopResult, ICredProcessor +from oid4vci.public_routes import types_are_subset +from oid4vci.pop_result import PopResult +from oid4vci.cred_processor import ICredProcessor, CredIssueError LOGGER = logging.getLogger(__name__) @@ -28,7 +29,7 @@ async def issue_cred( ): """Return signed credential in JWT format.""" if not 
types_are_subset(body.get("types"), supported.format_data.get("types")):
-            raise web.HTTPBadRequest(reason="Requested types does not match offer.")
+            raise CredIssueError("Requested types does not match offer.")
 
         current_time = datetime.datetime.now(datetime.timezone.utc)
         current_time_unix_timestamp = int(current_time.timestamp())
diff --git a/mso_mdoc/mso_mdoc/v1_0/cred_processor.py b/mso_mdoc/mso_mdoc/v1_0/cred_processor.py
index 34f1ee91e..77211bd99 100644
--- a/mso_mdoc/mso_mdoc/v1_0/cred_processor.py
+++ b/mso_mdoc/mso_mdoc/v1_0/cred_processor.py
@@ -4,12 +4,12 @@
 import json
 import re
 
-from aiohttp import web
 from aries_cloudagent.admin.request_context import AdminRequestContext
 
 from oid4vci.models.exchange import OID4VCIExchangeRecord
 from oid4vci.models.supported_cred import SupportedCredential
-from oid4vci.public_routes import PopResult, ICredProcessor
+from oid4vci.pop_result import PopResult
+from oid4vci.cred_processor import ICredProcessor, CredIssueError
 
 from .mdoc import mso_mdoc_sign
 
@@ -29,7 +29,7 @@
     ):
         """Return signed credential in COBR format."""
         if body.get("doctype") != supported.format_data.get("doctype"):
-            raise web.HTTPBadRequest(reason="Requested types does not match offer.")
+            raise CredIssueError("Requested doctype does not match offer.")
 
         try:
             headers = {
@@ -47,7 +47,7 @@
                 context.profile, headers, payload, did, verification_method
             )
             mso_mdoc = mso_mdoc[2:-1] if mso_mdoc.startswith("b'") else None
-        except ValueError as err:
-            raise web.HTTPBadRequest(reason="Failed to issue credential") from err
+        except Exception as ex:
+            raise CredIssueError("Failed to issue credential") from ex
 
         return mso_mdoc
diff --git a/oid4vci/integration/docker-compose.yml b/oid4vci/integration/docker-compose.yml
index a419c81b6..7e48dd2af 100644
--- a/oid4vci/integration/docker-compose.yml
+++ b/oid4vci/integration/docker-compose.yml
@@ -34,7 +34,6 @@ services:
         --log-level debug
         --debug-webhooks
         --plugin oid4vci
-        --plugin jwt_vs_json
    healthcheck:
      test: curl -s -o /dev/null -w '%{http_code}' "http://localhost:3001/status/live" | grep "200" > /dev/null
      start_period: 30s
diff --git a/oid4vci/oid4vci/cred_processor.py b/oid4vci/oid4vci/cred_processor.py
new file mode 100644
index 000000000..ba4405d78
--- /dev/null
+++ b/oid4vci/oid4vci/cred_processor.py
@@ -0,0 +1,39 @@
+"""CredProcessor interface and exception."""
+
+from abc import ABC, abstractmethod
+
+from aries_cloudagent.core.error import BaseError
+from aries_cloudagent.admin.request_context import AdminRequestContext
+
+from .models.exchange import OID4VCIExchangeRecord
+from .models.supported_cred import SupportedCredential
+from .pop_result import PopResult
+
+
+class ICredProcessor(ABC):
+    """Returns signed credential payload."""
+
+    @abstractmethod
+    def issue_cred(
+        self,
+        body: any,
+        supported: SupportedCredential,
+        ex_record: OID4VCIExchangeRecord,
+        pop: PopResult,
+        context: AdminRequestContext,
+    ):
+        """Method signature.
+
+        Args:
+            body: any
+            supported: SupportedCredential
+            ex_record: OID4VCIExchangeRecord
+            pop: PopResult
+            context: AdminRequestContext
+        Returns:
+            encoded: signed credential payload.
+        """
+
+
+class CredIssueError(BaseError):
+    """Base class for CredProcessor errors."""
diff --git a/oid4vci/oid4vci/pop_result.py b/oid4vci/oid4vci/pop_result.py
new file mode 100644
index 000000000..a26efec78
--- /dev/null
+++ b/oid4vci/oid4vci/pop_result.py
@@ -0,0 +1,15 @@
+"""PopResult dataclass."""
+
+from dataclasses import dataclass
+from typing import Any, Dict, Mapping, Optional
+
+
+@dataclass
+class PopResult:
+    """Result from proof of possession."""
+
+    headers: Mapping[str, Any]
+    payload: Mapping[str, Any]
+    verified: bool
+    holder_kid: Optional[str]
+    holder_jwk: Optional[Dict[str, Any]]
diff --git a/oid4vci/oid4vci/public_routes.py b/oid4vci/oid4vci/public_routes.py
index 04a5cca12..6c46e69d1 100644
--- a/oid4vci/oid4vci/public_routes.py
+++ b/oid4vci/oid4vci/public_routes.py
@@ -2,10 +2,8 @@
 
 import datetime
 import logging
-from dataclasses import dataclass
 from secrets import token_urlsafe
-from typing import Any, Dict, List, Mapping, Optional
-from abc import ABC, abstractmethod
+from typing import Any, Dict, List, Optional
 
 import jwt
 from aiohttp import web
@@ -33,6 +31,8 @@
 from .config import Config
 from .models.exchange import OID4VCIExchangeRecord
 from .models.supported_cred import SupportedCredential
+from .pop_result import PopResult
+from .cred_processor import CredIssueError
 
 LOGGER = logging.getLogger(__name__)
 PRE_AUTHORIZED_CODE_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:pre-authorized_code"
@@ -223,17 +223,6 @@ async def key_material_for_kid(profile: Profile, kid: str):
     raise web.HTTPBadRequest(reason="Unsupported verification method type")
 
 
-@dataclass
-class PopResult:
-    """Result from proof of posession."""
-
-    headers: Mapping[str, Any]
-    payload: Mapping[str, Any]
-    verified: bool
-    holder_kid: Optional[str]
-    holder_jwk: Optional[Dict[str, Any]]
-
-
 async def handle_proof_of_posession(
     profile: Profile, proof: Dict[str, Any], nonce: str
 ):
@@ -298,31 +287,6 @@ class IssueCredentialRequestSchema(OpenAPISchema):
     proof = fields.Dict(metadata={"description": ""})
 
 
-class ICredProcessor(ABC):
-    """Returns singed credential payload."""
-
-    @abstractmethod
-    def issue_cred(
-        self,
-        body: any,
-        supported: SupportedCredential,
-        ex_record: OID4VCIExchangeRecord,
-        pop: PopResult,
-        context: AdminRequestContext,
-    ):
-        """Method signature.
-
-        Args:
-            body: any
-            supported: SupportedCredential
-            ex_record: OID4VCIExchangeRecord
-            pop: PopResult
-            context: AdminRequestContext
-        Returns:
-            encoded: signed credential payload.
-        """
-
-
 @docs(tags=["oid4vci"], summary="Issue a credential")
 @request_schema(IssueCredentialRequestSchema())
 async def issue_cred(request: web.Request):
@@ -380,9 +344,13 @@
         raise web.HTTPInternalServerError(
             reason=f"No handler to process {supported.format} credential."
         )
-    credential = await handler.cred_processor.issue_cred(
-        body, supported, ex_record, pop, context
-    )
+
+    try:
+        credential = await handler.cred_processor.issue_cred(
+            body, supported, ex_record, pop, context
+        )
+    except CredIssueError as e:
+        raise web.HTTPBadRequest(reason=e.message)
 
     async with context.session() as session:
         ex_record.state = OID4VCIExchangeRecord.STATE_ISSUED