From ed824351dc6a8d45624758a076ddde28b898ef95 Mon Sep 17 00:00:00 2001
From: SDK Generator Bot
Date: Wed, 17 Dec 2025 14:16:35 +0000
Subject: [PATCH] Generate sfs

---
 services/sfs/README.md                         |   22 +
 services/sfs/poetry.lock                       | 1778 +++++
 services/sfs/pyproject.toml                    |   99 +
 services/sfs/src/stackit/sfs/__init__.py       |  191 +
 services/sfs/src/stackit/sfs/api/__init__.py   |    4 +
 .../sfs/src/stackit/sfs/api/default_api.py     | 5783 +++++++++++++++++
 services/sfs/src/stackit/sfs/api_client.py     |  639 ++
 services/sfs/src/stackit/sfs/api_response.py   |   23 +
 services/sfs/src/stackit/sfs/configuration.py  |  164 +
 services/sfs/src/stackit/sfs/exceptions.py     |  217 +
 .../sfs/src/stackit/sfs/models/__init__.py     |   86 +
 .../models/create_resource_pool_payload.py     |  134 +
 .../models/create_resource_pool_response.py    |   94 +
 .../create_resource_pool_snapshot_payload.py   |   89 +
 .../create_resource_pool_snapshot_response.py  |   96 +
 .../create_share_export_policy_payload.py      |  110 +
 ...create_share_export_policy_request_rule.py  |  124 +
 .../create_share_export_policy_response.py     |   96 +
 .../sfs/models/create_share_payload.py         |  107 +
 .../sfs/models/create_share_response.py        |   86 +
 services/sfs/src/stackit/sfs/models/error.py   |   94 +
 .../sfs/models/get_resource_pool_response.py   |   92 +
 .../get_resource_pool_snapshot_response.py     |   96 +
 .../get_share_export_policy_response.py        |   96 +
 .../stackit/sfs/models/get_share_response.py   |   86 +
 .../stackit/sfs/models/google_protobuf_any.py  |   97 +
 .../list_performance_classes_response.py       |  100 +
 .../list_resource_pool_snapshots_response.py   |  100 +
 .../models/list_resource_pools_response.py     |  100 +
 .../list_share_export_policies_response.py     |  100 +
 .../sfs/models/list_shares_response.py         |   92 +
 .../list_snapshot_schedules_response.py        |  100 +
 .../stackit/sfs/models/performance_class.py    |   85 +
 .../src/stackit/sfs/models/resource_pool.py    |  228 +
 .../models/resource_pool_performance_class.py  |   89 +
 .../sfs/models/resource_pool_snapshot.py       |  136 +
 .../stackit/sfs/models/resource_pool_space.py  |  115 +
 services/sfs/src/stackit/sfs/models/share.py   |  151 +
 .../stackit/sfs/models/share_export_policy.py  |  131 +
 .../sfs/models/share_export_policy_rule.py     |  153 +
 .../stackit/sfs/models/snapshot_schedule.py    |   85 +
 services/sfs/src/stackit/sfs/models/status.py  |  111 +
 .../models/update_resource_pool_payload.py     |  133 +
 .../models/update_resource_pool_response.py    |   94 +
 .../update_share_export_policy_body_rule.py    |  124 +
 .../update_share_export_policy_payload.py      |  113 +
 .../update_share_export_policy_response.py     |   94 +
 .../sfs/models/update_share_payload.py         |  110 +
 .../sfs/models/update_share_response.py        |   86 +
 .../stackit/sfs/models/validation_error.py     |  113 +
 .../sfs/models/validation_error_field.py       |   84 +
 services/sfs/src/stackit/sfs/py.typed          |    0
 services/sfs/src/stackit/sfs/rest.py           |  148 +
 53 files changed, 13478 insertions(+)
 create mode 100644 services/sfs/README.md
 create mode 100644 services/sfs/poetry.lock
 create mode 100644 services/sfs/pyproject.toml
 create mode 100644 services/sfs/src/stackit/sfs/__init__.py
 create mode 100644 services/sfs/src/stackit/sfs/api/__init__.py
 create mode 100644 services/sfs/src/stackit/sfs/api/default_api.py
 create mode 100644 services/sfs/src/stackit/sfs/api_client.py
 create mode 100644 services/sfs/src/stackit/sfs/api_response.py
 create mode 100644 services/sfs/src/stackit/sfs/configuration.py
 create mode 100644 services/sfs/src/stackit/sfs/exceptions.py
 create mode 100644 services/sfs/src/stackit/sfs/models/__init__.py
 create mode 100644 services/sfs/src/stackit/sfs/models/create_resource_pool_payload.py
 create mode 100644 services/sfs/src/stackit/sfs/models/create_resource_pool_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/create_resource_pool_snapshot_payload.py
 create mode 100644 services/sfs/src/stackit/sfs/models/create_resource_pool_snapshot_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/create_share_export_policy_payload.py
 create mode 100644 services/sfs/src/stackit/sfs/models/create_share_export_policy_request_rule.py
 create mode 100644 services/sfs/src/stackit/sfs/models/create_share_export_policy_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/create_share_payload.py
 create mode 100644 services/sfs/src/stackit/sfs/models/create_share_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/error.py
 create mode 100644 services/sfs/src/stackit/sfs/models/get_resource_pool_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/get_resource_pool_snapshot_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/get_share_export_policy_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/get_share_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/google_protobuf_any.py
 create mode 100644 services/sfs/src/stackit/sfs/models/list_performance_classes_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/list_resource_pool_snapshots_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/list_resource_pools_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/list_share_export_policies_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/list_shares_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/list_snapshot_schedules_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/performance_class.py
 create mode 100644 services/sfs/src/stackit/sfs/models/resource_pool.py
 create mode 100644 services/sfs/src/stackit/sfs/models/resource_pool_performance_class.py
 create mode 100644 services/sfs/src/stackit/sfs/models/resource_pool_snapshot.py
 create mode 100644 services/sfs/src/stackit/sfs/models/resource_pool_space.py
 create mode 100644 services/sfs/src/stackit/sfs/models/share.py
 create mode 100644 services/sfs/src/stackit/sfs/models/share_export_policy.py
 create mode 100644 services/sfs/src/stackit/sfs/models/share_export_policy_rule.py
 create mode 100644 services/sfs/src/stackit/sfs/models/snapshot_schedule.py
 create mode 100644 services/sfs/src/stackit/sfs/models/status.py
 create mode 100644 services/sfs/src/stackit/sfs/models/update_resource_pool_payload.py
 create mode 100644 services/sfs/src/stackit/sfs/models/update_resource_pool_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/update_share_export_policy_body_rule.py
 create mode 100644 services/sfs/src/stackit/sfs/models/update_share_export_policy_payload.py
 create mode 100644 services/sfs/src/stackit/sfs/models/update_share_export_policy_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/update_share_payload.py
 create mode 100644 services/sfs/src/stackit/sfs/models/update_share_response.py
 create mode 100644 services/sfs/src/stackit/sfs/models/validation_error.py
 create mode 100644 services/sfs/src/stackit/sfs/models/validation_error_field.py
 create mode 100644 services/sfs/src/stackit/sfs/py.typed
 create mode 100644 services/sfs/src/stackit/sfs/rest.py

diff --git a/services/sfs/README.md b/services/sfs/README.md
new file mode 100644
index 000000000..7180a91bb
--- /dev/null
+++ b/services/sfs/README.md
@@ -0,0 +1,22 @@
+# stackit.sfs
+API used to create and manage NFS Shares.
+
+
+This package is part of the STACKIT Python SDK. For additional information, please visit the [GitHub repository](https://github.com/stackitcloud/stackit-sdk-python) of the SDK.
+
+
+## Installation & Usage
+### pip install
+
+```sh
+pip install stackit-sfs
+```
+
+Then import the package:
+```python
+import stackit.sfs
+```
+
+## Getting Started
+
+[Examples](https://github.com/stackitcloud/stackit-sdk-python/tree/main/examples) for the usage of the package can be found in the [GitHub repository](https://github.com/stackitcloud/stackit-sdk-python) of the SDK.
\ No newline at end of file
diff --git a/services/sfs/poetry.lock b/services/sfs/poetry.lock
new file mode 100644
index 000000000..53937e512
--- /dev/null
+++ b/services/sfs/poetry.lock
@@ -0,0 +1,1778 @@
+# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+groups = ["main", "dev"]
+files = [
+    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]
+
+[[package]]
+name = "attrs"
+version = "25.4.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+    {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"},
+    {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"},
+]
+
+[[package]]
+name = "autoflake"
+version = "2.3.1"
+description = "Removes unused imports and unused variables"
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+    {file = "autoflake-2.3.1-py3-none-any.whl", hash = "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840"},
+    {file = "autoflake-2.3.1.tar.gz", hash = "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e"},
+]
+
+[package.dependencies]
+pyflakes = ">=3.0.0"
+tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
+
+[[package]]
+name = "autoimport"
+version = "1.6.1"
+description = "Autoimport missing python libraries."
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+    {file = "autoimport-1.6.1-py3-none-any.whl", hash = "sha256:8f9914576b07d408f2303783fe4fe85b8f9ef48f3771563810f4ea4a42c72af5"},
+    {file = "autoimport-1.6.1.tar.gz", hash = "sha256:0c8b2656209a003fc7e2b6805c5e91770651cbe1e60edf1aaa14beae2405ae26"},
+]
+
+[package.dependencies]
+autoflake = ">=1.4"
+click = ">=8.1.3"
+maison = ">=1.4.0,<2.0.0"
+pyprojroot = ">=0.2.0"
+sh = ">=1.14.2"
+xdg = ">=6.0.0"
+
+[[package]]
+name = "bandit"
+version = "1.8.6"
+description = "Security oriented static analyser for python code."
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+markers = "python_version < \"3.12\""
+files = [
+    {file = "bandit-1.8.6-py3-none-any.whl", hash = "sha256:3348e934d736fcdb68b6aa4030487097e23a501adf3e7827b63658df464dddd0"},
+    {file = "bandit-1.8.6.tar.gz", hash = "sha256:dbfe9c25fc6961c2078593de55fd19f2559f9e45b99f1272341f5b95dea4e56b"},
+]
+
+[package.dependencies]
+colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""}
+PyYAML = ">=5.3.1"
+rich = "*"
+stevedore = ">=1.20.0"
+
+[package.extras]
+baseline = ["GitPython (>=3.1.30)"]
+sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"]
+test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"]
+toml = ["tomli (>=1.1.0) ; python_version < \"3.11\""]
+yaml = ["PyYAML"]
+
+[[package]]
+name = "bandit"
+version = "1.9.2"
+description = "Security oriented static analyser for python code."
+optional = false
+python-versions = ">=3.10"
+groups = ["dev"]
+markers = "python_version >= \"3.12\""
+files = [
+    {file = "bandit-1.9.2-py3-none-any.whl", hash = "sha256:bda8d68610fc33a6e10b7a8f1d61d92c8f6c004051d5e946406be1fb1b16a868"},
+    {file = "bandit-1.9.2.tar.gz", hash = "sha256:32410415cd93bf9c8b91972159d5cf1e7f063a9146d70345641cd3877de348ce"},
+]
+
+[package.dependencies]
+colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""}
+PyYAML = ">=5.3.1"
+rich = "*"
+stevedore = ">=1.20.0"
+
+[package.extras]
+baseline = ["GitPython (>=3.1.30)"]
+sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"]
+test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"]
+toml = ["tomli (>=1.1.0) ; python_version < \"3.11\""]
+yaml = ["PyYAML"]
+
+[[package]]
+name = "black"
+version = "25.11.0"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+markers = "python_version < \"3.12\""
+files = [
+    {file = "black-25.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ec311e22458eec32a807f029b2646f661e6859c3f61bc6d9ffb67958779f392e"},
+    {file = "black-25.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1032639c90208c15711334d681de2e24821af0575573db2810b0763bcd62e0f0"},
+    {file = "black-25.11.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0c0f7c461df55cf32929b002335883946a4893d759f2df343389c4396f3b6b37"},
+    {file = "black-25.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:f9786c24d8e9bd5f20dc7a7f0cdd742644656987f6ea6947629306f937726c03"},
+    {file = "black-25.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:895571922a35434a9d8ca67ef926da6bc9ad464522a5fe0db99b394ef1c0675a"},
+    {file = "black-25.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cb4f4b65d717062191bdec8e4a442539a8ea065e6af1c4f4d36f0cdb5f71e170"},
+    {file = "black-25.11.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d81a44cbc7e4f73a9d6ae449ec2317ad81512d1e7dce7d57f6333fd6259737bc"},
+    {file = "black-25.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:7eebd4744dfe92ef1ee349dc532defbf012a88b087bb7ddd688ff59a447b080e"},
+    {file = "black-25.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:80e7486ad3535636657aa180ad32a7d67d7c273a80e12f1b4bfa0823d54e8fac"},
+    {file = "black-25.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6cced12b747c4c76bc09b4db057c319d8545307266f41aaee665540bc0e04e96"},
+    {file = "black-25.11.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cb2d54a39e0ef021d6c5eef442e10fd71fcb491be6413d083a320ee768329dd"},
+    {file = "black-25.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae263af2f496940438e5be1a0c1020e13b09154f3af4df0835ea7f9fe7bfa409"},
+    {file = "black-25.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0a1d40348b6621cc20d3d7530a5b8d67e9714906dfd7346338249ad9c6cedf2b"},
+    {file = "black-25.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:51c65d7d60bb25429ea2bf0731c32b2a2442eb4bd3b2afcb47830f0b13e58bfd"},
+    {file = "black-25.11.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:936c4dd07669269f40b497440159a221ee435e3fddcf668e0c05244a9be71993"},
+    {file = "black-25.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:f42c0ea7f59994490f4dccd64e6b2dd49ac57c7c84f38b8faab50f8759db245c"},
+    {file = "black-25.11.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:35690a383f22dd3e468c85dc4b915217f87667ad9cce781d7b42678ce63c4170"},
+    {file = "black-25.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:dae49ef7369c6caa1a1833fd5efb7c3024bb7e4499bf64833f65ad27791b1545"},
+    {file = "black-25.11.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bd4a22a0b37401c8e492e994bce79e614f91b14d9ea911f44f36e262195fdda"},
+    {file = "black-25.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:aa211411e94fdf86519996b7f5f05e71ba34835d8f0c0f03c00a26271da02664"},
+    {file = "black-25.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3bb5ce32daa9ff0605d73b6f19da0b0e6c1f8f2d75594db539fdfed722f2b06"},
+    {file = "black-25.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9815ccee1e55717fe9a4b924cae1646ef7f54e0f990da39a34fc7b264fcf80a2"},
+    {file = "black-25.11.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:92285c37b93a1698dcbc34581867b480f1ba3a7b92acf1fe0467b04d7a4da0dc"},
+    {file = "black-25.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:43945853a31099c7c0ff8dface53b4de56c41294fa6783c0441a8b1d9bf668bc"},
+    {file = "black-25.11.0-py3-none-any.whl", hash = "sha256:e3f562da087791e96cefcd9dda058380a442ab322a02e222add53736451f604b"},
+    {file = "black-25.11.0.tar.gz", hash = "sha256:9a323ac32f5dc75ce7470501b887250be5005a01602e931a15e45593f70f6e08"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+pytokens = ">=0.3.0"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.10)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "black"
+version = "25.12.0"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.10"
+groups = ["dev"]
+markers = "python_version >= \"3.12\""
+files = [
+    {file = "black-25.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f85ba1ad15d446756b4ab5f3044731bf68b777f8f9ac9cdabd2425b97cd9c4e8"},
+    {file = "black-25.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:546eecfe9a3a6b46f9d69d8a642585a6eaf348bcbbc4d87a19635570e02d9f4a"},
+    {file = "black-25.12.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17dcc893da8d73d8f74a596f64b7c98ef5239c2cd2b053c0f25912c4494bf9ea"},
+    {file = "black-25.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:09524b0e6af8ba7a3ffabdfc7a9922fb9adef60fed008c7cd2fc01f3048e6e6f"},
+    {file = "black-25.12.0-cp310-cp310-win_arm64.whl", hash = "sha256:b162653ed89eb942758efeb29d5e333ca5bb90e5130216f8369857db5955a7da"},
+    {file = "black-25.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0cfa263e85caea2cff57d8f917f9f51adae8e20b610e2b23de35b5b11ce691a"},
+    {file = "black-25.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a2f578ae20c19c50a382286ba78bfbeafdf788579b053d8e4980afb079ab9be"},
+    {file = "black-25.12.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e1b65634b0e471d07ff86ec338819e2ef860689859ef4501ab7ac290431f9b"},
+    {file = "black-25.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a3fa71e3b8dd9f7c6ac4d818345237dfb4175ed3bf37cd5a581dbc4c034f1ec5"},
+    {file = "black-25.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:51e267458f7e650afed8445dc7edb3187143003d52a1b710c7321aef22aa9655"},
+    {file = "black-25.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31f96b7c98c1ddaeb07dc0f56c652e25bdedaac76d5b68a059d998b57c55594a"},
+    {file = "black-25.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05dd459a19e218078a1f98178c13f861fe6a9a5f88fc969ca4d9b49eb1809783"},
+    {file = "black-25.12.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1f68c5eff61f226934be6b5b80296cf6939e5d2f0c2f7d543ea08b204bfaf59"},
+    {file = "black-25.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:274f940c147ddab4442d316b27f9e332ca586d39c85ecf59ebdea82cc9ee8892"},
+    {file = "black-25.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:169506ba91ef21e2e0591563deda7f00030cb466e747c4b09cb0a9dae5db2f43"},
+    {file = "black-25.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a05ddeb656534c3e27a05a29196c962877c83fa5503db89e68857d1161ad08a5"},
+    {file = "black-25.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ec77439ef3e34896995503865a85732c94396edcc739f302c5673a2315e1e7f"},
+    {file = "black-25.12.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e509c858adf63aa61d908061b52e580c40eae0dfa72415fa47ac01b12e29baf"},
+    {file = "black-25.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:252678f07f5bac4ff0d0e9b261fbb029fa530cfa206d0a636a34ab445ef8ca9d"},
+    {file = "black-25.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bc5b1c09fe3c931ddd20ee548511c64ebf964ada7e6f0763d443947fd1c603ce"},
+    {file = "black-25.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0a0953b134f9335c2434864a643c842c44fba562155c738a2a37a4d61f00cad5"},
+    {file = "black-25.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2355bbb6c3b76062870942d8cc450d4f8ac71f9c93c40122762c8784df49543f"},
+    {file = "black-25.12.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9678bd991cc793e81d19aeeae57966ee02909877cb65838ccffef24c3ebac08f"},
+    {file = "black-25.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:97596189949a8aad13ad12fcbb4ae89330039b96ad6742e6f6b45e75ad5cfd83"},
+    {file = "black-25.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:778285d9ea197f34704e3791ea9404cd6d07595745907dd2ce3da7a13627b29b"},
+    {file = "black-25.12.0-py3-none-any.whl", hash = "sha256:48ceb36c16dbc84062740049eef990bb2ce07598272e673c17d1a7720c71c828"},
+    {file = "black-25.12.0.tar.gz", hash = "sha256:8d3dd9cea14bff7ddc0eb243c811cdb1a011ebb4800a5f0335a01a68654796a7"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+pytokens = ">=0.3.0"
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.10)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "certifi"
+version = "2025.11.12"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"},
+    {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"},
+]
+
+[[package]]
+name = "cffi"
+version = "2.0.0"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "platform_python_implementation != \"PyPy\""
+files = [
+    {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
+    {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"},
+    {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"},
+    {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"},
+    {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"},
+    {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"},
+    {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"},
+    {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"},
+    {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"},
+    {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"},
+    {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"},
+    {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"},
+    {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"},
+    {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"},
+    {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"},
+    {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"},
+    {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"},
+    {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"},
+    {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"},
+    {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"},
+    {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"},
+    {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"},
+    {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"},
+    {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"},
+    {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"},
+    {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"},
+    {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"},
+    {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"},
+    {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"},
+    {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"},
+    {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"},
+    {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"},
+    {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"},
+    {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"},
+    {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"},
+    {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"},
+    {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"},
+    {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"},
+    {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"},
+    {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"},
+    {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"},
+    {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"},
+    {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"},
+    {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"},
+    {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"},
+    {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"},
+    {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"},
+    {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"},
+    {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"},
+    {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"},
+    {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"},
+    {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"},
+    {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"},
+    {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"},
+    {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"},
+    {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"},
+    {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"},
+    {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"},
+    {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"},
+    {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"},
+    {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"},
+    {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"},
+    {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"},
+    {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"},
+]
+
+[package.dependencies]
+pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.4"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"},
+    {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"},
+    {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"},
+    {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"},
+    {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"},
+    {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"},
+    {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"},
+    {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"},
+    {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"},
+    {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.8"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+groups = ["dev"]
+markers = "python_version < \"3.12\""
+files = [
+    {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
+    {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "click"
+version = "8.3.1"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.10"
+groups = ["dev"]
+markers = "python_version >= \"3.12\""
+files = [
+    {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"},
+    {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["dev"]
+markers = "platform_system == \"Windows\" or sys_platform == \"win32\""
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "cryptography"
+version = "43.0.3"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version < \"3.12\""
+files = [
+    {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"},
+    {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"},
+    {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"},
+    {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"},
+    {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"},
+    {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"},
+    {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"},
+    {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"},
+    {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"},
+    {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"},
+    {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"},
+    {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"},
+    {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"},
+    {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"},
+    {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"},
+    {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"},
+    {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"},
+    {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"},
+    {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"},
+    {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"},
+    {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"},
+    {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"},
+    {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"},
+    {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"},
+    {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"},
+    {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"},
+    {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
+nox = ["nox"]
+pep8test = ["check-sdist", "click", "mypy", "ruff"]
+sdist = ["build"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "cryptography"
+version = "46.0.3"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.8" +groups = ["main"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926"}, + {file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71"}, + {file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac"}, + {file = "cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018"}, + {file = "cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb"}, + {file = "cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c"}, + {file = "cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = 
"sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3"}, + {file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20"}, + {file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de"}, + {file = "cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914"}, + {file = "cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db"}, + {file = "cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21"}, + {file = "cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506"}, + {file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963"}, + {file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4"}, + {file = "cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df"}, + {file = "cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f"}, + {file = "cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = 
"sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372"}, + {file = "cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32"}, + {file = "cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c"}, + {file = "cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1"}, +] + +[package.dependencies] +cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox[uv] (>=2024.4.15)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==46.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "eradicate" +version = "2.3.0" +description = "Removes commented-out code." 
+optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "eradicate-2.3.0-py3-none-any.whl", hash = "sha256:2b29b3dd27171f209e4ddd8204b70c02f0682ae95eecb353f10e8d72b149c63e"}, + {file = "eradicate-2.3.0.tar.gz", hash = "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "flake8" +version = "7.3.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"}, + {file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.14.0,<2.15.0" +pyflakes = ">=3.4.0,<3.5.0" + +[[package]] +name = "flake8-bandit" +version = "4.1.1" +description = "Automated security testing with bandit and flake8." +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, + {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, +] + +[package.dependencies] +bandit = ">=1.7.3" +flake8 = ">=5.0.0" + +[[package]] +name = "flake8-black" +version = "0.4.0" +description = "flake8 plugin to call black as a code style validator" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "flake8_black-0.4.0-py3-none-any.whl", hash = "sha256:288762d0c9ea065782d87eeecbcc20c69079d17fe1d0f0445f0eb0b0ffb80c39"}, + {file = "flake8_black-0.4.0.tar.gz", hash = "sha256:bf226868f695dee48d55ff6d7747e900709bfd6f605b7a378c70e711e3fc26cb"}, +] + +[package.dependencies] +black = ">=22.1.0" +flake8 = ">=3" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +develop = ["build", "twine"] + +[[package]] +name = "flake8-bugbear" +version = "24.12.12" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
+optional = false +python-versions = ">=3.8.1" +groups = ["dev"] +markers = "python_version < \"3.12\"" +files = [ + {file = "flake8_bugbear-24.12.12-py3-none-any.whl", hash = "sha256:1b6967436f65ca22a42e5373aaa6f2d87966ade9aa38d4baf2a1be550767545e"}, + {file = "flake8_bugbear-24.12.12.tar.gz", hash = "sha256:46273cef0a6b6ff48ca2d69e472f41420a42a46e24b2a8972e4f0d6733d12a64"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +flake8 = ">=6.0.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] + +[[package]] +name = "flake8-bugbear" +version = "25.11.29" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +optional = false +python-versions = ">=3.10" +groups = ["dev"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "flake8_bugbear-25.11.29-py3-none-any.whl", hash = "sha256:9bf15e2970e736d2340da4c0a70493db964061c9c38f708cfe1f7b2d87392298"}, + {file = "flake8_bugbear-25.11.29.tar.gz", hash = "sha256:b5d06710f3d26e595541ad303ad4d5cb52578bd4bccbb2c2c0b2c72e243dafc8"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +flake8 = ">=7.2.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] + +[[package]] +name = "flake8-eol" +version = "0.0.8" +description = "Flake8 plugin to enforce Unix/Linux EOL consistency" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "flake8-eol-0.0.8.tar.gz", hash = "sha256:a12d836aef941de299984d6c7c4be58ab5d0953507bc68da3e15f09fee0a5e66"}, + {file = "flake8_eol-0.0.8-py3-none-any.whl", hash = "sha256:85759779b32250385cef6d97dd98abae61ec3619f93725857393f948702f06a5"}, +] + +[package.dependencies] +flake8 = ">=3.7.0" + +[[package]] +name = "flake8-eradicate" +version = "1.5.0" +description = "Flake8 plugin to find commented out code" +optional = false +python-versions = ">=3.8,<4.0" +groups = ["dev"] +files = [ + {file = "flake8_eradicate-1.5.0-py3-none-any.whl", hash = "sha256:18acc922ad7de623f5247c7d5595da068525ec5437dd53b22ec2259b96ce9d22"}, + {file = "flake8_eradicate-1.5.0.tar.gz", hash = "sha256:aee636cb9ecb5594a7cd92d67ad73eb69909e5cc7bd81710cf9d00970f3983a6"}, +] + +[package.dependencies] +attrs = "*" +eradicate = ">=2.0,<3.0" +flake8 = ">5" + +[[package]] +name = "flake8-pyproject" +version = "1.2.4" +description = "Flake8 plug-in loading the configuration from pyproject.toml" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "flake8_pyproject-1.2.4-py3-none-any.whl", hash = "sha256:ea34c057f9a9329c76d98723bb2bb498cc6ba8ff9872c4d19932d48c91249a77"}, +] + +[package.dependencies] +Flake8 = ">=5" +TOMLi = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["Flit (>=3.4)", "pyTest (>=7)", "pyTest-cov (>=7) ; python_version >= \"3.10\""] + +[[package]] +name = "flake8-quotes" +version = "3.4.0" +description = "Flake8 lint for quotes." 
+optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c"}, +] + +[package.dependencies] +flake8 = "*" +setuptools = "*" + +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version == \"3.9\"" +files = [ + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.12\"" +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, +] + +[[package]] +name = "isort" +version = "6.1.0" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.9.0" +groups = ["dev"] +markers = "python_version < \"3.12\"" +files = [ + {file = "isort-6.1.0-py3-none-any.whl", hash = "sha256:58d8927ecce74e5087aef019f778d4081a3b6c98f15a80ba35782ca8a2097784"}, + {file = "isort-6.1.0.tar.gz", hash = "sha256:9b8f96a14cfee0677e78e941ff62f03769a06d412aabb9e2a90487b3b7e8d481"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.6.0", markers = "python_version < \"3.10\""} + +[package.extras] +colors = ["colorama"] +plugins = ["setuptools"] + +[[package]] +name = "isort" +version = "7.0.0" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.10.0" +groups = ["dev"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1"}, + {file = "isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187"}, +] + +[package.extras] +colors = ["colorama"] +plugins = ["setuptools"] + +[[package]] +name = "maison" +version = "1.4.3" +description = "Read settings from config files" +optional = false +python-versions = ">=3.7.1,<4.0.0" +groups = ["dev"] +files = [ + {file = "maison-1.4.3-py3-none-any.whl", hash = "sha256:a36208d0befb3bd8aa3b002ac198ce6f6e61efe568b195132640f4032eff46ac"}, + {file = "maison-1.4.3.tar.gz", hash = "sha256:766222ce82ae27138256c4af9d0bc6b3226288349601e095dcc567884cf0ce36"}, +] + +[package.dependencies] +click = ">=8.0.1,<9.0.0" +pydantic = ">=2.5.3,<3.0.0" +toml = ">=0.10.2,<0.11.0" + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.12\"" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.10" +groups = ["dev"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"}, + {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "markdown-it-pyrs", "mistletoe (>=1.0,<2.0)", "mistune (>=3.0,<4.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins (>=0.5.0)"] +profiling = ["gprof2dot"] +rtd = ["ipykernel", "jupyter_sphinx", "mdit-py-plugins (>=0.5.0)", "myst-parser", "pyyaml", "sphinx", "sphinx-book-theme (>=1.0,<2.0)", "sphinx-copybutton", "sphinx-design"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions", "requests"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.12\"" +files = [ + {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, + {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "platformdirs" +version = "4.5.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.10" +groups = ["dev"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31"}, + {file = "platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda"}, +] + +[package.extras] +docs = ["furo (>=2025.9.25)", "proselint (>=0.14)", "sphinx (>=8.2.3)", "sphinx-autodoc-typehints (>=3.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.4.2)", "pytest-cov (>=7)", "pytest-mock (>=3.15.1)"] +type = ["mypy (>=1.18.2)"] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "pycodestyle" +version = "2.14.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"}, + {file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"}, +] + +[[package]] +name = "pycparser" +version = "2.23" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" +files = [ + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" + +[package.extras] +email 
= ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = 
"pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = 
"sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = 
"pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" + +[[package]] +name = "pyflakes" +version = "3.4.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"}, + {file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"}, +] + +[[package]] +name = "pygments" +version = "2.19.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyjwt" +version = "2.10.1" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pyprojroot" +version = "0.3.0" +description = "Project-oriented workflow in Python" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "pyprojroot-0.3.0-py3-none-any.whl", hash = "sha256:c426b51b17ab4f4d4f95b479cf5b6c22df59bb58fbd4f01b37a6977d29b99888"}, + {file = "pyprojroot-0.3.0.tar.gz", hash = "sha256:109705bb790968704958efcfc5ccce85d8e3dafa054897cc81371fcbbf56cb10"}, +] + +[package.dependencies] +typing-extensions = "*" + +[[package]] +name = "pytest" +version = "8.4.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.12\"" +files = [ + {file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, + {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, +] + +[package.dependencies] +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} +iniconfig = ">=1" +packaging = ">=20" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest" +version = "9.0.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b"}, + {file = "pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11"}, +] + +[package.dependencies] +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +iniconfig = ">=1.0.1" +packaging = ">=22" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = 
["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytokens" +version = "0.3.0" +description = "A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytokens-0.3.0-py3-none-any.whl", hash = "sha256:95b2b5eaf832e469d141a378872480ede3f251a5a5041b8ec6e581d3ac71bbf3"}, + {file = "pytokens-0.3.0.tar.gz", hash = "sha256:2f932b14ed08de5fcf0b391ace2642f858f1394c0857202959000b68ed7a458a"}, +] + +[package.extras] +dev = ["black", "build", "mypy", "pytest", "pytest-cov", "setuptools", "tox", "twine", "wheel"] + +[[package]] +name = "pyyaml" +version = "6.0.3" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = 
"pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, +] + +[[package]] +name = "requests" +version = "2.32.5" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "14.2.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +groups = ["dev"] +files = [ + {file = "rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd"}, + {file = "rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "setuptools" +version = "80.9.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "sh" +version = "2.2.2" +description = "Python subprocess replacement" +optional = false +python-versions = "<4.0,>=3.8.1" +groups = ["dev"] +files = [ + {file = "sh-2.2.2-py3-none-any.whl", hash = "sha256:e0b15b4ae8ffcd399bc8ffddcbd770a43c7a70a24b16773fbb34c001ad5d52af"}, + {file = "sh-2.2.2.tar.gz", hash = 
"sha256:653227a7c41a284ec5302173fbc044ee817c7bad5e6e4d8d55741b9aeb9eb65b"}, +] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "stackit-core" +version = "0.2.0" +description = "Core functionality for the STACKIT SDK for Python" +optional = false +python-versions = "<4.0,>=3.8" +groups = ["main"] +files = [ + {file = "stackit_core-0.2.0-py3-none-any.whl", hash = "sha256:04632fc6742790d08ddfcb7f2313e04d1254827397a80250f838a2f81b92645b"}, + {file = "stackit_core-0.2.0.tar.gz", hash = "sha256:b8af91877cdb060d6969a303d8cf20bc0b33b345afd91f679c44a987381e2d47"}, +] + +[package.dependencies] +cryptography = ">=43.0.1" +pydantic = ">=2.9.2" +pyjwt = ">=2.9.0" +requests = ">=2.32.3" +urllib3 = ">=2.2.3" + +[[package]] +name = "stevedore" +version = "5.5.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.12\"" +files = [ + {file = "stevedore-5.5.0-py3-none-any.whl", hash = "sha256:18363d4d268181e8e8452e71a38cd77630f345b2ef6b4a8d5614dac5ee0d18cf"}, + {file = "stevedore-5.5.0.tar.gz", hash = "sha256:d31496a4f4df9825e1a1e4f1f74d19abb0154aff311c3b376fcc89dae8fccd73"}, +] + +[[package]] +name = "stevedore" +version = "5.6.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "stevedore-5.6.0-py3-none-any.whl", hash = "sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820"}, + {file = "stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["dev"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.3.0" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "urllib3" +version = "2.6.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd"}, + {file = "urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797"}, +] + +[package.extras] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] + +[[package]] +name = "xdg" +version = "6.0.0" +description = "Variables defined by the XDG Base Directory Specification" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["dev"] +files = [ + {file = "xdg-6.0.0-py3-none-any.whl", hash = "sha256:df3510755b4395157fc04fc3b02467c777f3b3ca383257397f09ab0d4c16f936"}, + {file = "xdg-6.0.0.tar.gz", hash = "sha256:24278094f2d45e846d1eb28a2ebb92d7b67fc0cab5249ee3ce88c95f649a1c92"}, +] + +[[package]] +name = "zipp" +version = "3.23.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version == \"3.9\"" +files = [ + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.1" +python-versions = "^3.9" +content-hash = "65876db4be553e6b9cf53559da239c4ab32740cd8fd7ba995c30274c0bf41dde" diff --git a/services/sfs/pyproject.toml b/services/sfs/pyproject.toml new file mode 100644 index 000000000..e1fa95084 --- /dev/null +++ b/services/sfs/pyproject.toml @@ -0,0 +1,99 @@ +[project] +name = "stackit-sfs" + +[tool.poetry] +name = "stackit-sfs" +version = "v0.0.1a" +authors = [ + "STACKIT Developer Tools ", +] +description = "STACKIT File Storage (SFS)" +readme = "README.md" +#license = "NoLicense" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: Apache Software License", + "Operating System :: OS Independent", +] +packages = [ + { include = "stackit", from="src" } +] + +[tool.poetry.dependencies] +python = "^3.9" +stackit-core = ">=0.0.1a" +requests = ">=2.32.3" +pydantic = ">=2.9.2" +python-dateutil = ">=2.9.0.post0" + +[tool.poetry.group.dev.dependencies] +black = ">=24.8.0" +pytest = ">=8.3.3" +flake8 = [ + { version= ">=5.0.3", python="<3.12"}, + { version= ">=6.0.1", python=">=3.12"} +] +flake8-black = ">=0.3.6" +flake8-pyproject = ">=1.2.3" +autoimport = ">=1.6.1" +flake8-eol = ">=0.0.8" +flake8-eradicate = ">=1.5.0" +flake8-bandit = ">=4.1.1" +flake8-bugbear = ">=23.1.14" +flake8-quotes = ">=3.4.0" +isort = ">=5.13.2" + +[project.urls] +Homepage = "https://github.com/stackitcloud/stackit-sdk-python" +Issues = "https://github.com/stackitcloud/stackit-sdk-python/issues" + +[build-system] +requires = ["setuptools", 
"poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +pythonpath = [ + "src" +] +testpaths = [ + "tests" +] + +[tool.black] +line-length = 120 +exclude = """ +/( + .eggs + | .git + | .hg + | .mypy_cache + | .nox + | .pants.d + | .tox + | .venv + | _build + | buck-out + | build + | dist + | node_modules + | venv +)/ +""" + +[tool.isort] +profile = 'black' + +[tool.flake8] +exclude= [".eggs", ".git", ".hg", ".mypy_cache", ".tox", ".venv", ".devcontainer", "venv", "_build", "buck-out", "build", "dist"] +statistics = true +show-source = false +max-line-length = 120 +# E203,W503 and E704 are incompatible with the formatter black +# W291 needs to be disabled because some doc-strings get generated with trailing whitespace but black won't re-format comments +ignore = ["E203", "W503", "E704", "W291"] +inline-quotes = '"' +docstring-quotes = '"""' +multiline-quotes = '"""' +ban-relative-imports = true +# Exclude generated code +extend-exclude = [ "src/stackit/*/models/*", "src/stackit/*/api/*", "src/stackit/*/*.py" ] \ No newline at end of file diff --git a/services/sfs/src/stackit/sfs/__init__.py b/services/sfs/src/stackit/sfs/__init__.py new file mode 100644 index 000000000..b0a80fc97 --- /dev/null +++ b/services/sfs/src/stackit/sfs/__init__.py @@ -0,0 +1,191 @@ +# coding: utf-8 + +# flake8: noqa + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +__version__ = "1.0.0" + +# Define package exports +__all__ = [ + "DefaultApi", + "ApiResponse", + "ApiClient", + "HostConfiguration", + "OpenApiException", + "ApiTypeError", + "ApiValueError", + "ApiKeyError", + "ApiAttributeError", + "ApiException", + "CreateResourcePoolPayload", + "CreateResourcePoolResponse", + "CreateResourcePoolSnapshotPayload", + "CreateResourcePoolSnapshotResponse", + "CreateShareExportPolicyPayload", + "CreateShareExportPolicyRequestRule", + "CreateShareExportPolicyResponse", + "CreateSharePayload", + "CreateShareResponse", + "Error", + "GetResourcePoolResponse", + "GetResourcePoolSnapshotResponse", + "GetShareExportPolicyResponse", + "GetShareResponse", + "GoogleProtobufAny", + "ListPerformanceClassesResponse", + "ListResourcePoolSnapshotsResponse", + "ListResourcePoolsResponse", + "ListShareExportPoliciesResponse", + "ListSharesResponse", + "ListSnapshotSchedulesResponse", + "PerformanceClass", + "ResourcePool", + "ResourcePoolPerformanceClass", + "ResourcePoolSnapshot", + "ResourcePoolSpace", + "Share", + "ShareExportPolicy", + "ShareExportPolicyRule", + "SnapshotSchedule", + "Status", + "UpdateResourcePoolPayload", + "UpdateResourcePoolResponse", + "UpdateShareExportPolicyBodyRule", + "UpdateShareExportPolicyPayload", + "UpdateShareExportPolicyResponse", + "UpdateSharePayload", + "UpdateShareResponse", + "ValidationError", + "ValidationErrorField", +] + +# import apis into sdk package +from stackit.sfs.api.default_api import DefaultApi as DefaultApi +from stackit.sfs.api_client import ApiClient as ApiClient + +# import ApiClient +from stackit.sfs.api_response import ApiResponse as ApiResponse +from stackit.sfs.configuration import HostConfiguration as HostConfiguration +from stackit.sfs.exceptions import ApiAttributeError as ApiAttributeError +from stackit.sfs.exceptions import ApiException as ApiException +from stackit.sfs.exceptions import ApiKeyError as ApiKeyError +from 
stackit.sfs.exceptions import ApiTypeError as ApiTypeError +from stackit.sfs.exceptions import ApiValueError as ApiValueError +from stackit.sfs.exceptions import OpenApiException as OpenApiException + +# import models into sdk package +from stackit.sfs.models.create_resource_pool_payload import ( + CreateResourcePoolPayload as CreateResourcePoolPayload, +) +from stackit.sfs.models.create_resource_pool_response import ( + CreateResourcePoolResponse as CreateResourcePoolResponse, +) +from stackit.sfs.models.create_resource_pool_snapshot_payload import ( + CreateResourcePoolSnapshotPayload as CreateResourcePoolSnapshotPayload, +) +from stackit.sfs.models.create_resource_pool_snapshot_response import ( + CreateResourcePoolSnapshotResponse as CreateResourcePoolSnapshotResponse, +) +from stackit.sfs.models.create_share_export_policy_payload import ( + CreateShareExportPolicyPayload as CreateShareExportPolicyPayload, +) +from stackit.sfs.models.create_share_export_policy_request_rule import ( + CreateShareExportPolicyRequestRule as CreateShareExportPolicyRequestRule, +) +from stackit.sfs.models.create_share_export_policy_response import ( + CreateShareExportPolicyResponse as CreateShareExportPolicyResponse, +) +from stackit.sfs.models.create_share_payload import ( + CreateSharePayload as CreateSharePayload, +) +from stackit.sfs.models.create_share_response import ( + CreateShareResponse as CreateShareResponse, +) +from stackit.sfs.models.error import Error as Error +from stackit.sfs.models.get_resource_pool_response import ( + GetResourcePoolResponse as GetResourcePoolResponse, +) +from stackit.sfs.models.get_resource_pool_snapshot_response import ( + GetResourcePoolSnapshotResponse as GetResourcePoolSnapshotResponse, +) +from stackit.sfs.models.get_share_export_policy_response import ( + GetShareExportPolicyResponse as GetShareExportPolicyResponse, +) +from stackit.sfs.models.get_share_response import GetShareResponse as GetShareResponse +from stackit.sfs.models.google_protobuf_any import ( + GoogleProtobufAny as GoogleProtobufAny, +) +from stackit.sfs.models.list_performance_classes_response import ( + ListPerformanceClassesResponse as ListPerformanceClassesResponse, +) +from stackit.sfs.models.list_resource_pool_snapshots_response import ( + ListResourcePoolSnapshotsResponse as ListResourcePoolSnapshotsResponse, +) +from stackit.sfs.models.list_resource_pools_response import ( + ListResourcePoolsResponse as ListResourcePoolsResponse, +) +from stackit.sfs.models.list_share_export_policies_response import ( + ListShareExportPoliciesResponse as ListShareExportPoliciesResponse, +) +from stackit.sfs.models.list_shares_response import ( + ListSharesResponse as ListSharesResponse, +) +from stackit.sfs.models.list_snapshot_schedules_response import ( + ListSnapshotSchedulesResponse as ListSnapshotSchedulesResponse, +) +from stackit.sfs.models.performance_class import PerformanceClass as PerformanceClass +from stackit.sfs.models.resource_pool import ResourcePool as ResourcePool +from stackit.sfs.models.resource_pool_performance_class import ( + ResourcePoolPerformanceClass as ResourcePoolPerformanceClass, +) +from stackit.sfs.models.resource_pool_snapshot import ( + ResourcePoolSnapshot as ResourcePoolSnapshot, +) +from stackit.sfs.models.resource_pool_space import ( + ResourcePoolSpace as ResourcePoolSpace, +) +from stackit.sfs.models.share import Share as Share +from stackit.sfs.models.share_export_policy import ( + ShareExportPolicy as ShareExportPolicy, +) +from 
stackit.sfs.models.share_export_policy_rule import ( + ShareExportPolicyRule as ShareExportPolicyRule, +) +from stackit.sfs.models.snapshot_schedule import SnapshotSchedule as SnapshotSchedule +from stackit.sfs.models.status import Status as Status +from stackit.sfs.models.update_resource_pool_payload import ( + UpdateResourcePoolPayload as UpdateResourcePoolPayload, +) +from stackit.sfs.models.update_resource_pool_response import ( + UpdateResourcePoolResponse as UpdateResourcePoolResponse, +) +from stackit.sfs.models.update_share_export_policy_body_rule import ( + UpdateShareExportPolicyBodyRule as UpdateShareExportPolicyBodyRule, +) +from stackit.sfs.models.update_share_export_policy_payload import ( + UpdateShareExportPolicyPayload as UpdateShareExportPolicyPayload, +) +from stackit.sfs.models.update_share_export_policy_response import ( + UpdateShareExportPolicyResponse as UpdateShareExportPolicyResponse, +) +from stackit.sfs.models.update_share_payload import ( + UpdateSharePayload as UpdateSharePayload, +) +from stackit.sfs.models.update_share_response import ( + UpdateShareResponse as UpdateShareResponse, +) +from stackit.sfs.models.validation_error import ValidationError as ValidationError +from stackit.sfs.models.validation_error_field import ( + ValidationErrorField as ValidationErrorField, +) diff --git a/services/sfs/src/stackit/sfs/api/__init__.py b/services/sfs/src/stackit/sfs/api/__init__.py new file mode 100644 index 000000000..60abe98b8 --- /dev/null +++ b/services/sfs/src/stackit/sfs/api/__init__.py @@ -0,0 +1,4 @@ +# flake8: noqa + +# import apis into api package +from stackit.sfs.api.default_api import DefaultApi diff --git a/services/sfs/src/stackit/sfs/api/default_api.py b/services/sfs/src/stackit/sfs/api/default_api.py new file mode 100644 index 000000000..f972f47f3 --- /dev/null +++ b/services/sfs/src/stackit/sfs/api/default_api.py @@ -0,0 +1,5783 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
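+
+ Example (an illustrative sketch, not generator output: the project ID and
+ region below are placeholders, and the payload fields are elided because
+ they depend on the request model):
+
+ from stackit.sfs import CreateResourcePoolPayload, DefaultApi
+
+ api = DefaultApi()
+ pool = api.create_resource_pool(
+ project_id="00000000-0000-0000-0000-000000000000",
+ region="eu01",
+ create_resource_pool_payload=CreateResourcePoolPayload(...),
+ )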
+""" # noqa: E501 + +from typing import Any, Dict, List, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call +from stackit.core.configuration import Configuration +from typing_extensions import Annotated + +from stackit.sfs.api_client import ApiClient, RequestSerialized +from stackit.sfs.api_response import ApiResponse +from stackit.sfs.models.create_resource_pool_payload import CreateResourcePoolPayload +from stackit.sfs.models.create_resource_pool_response import CreateResourcePoolResponse +from stackit.sfs.models.create_resource_pool_snapshot_payload import ( + CreateResourcePoolSnapshotPayload, +) +from stackit.sfs.models.create_resource_pool_snapshot_response import ( + CreateResourcePoolSnapshotResponse, +) +from stackit.sfs.models.create_share_export_policy_payload import ( + CreateShareExportPolicyPayload, +) +from stackit.sfs.models.create_share_export_policy_response import ( + CreateShareExportPolicyResponse, +) +from stackit.sfs.models.create_share_payload import CreateSharePayload +from stackit.sfs.models.create_share_response import CreateShareResponse +from stackit.sfs.models.get_resource_pool_response import GetResourcePoolResponse +from stackit.sfs.models.get_resource_pool_snapshot_response import ( + GetResourcePoolSnapshotResponse, +) +from stackit.sfs.models.get_share_export_policy_response import ( + GetShareExportPolicyResponse, +) +from stackit.sfs.models.get_share_response import GetShareResponse +from stackit.sfs.models.list_performance_classes_response import ( + ListPerformanceClassesResponse, +) +from stackit.sfs.models.list_resource_pool_snapshots_response import ( + ListResourcePoolSnapshotsResponse, +) +from stackit.sfs.models.list_resource_pools_response import ListResourcePoolsResponse +from stackit.sfs.models.list_share_export_policies_response import ( + ListShareExportPoliciesResponse, +) +from stackit.sfs.models.list_shares_response import ListSharesResponse +from stackit.sfs.models.list_snapshot_schedules_response import ( + ListSnapshotSchedulesResponse, +) +from stackit.sfs.models.update_resource_pool_payload import UpdateResourcePoolPayload +from stackit.sfs.models.update_resource_pool_response import UpdateResourcePoolResponse +from stackit.sfs.models.update_share_export_policy_payload import ( + UpdateShareExportPolicyPayload, +) +from stackit.sfs.models.update_share_export_policy_response import ( + UpdateShareExportPolicyResponse, +) +from stackit.sfs.models.update_share_payload import UpdateSharePayload +from stackit.sfs.models.update_share_response import UpdateShareResponse +from stackit.sfs.rest import RESTResponseType + + +class DefaultApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, configuration: Configuration = None) -> None: + if configuration is None: + configuration = Configuration() + self.configuration = configuration + self.api_client = ApiClient(self.configuration) + + @validate_call + def create_resource_pool( + self, + project_id: StrictStr, + region: StrictStr, + create_resource_pool_payload: CreateResourcePoolPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CreateResourcePoolResponse: + """Create Resource Pool + + Create a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param create_resource_pool_payload: (required) + :type create_resource_pool_payload: CreateResourcePoolPayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_resource_pool_serialize( + project_id=project_id, + region=region, + create_resource_pool_payload=create_resource_pool_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "CreateResourcePoolResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def create_resource_pool_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + create_resource_pool_payload: CreateResourcePoolPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CreateResourcePoolResponse]: + """Create Resource Pool + + Create a Resource Pool in a project. 
+ + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param create_resource_pool_payload: (required) + :type create_resource_pool_payload: CreateResourcePoolPayload + :param _request_timeout: timeout setting for this request. If one + number is provided, it will be the total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_resource_pool_serialize( + project_id=project_id, + region=region, + create_resource_pool_payload=create_resource_pool_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "CreateResourcePoolResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def create_resource_pool_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + create_resource_pool_payload: CreateResourcePoolPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create Resource Pool + + Create a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param create_resource_pool_payload: (required) + :type create_resource_pool_payload: CreateResourcePoolPayload + :param _request_timeout: timeout setting for this request. If one + number is provided, it will be the total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request.
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_resource_pool_serialize( + project_id=project_id, + region=region, + create_resource_pool_payload=create_resource_pool_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "CreateResourcePoolResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _create_resource_pool_serialize( + self, + project_id, + region, + create_resource_pool_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if create_resource_pool_payload is not None: + _body_params = create_resource_pool_payload + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params["Content-Type"] = _content_type + else: + _default_content_type = self.api_client.select_header_content_type(["application/json"]) + if _default_content_type is not None: + _header_params["Content-Type"] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="POST", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def create_resource_pool_snapshot( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + create_resource_pool_snapshot_payload: CreateResourcePoolSnapshotPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CreateResourcePoolSnapshotResponse: + """Create Resource Pool Snapshot + + Creates a Snapshot of a Resource Pool in a project. 
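+
+ A rough usage sketch, reusing the names from the module docstring example
+ above (the resource pool ID is a placeholder and the payload fields are
+ elided because they depend on the request model):
+
+ snap = api.create_resource_pool_snapshot(
+ project_id=project_id,
+ region=region,
+ resource_pool_id="<resource-pool-id>",
+ create_resource_pool_snapshot_payload=CreateResourcePoolSnapshotPayload(...),
+ )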
+ + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param create_resource_pool_snapshot_payload: (required) + :type create_resource_pool_snapshot_payload: CreateResourcePoolSnapshotPayload + :param _request_timeout: timeout setting for this request. If one + number is provided, it will be the total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_resource_pool_snapshot_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + create_resource_pool_snapshot_payload=create_resource_pool_snapshot_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "CreateResourcePoolSnapshotResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def create_resource_pool_snapshot_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + create_resource_pool_snapshot_payload: CreateResourcePoolSnapshotPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CreateResourcePoolSnapshotResponse]: + """Create Resource Pool Snapshot + + Creates a Snapshot of a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param create_resource_pool_snapshot_payload: (required) + :type create_resource_pool_snapshot_payload: CreateResourcePoolSnapshotPayload + :param _request_timeout: timeout setting for this request. If one + number is provided, it will be the total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request.
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_resource_pool_snapshot_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + create_resource_pool_snapshot_payload=create_resource_pool_snapshot_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "CreateResourcePoolSnapshotResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def create_resource_pool_snapshot_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + create_resource_pool_snapshot_payload: CreateResourcePoolSnapshotPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create Resource Pool Snapshot + + Creates a Snapshot of a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param create_resource_pool_snapshot_payload: (required) + :type create_resource_pool_snapshot_payload: CreateResourcePoolSnapshotPayload + :param _request_timeout: timeout setting for this request. If one + number is provided, it will be the total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object.
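+
+ A rough sketch of this unbuffered variant; treating the return value as a
+ urllib3-style response object is an assumption based on rest.py:
+
+ raw = api.create_resource_pool_snapshot_without_preload_content(
+ project_id=project_id,
+ region=region,
+ resource_pool_id="<resource-pool-id>",
+ create_resource_pool_snapshot_payload=CreateResourcePoolSnapshotPayload(...),
+ )
+ body = raw.read() # raw bytes; nothing is deserialized for you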
+ """ # noqa: E501 + + _param = self._create_resource_pool_snapshot_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + create_resource_pool_snapshot_payload=create_resource_pool_snapshot_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "CreateResourcePoolSnapshotResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _create_resource_pool_snapshot_serialize( + self, + project_id, + region, + resource_pool_id, + create_resource_pool_snapshot_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if create_resource_pool_snapshot_payload is not None: + _body_params = create_resource_pool_snapshot_payload + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params["Content-Type"] = _content_type + else: + _default_content_type = self.api_client.select_header_content_type(["application/json"]) + if _default_content_type is not None: + _header_params["Content-Type"] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="POST", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}/snapshots", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def create_share( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + create_share_payload: CreateSharePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CreateShareResponse: + """Create Share + + Creates a Share in a Resource Pool in a project. 
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param resource_pool_id: (required)
+ :type resource_pool_id: str
+ :param create_share_payload: (required)
+ :type create_share_payload: CreateSharePayload
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._create_share_serialize(
+ project_id=project_id,
+ region=region,
+ resource_pool_id=resource_pool_id,
+ create_share_payload=create_share_payload,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CreateShareResponse",
+ "400": "ValidationError",
+ "401": "str",
+ "500": "Error",
+ }
+ response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ def create_share_with_http_info(
+ self,
+ project_id: StrictStr,
+ region: StrictStr,
+ resource_pool_id: StrictStr,
+ create_share_payload: CreateSharePayload,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[CreateShareResponse]:
+ """Create Share
+
+ Creates a Share in a Resource Pool in a project.
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param resource_pool_id: (required)
+ :type resource_pool_id: str
+ :param create_share_payload: (required)
+ :type create_share_payload: CreateSharePayload
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._create_share_serialize(
+ project_id=project_id,
+ region=region,
+ resource_pool_id=resource_pool_id,
+ create_share_payload=create_share_payload,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CreateShareResponse",
+ "400": "ValidationError",
+ "401": "str",
+ "500": "Error",
+ }
+ response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ def create_share_without_preload_content(
+ self,
+ project_id: StrictStr,
+ region: StrictStr,
+ resource_pool_id: StrictStr,
+ create_share_payload: CreateSharePayload,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create Share
+
+ Creates a Share in a Resource Pool in a project.
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param resource_pool_id: (required)
+ :type resource_pool_id: str
+ :param create_share_payload: (required)
+ :type create_share_payload: CreateSharePayload
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
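+
+ Example: a minimal usage sketch. ``api`` and all identifiers are
+ placeholders; the required fields of CreateSharePayload depend on
+ the model and are omitted here::
+
+     raw = api.create_share_without_preload_content(
+         project_id="my-project-id",
+         region="eu01",
+         resource_pool_id="my-pool-id",
+         create_share_payload=CreateSharePayload(),
+     )
+     if raw.status == 200:
+         body = raw.data  # undeserialized JSON bytes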
+ """ # noqa: E501 + + _param = self._create_share_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + create_share_payload=create_share_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "CreateShareResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _create_share_serialize( + self, + project_id, + region, + resource_pool_id, + create_share_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if create_share_payload is not None: + _body_params = create_share_payload + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params["Content-Type"] = _content_type + else: + _default_content_type = self.api_client.select_header_content_type(["application/json"]) + if _default_content_type is not None: + _header_params["Content-Type"] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="POST", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}/shares", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def create_share_export_policy( + self, + project_id: StrictStr, + region: StrictStr, + create_share_export_policy_payload: CreateShareExportPolicyPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CreateShareExportPolicyResponse: + """Create Share Export Policy + + Creates a ShareExportPolicy in a Resource Pool in a project. 
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param create_share_export_policy_payload: (required)
+ :type create_share_export_policy_payload: CreateShareExportPolicyPayload
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._create_share_export_policy_serialize(
+ project_id=project_id,
+ region=region,
+ create_share_export_policy_payload=create_share_export_policy_payload,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CreateShareExportPolicyResponse",
+ "400": "ValidationError",
+ "401": "str",
+ "500": "Error",
+ }
+ response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ def create_share_export_policy_with_http_info(
+ self,
+ project_id: StrictStr,
+ region: StrictStr,
+ create_share_export_policy_payload: CreateShareExportPolicyPayload,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[CreateShareExportPolicyResponse]:
+ """Create Share Export Policy
+
+ Creates a ShareExportPolicy in a Resource Pool in a project.
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param create_share_export_policy_payload: (required)
+ :type create_share_export_policy_payload: CreateShareExportPolicyPayload
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._create_share_export_policy_serialize(
+ project_id=project_id,
+ region=region,
+ create_share_export_policy_payload=create_share_export_policy_payload,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "CreateShareExportPolicyResponse",
+ "400": "ValidationError",
+ "401": "str",
+ "500": "Error",
+ }
+ response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+ @validate_call
+ def create_share_export_policy_without_preload_content(
+ self,
+ project_id: StrictStr,
+ region: StrictStr,
+ create_share_export_policy_payload: CreateShareExportPolicyPayload,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create Share Export Policy
+
+ Creates a ShareExportPolicy in a Resource Pool in a project.
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param create_share_export_policy_payload: (required)
+ :type create_share_export_policy_payload: CreateShareExportPolicyPayload
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
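+
+ Example: a minimal usage sketch. ``api`` and the identifiers are
+ placeholders; note that the export policy is created at region
+ scope, so no resource pool id is passed::
+
+     raw = api.create_share_export_policy_without_preload_content(
+         project_id="my-project-id",
+         region="eu01",
+         create_share_export_policy_payload=CreateShareExportPolicyPayload(),
+     )
+     body = raw.data  # raw JSON bytes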
+ """ # noqa: E501 + + _param = self._create_share_export_policy_serialize( + project_id=project_id, + region=region, + create_share_export_policy_payload=create_share_export_policy_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "CreateShareExportPolicyResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _create_share_export_policy_serialize( + self, + project_id, + region, + create_share_export_policy_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if create_share_export_policy_payload is not None: + _body_params = create_share_export_policy_payload + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params["Content-Type"] = _content_type + else: + _default_content_type = self.api_client.select_header_content_type(["application/json"]) + if _default_content_type is not None: + _header_params["Content-Type"] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="POST", + resource_path="/v1beta/projects/{projectId}/regions/{region}/shareExportPolicies", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def delete_resource_pool( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete Resource Pool + + Deletes a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_resource_pool_serialize(
+ project_id=project_id,
+ region=region,
+ resource_pool_id=resource_pool_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "object",
+ "400": "ValidationError",
+ "401": "str",
+ "500": "Error",
+ }
+ response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ def delete_resource_pool_with_http_info(
+ self,
+ project_id: StrictStr,
+ region: StrictStr,
+ resource_pool_id: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[object]:
+ """Delete Resource Pool
+
+ Deletes a Resource Pool in a project.
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param resource_pool_id: (required)
+ :type resource_pool_id: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
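+
+ Example: an illustrative sketch of inspecting the wrapped response.
+ ``api`` is a placeholder instance, and ``status_code``/``data`` are
+ assumed to be the usual ApiResponse attributes of generated clients::
+
+     resp = api.delete_resource_pool_with_http_info(
+         project_id="my-project-id",
+         region="eu01",
+         resource_pool_id="my-pool-id",
+     )
+     print(resp.status_code)  # HTTP status of the delete call
+     print(resp.data)         # deserialized result object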
+ """ # noqa: E501 + + _param = self._delete_resource_pool_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "object", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def delete_resource_pool_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete Resource Pool + + Deletes a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_resource_pool_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "object", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _delete_resource_pool_serialize( + self, + project_id, + region, + resource_pool_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="DELETE", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def delete_resource_pool_snapshot( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + snapshot_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete Resource Pool Snapshot + + Deletes a Snapshot from a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param snapshot_name: (required) + :type snapshot_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_resource_pool_snapshot_serialize(
+ project_id=project_id,
+ region=region,
+ resource_pool_id=resource_pool_id,
+ snapshot_name=snapshot_name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "object",
+ "400": "ValidationError",
+ "401": "str",
+ "500": "Error",
+ }
+ response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ def delete_resource_pool_snapshot_with_http_info(
+ self,
+ project_id: StrictStr,
+ region: StrictStr,
+ resource_pool_id: StrictStr,
+ snapshot_name: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[object]:
+ """Delete Resource Pool Snapshot
+
+ Deletes a Snapshot from a Resource Pool in a project.
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param resource_pool_id: (required)
+ :type resource_pool_id: str
+ :param snapshot_name: (required)
+ :type snapshot_name: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._delete_resource_pool_snapshot_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + snapshot_name=snapshot_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "object", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def delete_resource_pool_snapshot_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + snapshot_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete Resource Pool Snapshot + + Deletes a Snapshot from a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param snapshot_name: (required) + :type snapshot_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_resource_pool_snapshot_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + snapshot_name=snapshot_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "object", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _delete_resource_pool_snapshot_serialize( + self, + project_id, + region, + resource_pool_id, + snapshot_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + if snapshot_name is not None: + _path_params["snapshotName"] = snapshot_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="DELETE", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}/snapshots/{snapshotName}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def delete_share( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + share_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete Share + + Deletes a Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param share_id: (required) + :type share_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_share_serialize(
+ project_id=project_id,
+ region=region,
+ resource_pool_id=resource_pool_id,
+ share_id=share_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "object",
+ "400": "ValidationError",
+ "401": "str",
+ "500": "Error",
+ }
+ response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ def delete_share_with_http_info(
+ self,
+ project_id: StrictStr,
+ region: StrictStr,
+ resource_pool_id: StrictStr,
+ share_id: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[object]:
+ """Delete Share
+
+ Deletes a Share in a Resource Pool in a project.
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param resource_pool_id: (required)
+ :type resource_pool_id: str
+ :param share_id: (required)
+ :type share_id: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._delete_share_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + share_id=share_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "object", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def delete_share_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + share_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete Share + + Deletes a Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param share_id: (required) + :type share_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_share_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + share_id=share_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "object", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _delete_share_serialize( + self, + project_id, + region, + resource_pool_id, + share_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + if share_id is not None: + _path_params["shareId"] = share_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="DELETE", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}/shares/{shareId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def delete_share_export_policy( + self, + project_id: StrictStr, + region: StrictStr, + policy_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Delete Share Export Policy + + Deletes a ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param policy_id: (required) + :type policy_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_share_export_policy_serialize(
+ project_id=project_id,
+ region=region,
+ policy_id=policy_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "object",
+ "400": "ValidationError",
+ "401": "str",
+ "500": "Error",
+ }
+ response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ def delete_share_export_policy_with_http_info(
+ self,
+ project_id: StrictStr,
+ region: StrictStr,
+ policy_id: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[object]:
+ """Delete Share Export Policy
+
+ Deletes a ShareExportPolicy in a Resource Pool in a project.
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param policy_id: (required)
+ :type policy_id: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._delete_share_export_policy_serialize( + project_id=project_id, + region=region, + policy_id=policy_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "object", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def delete_share_export_policy_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + policy_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete Share Export Policy + + Deletes a ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param policy_id: (required) + :type policy_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_share_export_policy_serialize( + project_id=project_id, + region=region, + policy_id=policy_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "object", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _delete_share_export_policy_serialize( + self, + project_id, + region, + policy_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if policy_id is not None: + _path_params["policyId"] = policy_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="DELETE", + resource_path="/v1beta/projects/{projectId}/regions/{region}/shareExportPolicies/{policyId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def get_resource_pool( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetResourcePoolResponse: + """Get Resource Pool + + Retrieves a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_resource_pool_serialize(
+ project_id=project_id,
+ region=region,
+ resource_pool_id=resource_pool_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index,
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ "200": "GetResourcePoolResponse",
+ "400": "ValidationError",
+ "401": "str",
+ "500": "Error",
+ }
+ response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+ @validate_call
+ def get_resource_pool_with_http_info(
+ self,
+ project_id: StrictStr,
+ region: StrictStr,
+ resource_pool_id: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]],
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[GetResourcePoolResponse]:
+ """Get Resource Pool
+
+ Retrieves a Resource Pool in a project.
+
+ :param project_id: (required)
+ :type project_id: str
+ :param region: (required)
+ :type region: str
+ :param resource_pool_id: (required)
+ :type resource_pool_id: str
+ :param _request_timeout: timeout setting for this request. If one
+ number is provided, it will be the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._get_resource_pool_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetResourcePoolResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def get_resource_pool_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Resource Pool + + Retrieves a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_resource_pool_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetResourcePoolResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _get_resource_pool_serialize( + self, + project_id, + region, + resource_pool_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def get_resource_pool_snapshot( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + snapshot_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetResourcePoolSnapshotResponse: + """Get Resource Pool Snapshot + + Retrieves a Snapshot from a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param snapshot_name: (required) + :type snapshot_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_resource_pool_snapshot_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + snapshot_name=snapshot_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetResourcePoolSnapshotResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def get_resource_pool_snapshot_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + snapshot_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetResourcePoolSnapshotResponse]: + """Get Resource Pool Snapshot + + Retrieves a Snapshot from a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param snapshot_name: (required) + :type snapshot_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
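+
+        A minimal usage sketch (illustrative only; every identifier below is
+        a placeholder, and the client is assumed to be configured
+        elsewhere)::
+
+            api = DefaultApi()
+            api_response = api.get_resource_pool_snapshot_with_http_info(
+                project_id="my-project-id",
+                region="eu01",
+                resource_pool_id="my-resource-pool-id",
+                snapshot_name="my-snapshot",
+            )
+            print(api_response.status_code, api_response.data)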
+ """ # noqa: E501 + + _param = self._get_resource_pool_snapshot_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + snapshot_name=snapshot_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetResourcePoolSnapshotResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def get_resource_pool_snapshot_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + snapshot_name: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Resource Pool Snapshot + + Retrieves a Snapshot from a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param snapshot_name: (required) + :type snapshot_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_resource_pool_snapshot_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + snapshot_name=snapshot_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetResourcePoolSnapshotResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _get_resource_pool_snapshot_serialize( + self, + project_id, + region, + resource_pool_id, + snapshot_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + if snapshot_name is not None: + _path_params["snapshotName"] = snapshot_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}/snapshots/{snapshotName}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def get_share( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + share_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetShareResponse: + """Get Share + + Retrieves a Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param share_id: (required) + :type share_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_share_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + share_id=share_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetShareResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def get_share_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + share_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetShareResponse]: + """Get Share + + Retrieves a Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param share_id: (required) + :type share_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
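+
+        A minimal usage sketch (illustrative only; all identifiers are
+        placeholders and the client is assumed to be configured elsewhere)::
+
+            api = DefaultApi()
+            api_response = api.get_share_with_http_info(
+                project_id="my-project-id",
+                region="eu01",
+                resource_pool_id="my-resource-pool-id",
+                share_id="my-share-id",
+            )
+            print(api_response.status_code, api_response.data)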
+ """ # noqa: E501 + + _param = self._get_share_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + share_id=share_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetShareResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def get_share_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + share_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Share + + Retrieves a Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param share_id: (required) + :type share_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_share_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + share_id=share_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetShareResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _get_share_serialize( + self, + project_id, + region, + resource_pool_id, + share_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + if share_id is not None: + _path_params["shareId"] = share_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}/shares/{shareId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def get_share_export_policy( + self, + project_id: StrictStr, + region: StrictStr, + policy_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetShareExportPolicyResponse: + """Get Share Export Policy + + Retrieves a ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param policy_id: (required) + :type policy_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_share_export_policy_serialize( + project_id=project_id, + region=region, + policy_id=policy_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetShareExportPolicyResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def get_share_export_policy_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + policy_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetShareExportPolicyResponse]: + """Get Share Export Policy + + Retrieves a ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param policy_id: (required) + :type policy_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
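+
+        A minimal usage sketch (illustrative only; all identifiers are
+        placeholders and the client is assumed to be configured elsewhere)::
+
+            api = DefaultApi()
+            api_response = api.get_share_export_policy_with_http_info(
+                project_id="my-project-id",
+                region="eu01",
+                policy_id="my-policy-id",
+            )
+            print(api_response.status_code, api_response.data)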
+ """ # noqa: E501 + + _param = self._get_share_export_policy_serialize( + project_id=project_id, + region=region, + policy_id=policy_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetShareExportPolicyResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def get_share_export_policy_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + policy_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Share Export Policy + + Retrieves a ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param policy_id: (required) + :type policy_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_share_export_policy_serialize( + project_id=project_id, + region=region, + policy_id=policy_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "GetShareExportPolicyResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _get_share_export_policy_serialize( + self, + project_id, + region, + policy_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if policy_id is not None: + _path_params["policyId"] = policy_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/projects/{projectId}/regions/{region}/shareExportPolicies/{policyId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def list_performance_classes( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ListPerformanceClassesResponse: + """List Resource Performance Classes + + Lists all performances classes available + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_performance_classes_serialize( + _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListPerformanceClassesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def list_performance_classes_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ListPerformanceClassesResponse]: + """List Resource Performance Classes + + Lists all performances classes available + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
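+
+        A minimal usage sketch (illustrative only; this endpoint takes no
+        path parameters, and the client is assumed to be configured
+        elsewhere)::
+
+            api = DefaultApi()
+            api_response = api.list_performance_classes_with_http_info()
+            print(api_response.status_code, api_response.data)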
+ """ # noqa: E501 + + _param = self._list_performance_classes_serialize( + _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListPerformanceClassesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def list_performance_classes_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Resource Performance Classes + + Lists all performances classes available + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_performance_classes_serialize( + _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListPerformanceClassesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _list_performance_classes_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/resourcePoolPerformanceClasses", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def list_resource_pool_snapshots( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ListResourcePoolSnapshotsResponse: + """List Resource Pool Snapshots + + Lists the Snapshots of a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_resource_pool_snapshots_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListResourcePoolSnapshotsResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def list_resource_pool_snapshots_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ListResourcePoolSnapshotsResponse]: + """List Resource Pool Snapshots + + Lists the Snapshots of a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
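+
+        A minimal usage sketch (illustrative only; all identifiers are
+        placeholders and the client is assumed to be configured elsewhere)::
+
+            api = DefaultApi()
+            api_response = api.list_resource_pool_snapshots_with_http_info(
+                project_id="my-project-id",
+                region="eu01",
+                resource_pool_id="my-resource-pool-id",
+            )
+            print(api_response.status_code, api_response.data)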
+ """ # noqa: E501 + + _param = self._list_resource_pool_snapshots_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListResourcePoolSnapshotsResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def list_resource_pool_snapshots_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Resource Pool Snapshots + + Lists the Snapshots of a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_resource_pool_snapshots_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListResourcePoolSnapshotsResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _list_resource_pool_snapshots_serialize( + self, + project_id, + region, + resource_pool_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}/snapshots", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def list_resource_pools( + self, + project_id: StrictStr, + region: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ListResourcePoolsResponse: + """List Resource Pools + + Lists Resource Pools in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_resource_pools_serialize( + project_id=project_id, + region=region, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListResourcePoolsResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def list_resource_pools_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ListResourcePoolsResponse]: + """List Resource Pools + + Lists Resource Pools in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
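+
+        A minimal usage sketch (illustrative only; all identifiers are
+        placeholders and the client is assumed to be configured elsewhere)::
+
+            api = DefaultApi()
+            api_response = api.list_resource_pools_with_http_info(
+                project_id="my-project-id",
+                region="eu01",
+            )
+            print(api_response.status_code, api_response.data)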
+ """ # noqa: E501 + + _param = self._list_resource_pools_serialize( + project_id=project_id, + region=region, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListResourcePoolsResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def list_resource_pools_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Resource Pools + + Lists Resource Pools in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_resource_pools_serialize( + project_id=project_id, + region=region, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListResourcePoolsResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _list_resource_pools_serialize( + self, + project_id, + region, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def list_share_export_policies( + self, + project_id: StrictStr, + region: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ListShareExportPoliciesResponse: + """List Share Export Policies + + Lists ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_share_export_policies_serialize( + project_id=project_id, + region=region, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListShareExportPoliciesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def list_share_export_policies_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ListShareExportPoliciesResponse]: + """List Share Export Policies + + Lists ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
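+
+ Example (an illustrative sketch, not generator output; assumes ``api`` is a
+ configured ``DefaultApi`` instance and the identifiers are placeholders)::
+
+     api_response = api.list_share_export_policies_with_http_info(
+         project_id="my-project-id",
+         region="eu01",
+     )
+     print(api_response.status_code)
+     policies = api_response.data  # the deserialized ListShareExportPoliciesResponse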
+ """ # noqa: E501 + + _param = self._list_share_export_policies_serialize( + project_id=project_id, + region=region, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListShareExportPoliciesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def list_share_export_policies_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Share Export Policies + + Lists ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_share_export_policies_serialize( + project_id=project_id, + region=region, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListShareExportPoliciesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _list_share_export_policies_serialize( + self, + project_id, + region, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/projects/{projectId}/regions/{region}/shareExportPolicies", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def list_shares( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ListSharesResponse: + """List Shares + + Lists Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_shares_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListSharesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def list_shares_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ListSharesResponse]: + """List Shares + + Lists Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
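+
+ Example (an illustrative sketch, not generator output; assumes ``api`` is a
+ configured ``DefaultApi`` instance and the identifiers are placeholders)::
+
+     api_response = api.list_shares_with_http_info(
+         project_id="my-project-id",
+         region="eu01",
+         resource_pool_id="my-pool-id",
+     )
+     print(api_response.status_code)
+     shares = api_response.data  # the deserialized ListSharesResponse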
+ """ # noqa: E501 + + _param = self._list_shares_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListSharesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def list_shares_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Shares + + Lists Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_shares_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListSharesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _list_shares_serialize( + self, + project_id, + region, + resource_pool_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}/shares", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def list_snapshot_schedules( + self, + project_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ListSnapshotSchedulesResponse: + """List Snapshot Schedules + + Lists snapshot schedules + + :param project_id: (required) + :type project_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._list_snapshot_schedules_serialize( + project_id=project_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListSnapshotSchedulesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def list_snapshot_schedules_with_http_info( + self, + project_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ListSnapshotSchedulesResponse]: + """List Snapshot Schedules + + Lists snapshot schedules + + :param project_id: (required) + :type project_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
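+
+ Example (an illustrative sketch, not generator output; assumes ``api`` is a
+ configured ``DefaultApi`` instance and the identifier is a placeholder)::
+
+     api_response = api.list_snapshot_schedules_with_http_info(
+         project_id="my-project-id",
+     )
+     print(api_response.status_code)
+     schedules = api_response.data  # the deserialized ListSnapshotSchedulesResponse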
+ """ # noqa: E501 + + _param = self._list_snapshot_schedules_serialize( + project_id=project_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListSnapshotSchedulesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def list_snapshot_schedules_without_preload_content( + self, + project_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Snapshot Schedules + + Lists snapshot schedules + + :param project_id: (required) + :type project_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_snapshot_schedules_serialize( + project_id=project_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "ListSnapshotSchedulesResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _list_snapshot_schedules_serialize( + self, + project_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="GET", + resource_path="/v1beta/projects/{projectId}/snapshotSchedules", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def update_resource_pool( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + update_resource_pool_payload: UpdateResourcePoolPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UpdateResourcePoolResponse: + """Update Resource Pool + + Updates a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param update_resource_pool_payload: (required) + :type update_resource_pool_payload: UpdateResourcePoolPayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_resource_pool_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + update_resource_pool_payload=update_resource_pool_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "UpdateResourcePoolResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def update_resource_pool_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + update_resource_pool_payload: UpdateResourcePoolPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UpdateResourcePoolResponse]: + """Update Resource Pool + + Updates a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param update_resource_pool_payload: (required) + :type update_resource_pool_payload: UpdateResourcePoolPayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
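+
+ Example (an illustrative sketch, not generator output; assumes ``api`` is a
+ configured ``DefaultApi`` instance, the identifiers are placeholders, and
+ that the payload fields you set depend on your use case)::
+
+     payload = UpdateResourcePoolPayload()  # populate only the fields you want to change
+     api_response = api.update_resource_pool_with_http_info(
+         project_id="my-project-id",
+         region="eu01",
+         resource_pool_id="my-pool-id",
+         update_resource_pool_payload=payload,
+     )
+     updated = api_response.data  # the deserialized UpdateResourcePoolResponse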
+ """ # noqa: E501 + + _param = self._update_resource_pool_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + update_resource_pool_payload=update_resource_pool_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "UpdateResourcePoolResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def update_resource_pool_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + update_resource_pool_payload: UpdateResourcePoolPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update Resource Pool + + Updates a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param update_resource_pool_payload: (required) + :type update_resource_pool_payload: UpdateResourcePoolPayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_resource_pool_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + update_resource_pool_payload=update_resource_pool_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "UpdateResourcePoolResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _update_resource_pool_serialize( + self, + project_id, + region, + resource_pool_id, + update_resource_pool_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if update_resource_pool_payload is not None: + _body_params = update_resource_pool_payload + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params["Content-Type"] = _content_type + else: + _default_content_type = self.api_client.select_header_content_type(["application/json"]) + if _default_content_type is not None: + _header_params["Content-Type"] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="PATCH", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def update_share( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + share_id: StrictStr, + update_share_payload: UpdateSharePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UpdateShareResponse: + """Update Share + + Updates a Shares in a Resource Pool in a project. 
+ + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param share_id: (required) + :type share_id: str + :param update_share_payload: (required) + :type update_share_payload: UpdateSharePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_share_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + share_id=share_id, + update_share_payload=update_share_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "UpdateShareResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def update_share_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + share_id: StrictStr, + update_share_payload: UpdateSharePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UpdateShareResponse]: + """Update Share + + Updates a Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param share_id: (required) + :type share_id: str + :param update_share_payload: (required) + :type update_share_payload: UpdateSharePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_share_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + share_id=share_id, + update_share_payload=update_share_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "UpdateShareResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def update_share_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + resource_pool_id: StrictStr, + share_id: StrictStr, + update_share_payload: UpdateSharePayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update Share + + Updates a Shares in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param resource_pool_id: (required) + :type resource_pool_id: str + :param share_id: (required) + :type share_id: str + :param update_share_payload: (required) + :type update_share_payload: UpdateSharePayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
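+
+ Example (an illustrative sketch, not generator output; assumes ``api`` is a
+ configured ``DefaultApi`` instance and the identifiers are placeholders)::
+
+     response = api.update_share_without_preload_content(
+         project_id="my-project-id",
+         region="eu01",
+         resource_pool_id="my-pool-id",
+         share_id="my-share-id",
+         update_share_payload=UpdateSharePayload(),  # populate fields to change
+     )
+     raw = response.read()  # with the default urllib3 transport (an assumption), yields the raw body bytes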
+ """ # noqa: E501 + + _param = self._update_share_serialize( + project_id=project_id, + region=region, + resource_pool_id=resource_pool_id, + share_id=share_id, + update_share_payload=update_share_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "UpdateShareResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _update_share_serialize( + self, + project_id, + region, + resource_pool_id, + share_id, + update_share_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if resource_pool_id is not None: + _path_params["resourcePoolId"] = resource_pool_id + if share_id is not None: + _path_params["shareId"] = share_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if update_share_payload is not None: + _body_params = update_share_payload + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params["Content-Type"] = _content_type + else: + _default_content_type = self.api_client.select_header_content_type(["application/json"]) + if _default_content_type is not None: + _header_params["Content-Type"] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="PATCH", + resource_path="/v1beta/projects/{projectId}/regions/{region}/resourcePools/{resourcePoolId}/shares/{shareId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def update_share_export_policy( + self, + project_id: StrictStr, + region: StrictStr, + policy_id: StrictStr, + update_share_export_policy_payload: UpdateShareExportPolicyPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UpdateShareExportPolicyResponse: + """Update Share Export Policy + + Updates a ShareExportPolicies in a Resource Pool in a project. 
+ + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param policy_id: (required) + :type policy_id: str + :param update_share_export_policy_payload: (required) + :type update_share_export_policy_payload: UpdateShareExportPolicyPayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_share_export_policy_serialize( + project_id=project_id, + region=region, + policy_id=policy_id, + update_share_export_policy_payload=update_share_export_policy_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "UpdateShareExportPolicyResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def update_share_export_policy_with_http_info( + self, + project_id: StrictStr, + region: StrictStr, + policy_id: StrictStr, + update_share_export_policy_payload: UpdateShareExportPolicyPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UpdateShareExportPolicyResponse]: + """Update Share Export Policy + + Updates a ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param policy_id: (required) + :type policy_id: str + :param update_share_export_policy_payload: (required) + :type update_share_export_policy_payload: UpdateShareExportPolicyPayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_share_export_policy_serialize( + project_id=project_id, + region=region, + policy_id=policy_id, + update_share_export_policy_payload=update_share_export_policy_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "UpdateShareExportPolicyResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def update_share_export_policy_without_preload_content( + self, + project_id: StrictStr, + region: StrictStr, + policy_id: StrictStr, + update_share_export_policy_payload: UpdateShareExportPolicyPayload, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update Share Export Policy + + Updates a ShareExportPolicies in a Resource Pool in a project. + + :param project_id: (required) + :type project_id: str + :param region: (required) + :type region: str + :param policy_id: (required) + :type policy_id: str + :param update_share_export_policy_payload: (required) + :type update_share_export_policy_payload: UpdateShareExportPolicyPayload + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
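+
+ Example (an illustrative sketch, not generator output; assumes ``api`` is a
+ configured ``DefaultApi`` instance and the identifiers are placeholders)::
+
+     response = api.update_share_export_policy_without_preload_content(
+         project_id="my-project-id",
+         region="eu01",
+         policy_id="my-policy-id",
+         update_share_export_policy_payload=UpdateShareExportPolicyPayload(),  # populate fields to change
+     )
+     raw = response.read()  # with the default urllib3 transport (an assumption), yields the raw body bytes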
+ """ # noqa: E501 + + _param = self._update_share_export_policy_serialize( + project_id=project_id, + region=region, + policy_id=policy_id, + update_share_export_policy_payload=update_share_export_policy_payload, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "UpdateShareExportPolicyResponse", + "400": "ValidationError", + "401": "str", + "500": "Error", + } + response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout) + return response_data.response + + def _update_share_export_policy_serialize( + self, + project_id, + region, + policy_id, + update_share_export_policy_payload, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if project_id is not None: + _path_params["projectId"] = project_id + if region is not None: + _path_params["region"] = region + if policy_id is not None: + _path_params["policyId"] = policy_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if update_share_export_policy_payload is not None: + _body_params = update_share_export_policy_payload + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json", "application/problem+json", "text/plain"] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params["Content-Type"] = _content_type + else: + _default_content_type = self.api_client.select_header_content_type(["application/json"]) + if _default_content_type is not None: + _header_params["Content-Type"] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [] + + return self.api_client.param_serialize( + method="PATCH", + resource_path="/v1beta/projects/{projectId}/regions/{region}/shareExportPolicies/{policyId}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) diff --git a/services/sfs/src/stackit/sfs/api_client.py b/services/sfs/src/stackit/sfs/api_client.py new file mode 100644 index 000000000..a136cb798 --- /dev/null +++ b/services/sfs/src/stackit/sfs/api_client.py @@ -0,0 +1,639 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import datetime +import json +import mimetypes +import os +import re +import tempfile +from enum import Enum +from typing import Dict, List, Optional, Tuple, Union +from urllib.parse import quote + +from dateutil.parser import parse +from pydantic import SecretStr +from stackit.core.configuration import Configuration + +import stackit.sfs.models +from stackit.sfs import rest +from stackit.sfs.api_response import ApiResponse +from stackit.sfs.api_response import T as ApiResponseT +from stackit.sfs.configuration import HostConfiguration +from stackit.sfs.exceptions import ( + ApiException, +) + + +RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]] + + +class ApiClient: + """Generic API client for OpenAPI client library builds. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + """ + + PRIMITIVE_TYPES = (float, bool, bytes, str, int) + NATIVE_TYPES_MAPPING = { + "int": int, + "long": int, # TODO remove as only py3 is supported? + "float": float, + "str": str, + "bool": bool, + "date": datetime.date, + "datetime": datetime.datetime, + "object": object, + } + + def __init__(self, configuration, header_name=None, header_value=None, cookie=None) -> None: + self.config: Configuration = configuration + + if self.config.custom_endpoint is None: + host_config = HostConfiguration(region=self.config.region, server_index=self.config.server_index) + self.host = host_config.host + else: + self.host = self.config.custom_endpoint + + self.rest_client = rest.RESTClientObject(self.config) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. + self.user_agent = "stackit-sdk-python/sfs" + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + pass + + @property + def user_agent(self): + """User agent for this API client""" + return self.default_headers["User-Agent"] + + @user_agent.setter + def user_agent(self, value): + self.default_headers["User-Agent"] = value + + def set_default_header(self, header_name, header_value): + self.default_headers[header_name] = header_value + + _default = None + + @classmethod + def get_default(cls): + """Return new instance of ApiClient. + + This method returns newly created, based on default constructor, + object of ApiClient class or returns a copy of default + ApiClient. + + :return: The ApiClient object. + """ + if cls._default is None: + cls._default = ApiClient() + return cls._default + + @classmethod + def set_default(cls, default): + """Set default instance of ApiClient. + + It stores default ApiClient. + + :param default: object of ApiClient. + """ + cls._default = default + + def param_serialize( + self, + method, + resource_path, + path_params=None, + query_params=None, + header_params=None, + body=None, + post_params=None, + files=None, + auth_settings=None, + collection_formats=None, + _host=None, + _request_auth=None, + ) -> RequestSerialized: + """Builds the HTTP request params needed by the request. 
+        :param method: Method to call.
+        :param resource_path: Path to method endpoint.
+        :param path_params: Path parameters in the url.
+        :param query_params: Query parameters in the url.
+        :param header_params: Header parameters to be
+            placed in the request header.
+        :param body: Request body.
+        :param post_params dict: Request post form parameters,
+            for `application/x-www-form-urlencoded`, `multipart/form-data`.
+        :param auth_settings list: Auth Settings names for the request.
+        :param files dict: key -> filename, value -> filepath,
+            for `multipart/form-data`.
+        :param collection_formats: dict of collection formats for path, query,
+            header, and post parameters.
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the authentication
+            in the spec for a single request.
+        :return: tuple of form (path, http_method, query_params, header_params,
+            body, post_params, files)
+        """
+
+        # header parameters
+        header_params = header_params or {}
+        header_params.update(self.default_headers)
+        if self.cookie:
+            header_params["Cookie"] = self.cookie
+        if header_params:
+            header_params = self.sanitize_for_serialization(header_params)
+            header_params = dict(self.parameters_to_tuples(header_params, collection_formats))
+
+        # path parameters
+        if path_params:
+            path_params = self.sanitize_for_serialization(path_params)
+            path_params = self.parameters_to_tuples(path_params, collection_formats)
+            for k, v in path_params:
+                # specified safe chars, encode everything
+                resource_path = resource_path.replace("{%s}" % k, quote(str(v)))
+
+        # post parameters
+        if post_params or files:
+            post_params = post_params if post_params else []
+            post_params = self.sanitize_for_serialization(post_params)
+            post_params = self.parameters_to_tuples(post_params, collection_formats)
+            if files:
+                post_params.extend(self.files_parameters(files))
+
+        # body
+        if body:
+            body = self.sanitize_for_serialization(body)
+
+        # request url
+        if _host is None:
+            url = self.host + resource_path
+        else:
+            # use server/host defined in path or operation instead
+            url = _host + resource_path
+
+        # query parameters
+        if query_params:
+            query_params = self.sanitize_for_serialization(query_params)
+            url_query = self.parameters_to_url_query(query_params, collection_formats)
+            url += "?" + url_query
+
+        return method, url, header_params, body, post_params
+
+    def call_api(
+        self, method, url, header_params=None, body=None, post_params=None, _request_timeout=None
+    ) -> rest.RESTResponse:
+        """Makes the HTTP request (synchronous)
+        :param method: Method to call.
+        :param url: Path to method endpoint.
+        :param header_params: Header parameters to be
+            placed in the request header.
+        :param body: Request body.
+        :param post_params dict: Request post form parameters,
+            for `application/x-www-form-urlencoded`, `multipart/form-data`.
+        :param _request_timeout: timeout setting for this request.
+        :return: RESTResponse
+        """
+
+        try:
+            # perform request and return response
+            response_data = self.rest_client.request(
+                method,
+                url,
+                headers=header_params,
+                body=body,
+                post_params=post_params,
+                _request_timeout=_request_timeout,
+            )
+
+        except ApiException as e:
+            raise e
+
+        return response_data
+
+    def response_deserialize(
+        self, response_data: rest.RESTResponse, response_types_map: Optional[Dict[str, ApiResponseT]] = None
+    ) -> ApiResponse[ApiResponseT]:
+        """Deserializes response into an object.
+        :param response_data: RESTResponse object to be deserialized.
+        :param response_types_map: dict of response types.
+        :return: ApiResponse
+        """
+
+        msg = "RESTResponse.read() must be called before passing it to response_deserialize()"
+        if response_data.data is None:
+            raise ValueError(msg)
+
+        response_type = response_types_map.get(str(response_data.status), None)
+        if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599:
+            # if not found, look for '1XX', '2XX', etc.
+            response_type = response_types_map.get(str(response_data.status)[0] + "XX", None)
+
+        # deserialize response data
+        response_text = None
+        return_data = None
+        try:
+            if response_type == "bytearray":
+                return_data = response_data.data
+            elif response_type == "file":
+                return_data = self.__deserialize_file(response_data)
+            elif response_type is not None:
+                match = None
+                content_type = response_data.getheader("content-type")
+                if content_type is not None:
+                    match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type)
+                encoding = match.group(1) if match else "utf-8"
+                response_text = response_data.data.decode(encoding)
+                return_data = self.deserialize(response_text, response_type, content_type)
+        finally:
+            if not 200 <= response_data.status <= 299:
+                raise ApiException.from_response(
+                    http_resp=response_data,
+                    body=response_text,
+                    data=return_data,
+                )
+
+        return ApiResponse(
+            status_code=response_data.status,
+            data=return_data,
+            headers=response_data.getheaders(),
+            raw_data=response_data.data,
+        )
+
+    def sanitize_for_serialization(self, obj):
+        """Builds a JSON POST object.
+
+        If obj is None, return None.
+        If obj is SecretStr, return obj.get_secret_value()
+        If obj is str, int, long, float, bool, return directly.
+        If obj is datetime.datetime, datetime.date
+            convert to string in iso8601 format.
+        If obj is list, sanitize each element in the list.
+        If obj is dict, return the dict with each value sanitized.
+        If obj is OpenAPI model, return the properties dict.
+
+        :param obj: The data to serialize.
+        :return: The serialized form of data.
+        """
+        if obj is None:
+            return None
+        elif isinstance(obj, Enum):
+            return obj.value
+        elif isinstance(obj, SecretStr):
+            return obj.get_secret_value()
+        elif isinstance(obj, self.PRIMITIVE_TYPES):
+            return obj
+        elif isinstance(obj, list):
+            return [self.sanitize_for_serialization(sub_obj) for sub_obj in obj]
+        elif isinstance(obj, tuple):
+            return tuple(self.sanitize_for_serialization(sub_obj) for sub_obj in obj)
+        elif isinstance(obj, (datetime.datetime, datetime.date)):
+            return obj.isoformat()
+
+        elif isinstance(obj, dict):
+            obj_dict = obj
+        else:
+            # Convert a model object to a dict, mapping attribute names to
+            # their JSON keys as defined in the model, and dropping
+            # attributes whose value is None.
+            if hasattr(obj, "to_dict") and callable(obj.to_dict):
+                obj_dict = obj.to_dict()
+            else:
+                obj_dict = obj.__dict__
+
+        if isinstance(obj_dict, list):
+            # here we handle instances that can either be a list or something else, and only became a real list by calling to_dict()  # noqa: E501
+            return self.sanitize_for_serialization(obj_dict)
+
+        return {key: self.sanitize_for_serialization(val) for key, val in obj_dict.items()}
+
+    def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]):
+        """Deserializes response into an object.
+
+        :param response_text: response body text to be deserialized.
+        :param response_type: class literal for
+            deserialized object, or string of class name.
+        :param content_type: content type of response.
+ + :return: deserialized object. + """ + + # fetch data from response object + if content_type is None: + try: + data = json.loads(response_text) + except ValueError: + data = response_text + elif re.match(r"^application/(json|[\w!#$&.+-^_]+\+json)\s*(;|$)", content_type, re.IGNORECASE): + if response_text == "": + data = "" + else: + data = json.loads(response_text) + elif re.match(r"^text\/[a-z.+-]+\s*(;|$)", content_type, re.IGNORECASE): + data = response_text + else: + raise ApiException(status=0, reason="Unsupported content type: {0}".format(content_type)) + + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if isinstance(klass, str): + if klass.startswith("List["): + m = re.match(r"List\[(.*)]", klass) + if m is None: + raise ValueError("Malformed List type definition") + sub_kls = m.group(1) + return [self.__deserialize(sub_data, sub_kls) for sub_data in data] + + if klass.startswith("Dict["): + m = re.match(r"Dict\[([^,]*), (.*)]", klass) + if m is None: + raise ValueError("Malformed Dict type definition") + sub_kls = m.group(2) + return {k: self.__deserialize(v, sub_kls) for k, v in data.items()} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(stackit.sfs.models, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datetime(data) + elif issubclass(klass, Enum): + return self.__deserialize_enum(data, klass) + else: + return self.__deserialize_model(data, klass) + + def parameters_to_tuples(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. + + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params: List[Tuple[str, str]] = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == "multi": + new_params.extend((k, value) for value in v) + else: + if collection_format == "ssv": + delimiter = " " + elif collection_format == "tsv": + delimiter = "\t" + elif collection_format == "pipes": + delimiter = "|" + else: # csv is the default + delimiter = "," + new_params.append((k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + def parameters_to_url_query(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. + + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: URL query string (e.g. 
a=Hello%20World&b=123)
+        """
+        new_params: List[Tuple[str, str]] = []
+        if collection_formats is None:
+            collection_formats = {}
+        for k, v in params.items() if isinstance(params, dict) else params:
+            if isinstance(v, bool):
+                v = str(v).lower()
+            if isinstance(v, (int, float)):
+                v = str(v)
+            if isinstance(v, dict):
+                v = json.dumps(v)
+
+            if k in collection_formats:
+                collection_format = collection_formats[k]
+                if collection_format == "multi":
+                    new_params.extend((k, quote(str(value))) for value in v)
+                else:
+                    if collection_format == "ssv":
+                        delimiter = " "
+                    elif collection_format == "tsv":
+                        delimiter = "\t"
+                    elif collection_format == "pipes":
+                        delimiter = "|"
+                    else:  # csv is the default
+                        delimiter = ","
+                    new_params.append((k, delimiter.join(quote(str(value)) for value in v)))
+            else:
+                new_params.append((k, quote(str(v))))
+
+        return "&".join(["=".join(map(str, item)) for item in new_params])
+
+    def files_parameters(
+        self,
+        files: Dict[str, Union[str, bytes, List[str], List[bytes], Tuple[str, bytes]]],
+    ):
+        """Builds form parameters.
+
+        :param files: File parameters.
+        :return: Form parameters with files.
+        """
+        params = []
+        for k, v in files.items():
+            if isinstance(v, str):
+                with open(v, "rb") as f:
+                    filename = os.path.basename(f.name)
+                    filedata = f.read()
+            elif isinstance(v, bytes):
+                filename = k
+                filedata = v
+            elif isinstance(v, tuple):
+                filename, filedata = v
+            elif isinstance(v, list):
+                for file_param in v:
+                    params.extend(self.files_parameters({k: file_param}))
+                continue
+            else:
+                raise ValueError("Unsupported file value")
+            mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
+            params.append(tuple([k, tuple([filename, filedata, mimetype])]))
+        return params
+
+    def select_header_accept(self, accepts: List[str]) -> Optional[str]:
+        """Returns `Accept` based on an array of accepts provided.
+
+        :param accepts: List of headers.
+        :return: Accept (e.g. application/json).
+        """
+        if not accepts:
+            return None
+
+        for accept in accepts:
+            if re.search("json", accept, re.IGNORECASE):
+                return accept
+
+        return accepts[0]
+
+    def select_header_content_type(self, content_types):
+        """Returns `Content-Type` based on an array of content_types provided.
+
+        :param content_types: List of content-types.
+        :return: Content-Type (e.g. application/json).
+        """
+        if not content_types:
+            return None
+
+        for content_type in content_types:
+            if re.search("json", content_type, re.IGNORECASE):
+                return content_type
+
+        return content_types[0]
+
+    def __deserialize_file(self, response):
+        """Deserializes body to file
+
+        Saves the response body into a file in a temporary folder,
+        using the filename from the `Content-Disposition` header if provided.
+
+        :param response: RESTResponse.
+        :return: file path.
+        """
+        fd, path = tempfile.mkstemp(dir=self.config.temp_folder_path)
+        os.close(fd)
+        os.remove(path)
+
+        content_disposition = response.getheader("Content-Disposition")
+        if content_disposition:
+            m = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition)
+            if m is None:
+                raise ValueError("Unexpected 'content-disposition' header value")
+            filename = m.group(1)
+            path = os.path.join(os.path.dirname(path), filename)
+
+        with open(path, "wb") as f:
+            f.write(response.data)
+
+        return path
+
+    def __deserialize_primitive(self, data, klass):
+        """Deserializes string to primitive type.
+
+        :param data: str.
+        :param klass: class literal.
+ + :return: int, long, float, str, bool. + """ + try: + return klass(data) + except UnicodeEncodeError: + return str(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. + """ + try: + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException(status=0, reason="Failed to parse `{0}` as date object".format(string)) + + def __deserialize_datetime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException(status=0, reason=("Failed to parse `{0}` as datetime object".format(string))) + + def __deserialize_enum(self, data, klass): + """Deserializes primitive type to enum. + + :param data: primitive type. + :param klass: class literal. + :return: enum value. + """ + try: + return klass(data) + except ValueError: + raise rest.ApiException(status=0, reason=("Failed to parse `{0}` as `{1}`".format(data, klass))) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. + """ + + return klass.from_dict(data) diff --git a/services/sfs/src/stackit/sfs/api_response.py b/services/sfs/src/stackit/sfs/api_response.py new file mode 100644 index 000000000..b3ba14a1d --- /dev/null +++ b/services/sfs/src/stackit/sfs/api_response.py @@ -0,0 +1,23 @@ +"""API response object.""" + +from __future__ import annotations + +from typing import Generic, Mapping, Optional, TypeVar + +from pydantic import BaseModel, Field, StrictBytes, StrictInt + + +T = TypeVar("T") + + +class ApiResponse(BaseModel, Generic[T]): + """ + API response object + """ + + status_code: StrictInt = Field(description="HTTP status code") + headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers") + data: T = Field(description="Deserialized data given the data type") + raw_data: StrictBytes = Field(description="Raw data (HTTP response body)") + + model_config = {"arbitrary_types_allowed": True} diff --git a/services/sfs/src/stackit/sfs/configuration.py b/services/sfs/src/stackit/sfs/configuration.py new file mode 100644 index 000000000..68a363dd8 --- /dev/null +++ b/services/sfs/src/stackit/sfs/configuration.py @@ -0,0 +1,164 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
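+
+    Illustrative note (values taken from the defaults defined below): with the
+    single server setting in this module, HostConfiguration(region="eu01").host
+    resolves the template "https://sfs.api.{region}stackit.cloud" to
+    "https://sfs.api.eu01.stackit.cloud".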
+""" # noqa: E501 + +import sys +from typing import Dict, List, Optional, TypedDict + +from typing_extensions import NotRequired + +import os + + +ServerVariablesT = Dict[str, str] + + +class HostSettingVariable(TypedDict): + description: str + default_value: str + enum_values: List[str] + + +class HostSetting(TypedDict): + url: str + description: str + variables: NotRequired[Dict[str, HostSettingVariable]] + + +class HostConfiguration: + def __init__( + self, + region=None, + server_index=None, + server_variables=None, + server_operation_index=None, + server_operation_variables=None, + ignore_operation_servers=False, + ) -> None: + print( + "WARNING: STACKIT will move to a new way of specifying regions, where the region is provided\n", + "as a function argument instead of being set in the client configuration.\n" + "Once all services have migrated, the methods to specify the region in the client configuration " + "will be removed.", + file=sys.stderr, + ) + """Constructor + """ + self._base_path = "https://sfs.api.eu01.stackit.cloud" + """Default Base url + """ + self.server_index = 0 if server_index is None else server_index + self.server_operation_index = server_operation_index or {} + """Default server index + """ + self.server_variables = server_variables or {} + if region: + self.server_variables["region"] = "{}.".format(region) + self.server_operation_variables = server_operation_variables or {} + """Default server variables + """ + self.ignore_operation_servers = ignore_operation_servers + """Ignore operation servers + """ + + def get_host_settings(self) -> List[HostSetting]: + """Gets an array of host settings + + :return: An array of host settings + """ + return [ + { + "url": "https://sfs.api.{region}stackit.cloud", + "description": "No description provided", + "variables": { + "region": { + "description": "No description provided", + "default_value": "eu01.", + "enum_values": ["eu01."], + } + }, + } + ] + + def get_host_from_settings( + self, + index: Optional[int], + variables: Optional[ServerVariablesT] = None, + servers: Optional[List[HostSetting]] = None, + ) -> str: + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :param servers: an array of host settings or None + :error: if a region is given for a global url + :return: URL based on host settings + """ + if index is None: + return self._base_path + + variables = {} if variables is None else variables + servers = self.get_host_settings() if servers is None else servers + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. " + "Must be less than {1}".format(index, len(servers)) + ) + + url = server["url"] + + # check if environment variable was provided for region + # if nothing was set this is None + region_env = os.environ.get("STACKIT_REGION") + + # go through variables and replace placeholders + for variable_name, variable in server.get("variables", {}).items(): + # If a region is provided by the user for a global url + # return an error (except for providing via environment variable). + # The region is provided as a function argument instead of being set in the client configuration. 
+            if (
+                variable_name == "region"
+                and (variable["default_value"] == "global" or variable["default_value"] == "")
+                and region_env is None
+                and variables.get(variable_name) is not None
+            ):
+                raise ValueError(
+                    "this API does not support setting a region in the client configuration, "
+                    "please check if the region can be specified as a function parameter"
+                )
+            used_value = variables.get(variable_name, variable["default_value"])
+
+            if "enum_values" in variable and used_value not in variable["enum_values"]:
+                given_value = variables[variable_name].replace(".", "")
+                valid_values = [v.replace(".", "") for v in variable["enum_values"]]
+                raise ValueError(
+                    "The variable `{0}` in the host URL has invalid value '{1}'. Must be one of {2}.".format(
+                        variable_name, given_value, valid_values
+                    )
+                )
+
+            url = url.replace("{" + variable_name + "}", used_value)
+
+        return url
+
+    @property
+    def host(self) -> str:
+        """Return generated host."""
+        return self.get_host_from_settings(self.server_index, variables=self.server_variables)
+
+    @host.setter
+    def host(self, value: str) -> None:
+        """Fix base path."""
+        self._base_path = value
+        self.server_index = None
diff --git a/services/sfs/src/stackit/sfs/exceptions.py b/services/sfs/src/stackit/sfs/exceptions.py
new file mode 100644
index 000000000..e48ff1519
--- /dev/null
+++ b/services/sfs/src/stackit/sfs/exceptions.py
@@ -0,0 +1,217 @@
+# coding: utf-8
+
+"""
+    STACKIT File Storage (SFS)
+
+    API used to create and manage NFS Shares.
+
+    The version of the OpenAPI document: 1beta.0.0
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+"""  # noqa: E501
+
+from typing import Any, Optional
+
+from typing_extensions import Self
+
+
+class OpenApiException(Exception):
+    """The base exception class for all OpenAPIExceptions"""
+
+
+class ApiTypeError(OpenApiException, TypeError):
+    def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None) -> None:
+        """Raises an exception for TypeErrors
+
+        Args:
+            msg (str): the exception message
+
+        Keyword Args:
+            path_to_item (list): a list of keys and indices to get to the
+                current_item
+                None if unset
+            valid_classes (tuple): the primitive classes that current item
+                should be an instance of
+                None if unset
+            key_type (bool): True if our item is a key in a dict,
+                False if it is a value in a dict or an item in a list,
+                None if unset
+        """
+        self.path_to_item = path_to_item
+        self.valid_classes = valid_classes
+        self.key_type = key_type
+        full_msg = msg
+        if path_to_item:
+            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+        super(ApiTypeError, self).__init__(full_msg)
+
+
+class ApiValueError(OpenApiException, ValueError):
+    def __init__(self, msg, path_to_item=None) -> None:
+        """
+        Args:
+            msg (str): the exception message
+
+        Keyword Args:
+            path_to_item (list) the path to the exception in the
+                received_data dict. None if unset
+        """
+
+        self.path_to_item = path_to_item
+        full_msg = msg
+        if path_to_item:
+            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+        super(ApiValueError, self).__init__(full_msg)
+
+
+class ApiAttributeError(OpenApiException, AttributeError):
+    def __init__(self, msg, path_to_item=None) -> None:
+        """
+        Raised when an attribute reference or assignment fails.
+ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiAttributeError, self).__init__(full_msg) + + +class ApiKeyError(OpenApiException, KeyError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiKeyError, self).__init__(full_msg) + + +class ApiException(OpenApiException): + + def __init__( + self, + status=None, + reason=None, + http_resp=None, + *, + body: Optional[str] = None, + data: Optional[Any] = None, + ) -> None: + self.status = status + self.reason = reason + self.body = body + self.data = data + self.headers = None + + if http_resp: + if self.status is None: + self.status = http_resp.status + if self.reason is None: + self.reason = http_resp.reason + if self.body is None: + try: + self.body = http_resp.data.decode("utf-8") + except Exception: # noqa: S110 + pass + self.headers = http_resp.getheaders() + + @classmethod + def from_response( + cls, + *, + http_resp, + body: Optional[str], + data: Optional[Any], + ) -> Self: + if http_resp.status == 400: + raise BadRequestException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 401: + raise UnauthorizedException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 403: + raise ForbiddenException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 404: + raise NotFoundException(http_resp=http_resp, body=body, data=data) + + # Added new conditions for 409 and 422 + if http_resp.status == 409: + raise ConflictException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 422: + raise UnprocessableEntityException(http_resp=http_resp, body=body, data=data) + + if 500 <= http_resp.status <= 599: + raise ServiceException(http_resp=http_resp, body=body, data=data) + raise ApiException(http_resp=http_resp, body=body, data=data) + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\n" "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format(self.headers) + + if self.data or self.body: + error_message += "HTTP response body: {0}\n".format(self.data or self.body) + + return error_message + + +class BadRequestException(ApiException): + pass + + +class NotFoundException(ApiException): + pass + + +class UnauthorizedException(ApiException): + pass + + +class ForbiddenException(ApiException): + pass + + +class ServiceException(ApiException): + pass + + +class ConflictException(ApiException): + """Exception for HTTP 409 Conflict.""" + + pass + + +class UnprocessableEntityException(ApiException): + """Exception for HTTP 422 Unprocessable Entity.""" + + pass + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, int): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result diff --git a/services/sfs/src/stackit/sfs/models/__init__.py b/services/sfs/src/stackit/sfs/models/__init__.py new file mode 100644 index 
000000000..da17ecef0 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/__init__.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +# flake8: noqa +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +# import models into model package +from stackit.sfs.models.create_resource_pool_payload import CreateResourcePoolPayload +from stackit.sfs.models.create_resource_pool_response import CreateResourcePoolResponse +from stackit.sfs.models.create_resource_pool_snapshot_payload import ( + CreateResourcePoolSnapshotPayload, +) +from stackit.sfs.models.create_resource_pool_snapshot_response import ( + CreateResourcePoolSnapshotResponse, +) +from stackit.sfs.models.create_share_export_policy_payload import ( + CreateShareExportPolicyPayload, +) +from stackit.sfs.models.create_share_export_policy_request_rule import ( + CreateShareExportPolicyRequestRule, +) +from stackit.sfs.models.create_share_export_policy_response import ( + CreateShareExportPolicyResponse, +) +from stackit.sfs.models.create_share_payload import CreateSharePayload +from stackit.sfs.models.create_share_response import CreateShareResponse +from stackit.sfs.models.error import Error +from stackit.sfs.models.get_resource_pool_response import GetResourcePoolResponse +from stackit.sfs.models.get_resource_pool_snapshot_response import ( + GetResourcePoolSnapshotResponse, +) +from stackit.sfs.models.get_share_export_policy_response import ( + GetShareExportPolicyResponse, +) +from stackit.sfs.models.get_share_response import GetShareResponse +from stackit.sfs.models.google_protobuf_any import GoogleProtobufAny +from stackit.sfs.models.list_performance_classes_response import ( + ListPerformanceClassesResponse, +) +from stackit.sfs.models.list_resource_pool_snapshots_response import ( + ListResourcePoolSnapshotsResponse, +) +from stackit.sfs.models.list_resource_pools_response import ListResourcePoolsResponse +from stackit.sfs.models.list_share_export_policies_response import ( + ListShareExportPoliciesResponse, +) +from stackit.sfs.models.list_shares_response import ListSharesResponse +from stackit.sfs.models.list_snapshot_schedules_response import ( + ListSnapshotSchedulesResponse, +) +from stackit.sfs.models.performance_class import PerformanceClass +from stackit.sfs.models.resource_pool import ResourcePool +from stackit.sfs.models.resource_pool_performance_class import ( + ResourcePoolPerformanceClass, +) +from stackit.sfs.models.resource_pool_snapshot import ResourcePoolSnapshot +from stackit.sfs.models.resource_pool_space import ResourcePoolSpace +from stackit.sfs.models.share import Share +from stackit.sfs.models.share_export_policy import ShareExportPolicy +from stackit.sfs.models.share_export_policy_rule import ShareExportPolicyRule +from stackit.sfs.models.snapshot_schedule import SnapshotSchedule +from stackit.sfs.models.status import Status +from stackit.sfs.models.update_resource_pool_payload import UpdateResourcePoolPayload +from stackit.sfs.models.update_resource_pool_response import UpdateResourcePoolResponse +from stackit.sfs.models.update_share_export_policy_body_rule import ( + UpdateShareExportPolicyBodyRule, +) +from stackit.sfs.models.update_share_export_policy_payload import ( + UpdateShareExportPolicyPayload, +) +from stackit.sfs.models.update_share_export_policy_response import ( + UpdateShareExportPolicyResponse, +) +from 
stackit.sfs.models.update_share_payload import UpdateSharePayload +from stackit.sfs.models.update_share_response import UpdateShareResponse +from stackit.sfs.models.validation_error import ValidationError +from stackit.sfs.models.validation_error_field import ValidationErrorField diff --git a/services/sfs/src/stackit/sfs/models/create_resource_pool_payload.py b/services/sfs/src/stackit/sfs/models/create_resource_pool_payload.py new file mode 100644 index 000000000..bbc012ae8 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/create_resource_pool_payload.py @@ -0,0 +1,134 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + StrictBool, + StrictInt, + StrictStr, +) +from typing_extensions import Self + + +class CreateResourcePoolPayload(BaseModel): + """ + CreateResourcePoolPayload + """ # noqa: E501 + + availability_zone: Optional[StrictStr] = Field( + default=None, description="Availability zone", alias="availabilityZone" + ) + ip_acl: Optional[List[StrictStr]] = Field( + default=None, + description='List of IPs that can mount the Resource Pool in read-only; IPs must have a subnet mask (e.g. "172.16.0.0/24" for a range of IPs, or "172.16.0.250/32" for a specific IP)', + alias="ipAcl", + ) + labels: Optional[Dict[str, StrictStr]] = Field( + default=None, + description="An optional object that represents the labels associated with the resource pool keys are validated using the following regex '^[\\\\p{Ll}][\\\\p{Ll}\\\\p{N}_-]*$' and cannot be empty values are validated using the following regex '^[\\\\p{Ll}\\\\p{N}_-]*$'", + ) + name: Optional[StrictStr] = Field(default=None, description="Name of the Resource Pool") + performance_class: Optional[StrictStr] = Field( + default=None, description="Name of the performance class", alias="performanceClass" + ) + size_gigabytes: Optional[StrictInt] = Field( + default=None, description="Size of the Resource Pool (unit: gibibytes)", alias="sizeGigabytes" + ) + snapshot_schedule_name: Optional[StrictStr] = Field( + default=None, description="(optional) Name of the Snapshot Schedule to use", alias="snapshotScheduleName" + ) + snapshots_are_visible: Optional[StrictBool] = Field( + default=None, + description="Whether the .snapshot directory is visible when mounting the resource pool. Setting this value to false might prevent you from accessing the snapshots (e.g. for security reasons). Additionally, the access to the snapshots is always controlled by the export policy of the resource pool. 
That means, if snapshots are visible and the export policy allows for reading the resource pool, then it also allows reading the snapshot of all shares.", + alias="snapshotsAreVisible", + ) + __properties: ClassVar[List[str]] = [ + "availabilityZone", + "ipAcl", + "labels", + "name", + "performanceClass", + "sizeGigabytes", + "snapshotScheduleName", + "snapshotsAreVisible", + ] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateResourcePoolPayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateResourcePoolPayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "availabilityZone": obj.get("availabilityZone"), + "ipAcl": obj.get("ipAcl"), + "labels": obj.get("labels"), + "name": obj.get("name"), + "performanceClass": obj.get("performanceClass"), + "sizeGigabytes": obj.get("sizeGigabytes"), + "snapshotScheduleName": obj.get("snapshotScheduleName"), + "snapshotsAreVisible": obj.get("snapshotsAreVisible"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/create_resource_pool_response.py b/services/sfs/src/stackit/sfs/models/create_resource_pool_response.py new file mode 100644 index 000000000..b6fe72ef6 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/create_resource_pool_response.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
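+
+    Illustrative sketch (hypothetical field values; the full ResourcePool
+    schema lives in resource_pool.py):
+
+        resp = CreateResourcePoolResponse.from_dict({"resourcePool": {"name": "pool-1"}})
+        resp.resource_pool.name  # -> "pool-1"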
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.resource_pool import ResourcePool + + +class CreateResourcePoolResponse(BaseModel): + """ + CreateResourcePoolResponse + """ # noqa: E501 + + resource_pool: Optional[ResourcePool] = Field( + default=None, description="Created Resource Pool", alias="resourcePool" + ) + __properties: ClassVar[List[str]] = ["resourcePool"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateResourcePoolResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of resource_pool + if self.resource_pool: + _dict["resourcePool"] = self.resource_pool.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateResourcePoolResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "resourcePool": ( + ResourcePool.from_dict(obj["resourcePool"]) if obj.get("resourcePool") is not None else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/create_resource_pool_snapshot_payload.py b/services/sfs/src/stackit/sfs/models/create_resource_pool_snapshot_payload.py new file mode 100644 index 000000000..db7f757d4 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/create_resource_pool_snapshot_payload.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Self + + +class CreateResourcePoolSnapshotPayload(BaseModel): + """ + CreateResourcePoolSnapshotPayload + """ # noqa: E501 + + comment: Optional[StrictStr] = Field( + default=None, description="(optional) A comment to add more information about a snapshot" + ) + name: Optional[StrictStr] = Field(default=None, description="Name of the Resource Pool Snapshot") + __properties: ClassVar[List[str]] = ["comment", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateResourcePoolSnapshotPayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if comment (nullable) is None + # and model_fields_set contains the field + if self.comment is None and "comment" in self.model_fields_set: + _dict["comment"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateResourcePoolSnapshotPayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({"comment": obj.get("comment"), "name": obj.get("name")}) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/create_resource_pool_snapshot_response.py b/services/sfs/src/stackit/sfs/models/create_resource_pool_snapshot_response.py new file mode 100644 index 000000000..e6d0e65b2 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/create_resource_pool_snapshot_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.resource_pool_snapshot import ResourcePoolSnapshot + + +class CreateResourcePoolSnapshotResponse(BaseModel): + """ + CreateResourcePoolSnapshotResponse + """ # noqa: E501 + + resource_pool_snapshot: Optional[ResourcePoolSnapshot] = Field( + default=None, description="Created Resource Pool Snapshot", alias="resourcePoolSnapshot" + ) + __properties: ClassVar[List[str]] = ["resourcePoolSnapshot"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateResourcePoolSnapshotResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of resource_pool_snapshot + if self.resource_pool_snapshot: + _dict["resourcePoolSnapshot"] = self.resource_pool_snapshot.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateResourcePoolSnapshotResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "resourcePoolSnapshot": ( + ResourcePoolSnapshot.from_dict(obj["resourcePoolSnapshot"]) + if obj.get("resourcePoolSnapshot") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/create_share_export_policy_payload.py b/services/sfs/src/stackit/sfs/models/create_share_export_policy_payload.py new file mode 100644 index 000000000..fd0641e4b --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/create_share_export_policy_payload.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Self + +from stackit.sfs.models.create_share_export_policy_request_rule import ( + CreateShareExportPolicyRequestRule, +) + + +class CreateShareExportPolicyPayload(BaseModel): + """ + CreateShareExportPolicyPayload + """ # noqa: E501 + + labels: Optional[Dict[str, StrictStr]] = Field( + default=None, + description="An optional object that represents the labels associated with the share export policy keys are validated using the following regex '^[\\\\p{Ll}][\\\\p{Ll}\\\\p{N}_-]*$' and cannot be empty values are validated using the following regex '^[\\\\p{Ll}\\\\p{N}_-]*$'", + ) + name: Optional[StrictStr] = Field(default=None, description="Name of the Share Export Policy") + rules: Optional[List[CreateShareExportPolicyRequestRule]] = Field( + default=None, + description='List of rules of the Share Export Policy. The order of the rules within the array does not matter - what matters is the field "order" within each rule', + ) + __properties: ClassVar[List[str]] = ["labels", "name", "rules"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateShareExportPolicyPayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in rules (list) + _items = [] + if self.rules: + for _item in self.rules: + if _item: + _items.append(_item.to_dict()) + _dict["rules"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateShareExportPolicyPayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "labels": obj.get("labels"), + "name": obj.get("name"), + "rules": ( + [CreateShareExportPolicyRequestRule.from_dict(_item) for _item in obj["rules"]] + if obj.get("rules") is not None + else None + ), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/create_share_export_policy_request_rule.py b/services/sfs/src/stackit/sfs/models/create_share_export_policy_request_rule.py new file mode 100644 index 000000000..41a10e5e6 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/create_share_export_policy_request_rule.py @@ -0,0 +1,124 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + StrictBool, + StrictInt, + StrictStr, +) +from typing_extensions import Self + + +class CreateShareExportPolicyRequestRule(BaseModel): + """ + CreateShareExportPolicyRequestRule + """ # noqa: E501 + + description: Optional[StrictStr] = Field(default=None, description="Description of the Rule (optional)") + ip_acl: Optional[List[StrictStr]] = Field( + default=None, + description='IP access control list, where IPs must have a subnet mask (e.g. "172.16.0.0/24" for a range of IPs, or "172.16.0.250/32" for a specific IP) This array cannot be empty', + alias="ipAcl", + ) + order: Optional[StrictInt] = Field( + default=None, + description="Order of the rule within a Share Export Policy. 
The order is used so that when a client IP matches multiple rules, the first rule is applied This field must be set", + ) + read_only: Optional[StrictBool] = Field( + default=False, + description="Flag to indicate if client IPs matching this rule can only mount the share in read only mode Default: false", + alias="readOnly", + ) + set_uuid: Optional[StrictBool] = Field( + default=False, description="Flag to honor set UUID Default: false", alias="setUuid" + ) + super_user: Optional[StrictBool] = Field( + default=True, + description="Flag to indicate if client IPs matching this rule have root access on the Share Default: true", + alias="superUser", + ) + __properties: ClassVar[List[str]] = ["description", "ipAcl", "order", "readOnly", "setUuid", "superUser"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateShareExportPolicyRequestRule from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if description (nullable) is None + # and model_fields_set contains the field + if self.description is None and "description" in self.model_fields_set: + _dict["description"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateShareExportPolicyRequestRule from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "description": obj.get("description"), + "ipAcl": obj.get("ipAcl"), + "order": obj.get("order"), + "readOnly": obj.get("readOnly") if obj.get("readOnly") is not None else False, + "setUuid": obj.get("setUuid") if obj.get("setUuid") is not None else False, + "superUser": obj.get("superUser") if obj.get("superUser") is not None else True, + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/create_share_export_policy_response.py b/services/sfs/src/stackit/sfs/models/create_share_export_policy_response.py new file mode 100644 index 000000000..edeabd7b5 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/create_share_export_policy_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
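+
+    Illustrative sketch (hypothetical field values; the nested schema lives in
+    share_export_policy.py):
+
+        resp = CreateShareExportPolicyResponse.from_dict({"shareExportPolicy": {"name": "policy-1"}})
+        resp.share_export_policy.name  # -> "policy-1"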
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.share_export_policy import ShareExportPolicy + + +class CreateShareExportPolicyResponse(BaseModel): + """ + CreateShareExportPolicyResponse + """ # noqa: E501 + + share_export_policy: Optional[ShareExportPolicy] = Field( + default=None, description="Created Share Export Policy", alias="shareExportPolicy" + ) + __properties: ClassVar[List[str]] = ["shareExportPolicy"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateShareExportPolicyResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of share_export_policy + if self.share_export_policy: + _dict["shareExportPolicy"] = self.share_export_policy.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateShareExportPolicyResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "shareExportPolicy": ( + ShareExportPolicy.from_dict(obj["shareExportPolicy"]) + if obj.get("shareExportPolicy") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/create_share_payload.py b/services/sfs/src/stackit/sfs/models/create_share_payload.py new file mode 100644 index 000000000..2885c76d1 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/create_share_payload.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing_extensions import Self + + +class CreateSharePayload(BaseModel): + """ + CreateSharePayload + """ # noqa: E501 + + export_policy_name: Optional[StrictStr] = Field( + default=None, + description="Name of the Share Export Policy to use in the Share. 
Note that if this is not set, the Share can only be mounted in read only by clients with IPs matching the IP ACL of the Resource Pool hosting this Share. You can also assign a Share Export Policy after creating the Share", + alias="exportPolicyName", + ) + labels: Optional[Dict[str, StrictStr]] = Field( + default=None, + description="An optional object that represents the labels associated with the share keys are validated using the following regex '^[\\\\p{Ll}][\\\\p{Ll}\\\\p{N}_-]*$' and cannot be empty values are validated using the following regex '^[\\\\p{Ll}\\\\p{N}_-]*$'", + ) + name: Optional[StrictStr] = Field(default=None, description="Name of the Share") + space_hard_limit_gigabytes: Optional[StrictInt] = Field( + default=None, + description="Space hard limit for the Share. If zero, the Share will have access to the full space of the Resource Pool it lives in. (unit: gibibytes)", + alias="spaceHardLimitGigabytes", + ) + __properties: ClassVar[List[str]] = ["exportPolicyName", "labels", "name", "spaceHardLimitGigabytes"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateSharePayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if export_policy_name (nullable) is None + # and model_fields_set contains the field + if self.export_policy_name is None and "export_policy_name" in self.model_fields_set: + _dict["exportPolicyName"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateSharePayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "exportPolicyName": obj.get("exportPolicyName"), + "labels": obj.get("labels"), + "name": obj.get("name"), + "spaceHardLimitGigabytes": obj.get("spaceHardLimitGigabytes"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/create_share_response.py b/services/sfs/src/stackit/sfs/models/create_share_response.py new file mode 100644 index 000000000..346a789ee --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/create_share_response.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.share import Share + + +class CreateShareResponse(BaseModel): + """ + CreateShareResponse + """ # noqa: E501 + + share: Optional[Share] = Field(default=None, description="Created Share") + __properties: ClassVar[List[str]] = ["share"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateShareResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of share + if self.share: + _dict["share"] = self.share.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateShareResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({"share": Share.from_dict(obj["share"]) if obj.get("share") is not None else None}) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/error.py b/services/sfs/src/stackit/sfs/models/error.py new file mode 100644 index 000000000..95ff8091d --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/error.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Self + + +class Error(BaseModel): + """ + Error + """ # noqa: E501 + + error_description: Optional[Any] = Field( + default=None, description="Human-readable string of the error that occured" + ) + title: StrictStr = Field(description="Human-readable description of the error that occurred.") + type: StrictStr = Field( + description="URI Uniquely identifies the error type. It will be in the format of storage.stackit.cloud/ e.g. 
storage.stackit.cloud/validation-error" + ) + __properties: ClassVar[List[str]] = ["error_description", "title", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Error from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if error_description (nullable) is None + # and model_fields_set contains the field + if self.error_description is None and "error_description" in self.model_fields_set: + _dict["error_description"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Error from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + {"error_description": obj.get("error_description"), "title": obj.get("title"), "type": obj.get("type")} + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/get_resource_pool_response.py b/services/sfs/src/stackit/sfs/models/get_resource_pool_response.py new file mode 100644 index 000000000..9dbf783c7 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/get_resource_pool_response.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.resource_pool import ResourcePool + + +class GetResourcePoolResponse(BaseModel): + """ + GetResourcePoolResponse + """ # noqa: E501 + + resource_pool: Optional[ResourcePool] = Field(default=None, description="Resource Pool", alias="resourcePool") + __properties: ClassVar[List[str]] = ["resourcePool"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetResourcePoolResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of resource_pool + if self.resource_pool: + _dict["resourcePool"] = self.resource_pool.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetResourcePoolResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "resourcePool": ( + ResourcePool.from_dict(obj["resourcePool"]) if obj.get("resourcePool") is not None else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/get_resource_pool_snapshot_response.py b/services/sfs/src/stackit/sfs/models/get_resource_pool_snapshot_response.py new file mode 100644 index 000000000..be0e76530 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/get_resource_pool_snapshot_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.resource_pool_snapshot import ResourcePoolSnapshot + + +class GetResourcePoolSnapshotResponse(BaseModel): + """ + GetResourcePoolSnapshotResponse + """ # noqa: E501 + + resource_pool_snapshot: Optional[ResourcePoolSnapshot] = Field( + default=None, description="Resource Pool Snapshot", alias="resourcePoolSnapshot" + ) + __properties: ClassVar[List[str]] = ["resourcePoolSnapshot"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetResourcePoolSnapshotResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of resource_pool_snapshot + if self.resource_pool_snapshot: + _dict["resourcePoolSnapshot"] = self.resource_pool_snapshot.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetResourcePoolSnapshotResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "resourcePoolSnapshot": ( + ResourcePoolSnapshot.from_dict(obj["resourcePoolSnapshot"]) + if obj.get("resourcePoolSnapshot") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/get_share_export_policy_response.py b/services/sfs/src/stackit/sfs/models/get_share_export_policy_response.py new file mode 100644 index 000000000..79c449727 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/get_share_export_policy_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.share_export_policy import ShareExportPolicy + + +class GetShareExportPolicyResponse(BaseModel): + """ + GetShareExportPolicyResponse + """ # noqa: E501 + + share_export_policy: Optional[ShareExportPolicy] = Field( + default=None, description="Share Export Policies", alias="shareExportPolicy" + ) + __properties: ClassVar[List[str]] = ["shareExportPolicy"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetShareExportPolicyResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of share_export_policy + if self.share_export_policy: + _dict["shareExportPolicy"] = self.share_export_policy.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetShareExportPolicyResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "shareExportPolicy": ( + ShareExportPolicy.from_dict(obj["shareExportPolicy"]) + if obj.get("shareExportPolicy") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/get_share_response.py b/services/sfs/src/stackit/sfs/models/get_share_response.py new file mode 100644 index 000000000..721ba774a --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/get_share_response.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.share import Share + + +class GetShareResponse(BaseModel): + """ + GetShareResponse + """ # noqa: E501 + + share: Optional[Share] = Field(default=None, description="Share") + __properties: ClassVar[List[str]] = ["share"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetShareResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of share + if self.share: + _dict["share"] = self.share.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetShareResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({"share": Share.from_dict(obj["share"]) if obj.get("share") is not None else None}) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/google_protobuf_any.py b/services/sfs/src/stackit/sfs/models/google_protobuf_any.py new file mode 100644 index 000000000..166a7be27 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/google_protobuf_any.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Self + + +class GoogleProtobufAny(BaseModel): + """ + Contains an arbitrary serialized message along with a @type that describes the type of the serialized message. 
+ """ # noqa: E501 + + type: Optional[StrictStr] = Field(default=None, description="The type of the serialized message.", alias="@type") + additional_properties: Dict[str, Any] = {} + __properties: ClassVar[List[str]] = ["@type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleProtobufAny from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + * Fields in `self.additional_properties` are added to the output dict. + """ + excluded_fields: Set[str] = set( + [ + "additional_properties", + ] + ) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # puts key-value pairs in additional_properties in the top level + if self.additional_properties is not None: + for _key, _value in self.additional_properties.items(): + _dict[_key] = _value + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleProtobufAny from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({"@type": obj.get("@type")}) + # store additional fields in additional_properties + for _key in obj.keys(): + if _key not in cls.__properties: + _obj.additional_properties[_key] = obj.get(_key) + + return _obj diff --git a/services/sfs/src/stackit/sfs/models/list_performance_classes_response.py b/services/sfs/src/stackit/sfs/models/list_performance_classes_response.py new file mode 100644 index 000000000..dcacb3e7c --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/list_performance_classes_response.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.performance_class import PerformanceClass + + +class ListPerformanceClassesResponse(BaseModel): + """ + ListPerformanceClassesResponse + """ # noqa: E501 + + performance_classes: Optional[List[PerformanceClass]] = Field( + default=None, description="List of Performance Classes", alias="performanceClasses" + ) + __properties: ClassVar[List[str]] = ["performanceClasses"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ListPerformanceClassesResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in performance_classes (list) + _items = [] + if self.performance_classes: + for _item in self.performance_classes: + if _item: + _items.append(_item.to_dict()) + _dict["performanceClasses"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ListPerformanceClassesResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "performanceClasses": ( + [PerformanceClass.from_dict(_item) for _item in obj["performanceClasses"]] + if obj.get("performanceClasses") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/list_resource_pool_snapshots_response.py b/services/sfs/src/stackit/sfs/models/list_resource_pool_snapshots_response.py new file mode 100644 index 000000000..380503545 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/list_resource_pool_snapshots_response.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.resource_pool_snapshot import ResourcePoolSnapshot + + +class ListResourcePoolSnapshotsResponse(BaseModel): + """ + ListResourcePoolSnapshotsResponse + """ # noqa: E501 + + resource_pool_snapshots: Optional[List[ResourcePoolSnapshot]] = Field( + default=None, description="List of Resource Pool Snapshots", alias="resourcePoolSnapshots" + ) + __properties: ClassVar[List[str]] = ["resourcePoolSnapshots"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ListResourcePoolSnapshotsResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in resource_pool_snapshots (list) + _items = [] + if self.resource_pool_snapshots: + for _item in self.resource_pool_snapshots: + if _item: + _items.append(_item.to_dict()) + _dict["resourcePoolSnapshots"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ListResourcePoolSnapshotsResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "resourcePoolSnapshots": ( + [ResourcePoolSnapshot.from_dict(_item) for _item in obj["resourcePoolSnapshots"]] + if obj.get("resourcePoolSnapshots") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/list_resource_pools_response.py b/services/sfs/src/stackit/sfs/models/list_resource_pools_response.py new file mode 100644 index 000000000..4825c0851 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/list_resource_pools_response.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.resource_pool import ResourcePool + + +class ListResourcePoolsResponse(BaseModel): + """ + ListResourcePoolsResponse + """ # noqa: E501 + + resource_pools: Optional[List[ResourcePool]] = Field( + default=None, description="List of Resource Pools", alias="resourcePools" + ) + __properties: ClassVar[List[str]] = ["resourcePools"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ListResourcePoolsResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in resource_pools (list) + _items = [] + if self.resource_pools: + for _item in self.resource_pools: + if _item: + _items.append(_item.to_dict()) + _dict["resourcePools"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ListResourcePoolsResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "resourcePools": ( + [ResourcePool.from_dict(_item) for _item in obj["resourcePools"]] + if obj.get("resourcePools") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/list_share_export_policies_response.py b/services/sfs/src/stackit/sfs/models/list_share_export_policies_response.py new file mode 100644 index 000000000..0e4910ebf --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/list_share_export_policies_response.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.share_export_policy import ShareExportPolicy + + +class ListShareExportPoliciesResponse(BaseModel): + """ + ListShareExportPoliciesResponse + """ # noqa: E501 + + share_export_policies: Optional[List[ShareExportPolicy]] = Field( + default=None, description="List of Share Export Policies", alias="shareExportPolicies" + ) + __properties: ClassVar[List[str]] = ["shareExportPolicies"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ListShareExportPoliciesResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in share_export_policies (list) + _items = [] + if self.share_export_policies: + for _item in self.share_export_policies: + if _item: + _items.append(_item.to_dict()) + _dict["shareExportPolicies"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ListShareExportPoliciesResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "shareExportPolicies": ( + [ShareExportPolicy.from_dict(_item) for _item in obj["shareExportPolicies"]] + if obj.get("shareExportPolicies") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/list_shares_response.py b/services/sfs/src/stackit/sfs/models/list_shares_response.py new file mode 100644 index 000000000..0b0529695 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/list_shares_response.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.share import Share + + +class ListSharesResponse(BaseModel): + """ + ListSharesResponse + """ # noqa: E501 + + shares: Optional[List[Share]] = Field(default=None, description="List of Shares") + __properties: ClassVar[List[str]] = ["shares"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ListSharesResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in shares (list) + _items = [] + if self.shares: + for _item in self.shares: + if _item: + _items.append(_item.to_dict()) + _dict["shares"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ListSharesResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + {"shares": [Share.from_dict(_item) for _item in obj["shares"]] if obj.get("shares") is not None else None} + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/list_snapshot_schedules_response.py b/services/sfs/src/stackit/sfs/models/list_snapshot_schedules_response.py new file mode 100644 index 000000000..96a18b48e --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/list_snapshot_schedules_response.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.snapshot_schedule import SnapshotSchedule + + +class ListSnapshotSchedulesResponse(BaseModel): + """ + ListSnapshotSchedulesResponse + """ # noqa: E501 + + snapshot_schedules: Optional[List[SnapshotSchedule]] = Field( + default=None, description="List of Snapshot Schedules", alias="snapshotSchedules" + ) + __properties: ClassVar[List[str]] = ["snapshotSchedules"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ListSnapshotSchedulesResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in snapshot_schedules (list) + _items = [] + if self.snapshot_schedules: + for _item in self.snapshot_schedules: + if _item: + _items.append(_item.to_dict()) + _dict["snapshotSchedules"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ListSnapshotSchedulesResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "snapshotSchedules": ( + [SnapshotSchedule.from_dict(_item) for _item in obj["snapshotSchedules"]] + if obj.get("snapshotSchedules") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/performance_class.py b/services/sfs/src/stackit/sfs/models/performance_class.py new file mode 100644 index 000000000..5209423a5 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/performance_class.py @@ -0,0 +1,85 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing_extensions import Self + + +class PerformanceClass(BaseModel): + """ + PerformanceClass + """ # noqa: E501 + + iops: Optional[StrictInt] = Field(default=None, description="IOPS of the Performance Class") + name: Optional[StrictStr] = Field(default=None, description="Name of the Performance Class") + throughput: Optional[StrictInt] = Field(default=None, description="Throughput of the Performance Class") + __properties: ClassVar[List[str]] = ["iops", "name", "throughput"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PerformanceClass from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PerformanceClass from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + {"iops": obj.get("iops"), "name": obj.get("name"), "throughput": obj.get("throughput")} + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/resource_pool.py b/services/sfs/src/stackit/sfs/models/resource_pool.py new file mode 100644 index 000000000..6f18c1754 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/resource_pool.py @@ -0,0 +1,228 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from datetime import datetime +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + StrictBool, + StrictInt, + StrictStr, + field_validator, +) +from typing_extensions import Self + +from stackit.sfs.models.resource_pool_performance_class import ( + ResourcePoolPerformanceClass, +) +from stackit.sfs.models.resource_pool_space import ResourcePoolSpace +from stackit.sfs.models.snapshot_schedule import SnapshotSchedule + + +class ResourcePool(BaseModel): + """ + ResourcePool + """ # noqa: E501 + + availability_zone: Optional[StrictStr] = Field( + default=None, description="Name of the respective availability zone", alias="availabilityZone" + ) + count_shares: Optional[StrictInt] = Field( + default=None, description="Number of Shares in the Resource Pool", alias="countShares" + ) + created_at: Optional[datetime] = Field(default=None, alias="createdAt") + id: Optional[StrictStr] = Field(default=None, description="ID of the Resource Pool") + ip_acl: Optional[List[StrictStr]] = Field( + default=None, + description='List of IPs that can mount the Resource Pool in read-only; IPs must have a subnet mask (e.g. "172.16.0.0/24" for a range of IPs, or "172.16.0.250/32" for a specific IP)', + alias="ipAcl", + ) + labels: Optional[Dict[str, StrictStr]] = None + mount_path: Optional[StrictStr] = Field( + default=None, + description="Mount path of the Resource Pool, used to mount the Resource Pool Note that a Resource Pool can only be mounted in read-only", + alias="mountPath", + ) + name: Optional[StrictStr] = Field(default=None, description="Name of the Resource Pool") + performance_class: Optional[ResourcePoolPerformanceClass] = Field( + default=None, description="Information about the performance class", alias="performanceClass" + ) + performance_class_downgradable_at: Optional[datetime] = Field( + default=None, + description="Time when the performance class can be downgraded again.", + alias="performanceClassDowngradableAt", + ) + size_reducible_at: Optional[datetime] = Field( + default=None, description="Time when the size can be reduced again.", alias="sizeReducibleAt" + ) + snapshot_schedule: Optional[SnapshotSchedule] = Field(default=None, alias="snapshotSchedule") + snapshots_are_visible: Optional[StrictBool] = Field( + default=None, + description="Whether the .snapshot directory is visible when mounting the resource pool. Setting this value to false might prevent you from accessing the snapshots (e.g. for security reasons). Additionally, the access to the snapshots is always controlled by the export policy of the resource pool. 
That means, if snapshots are visible and the export policy allows for reading the resource pool, then it also allows reading the snapshot of all shares.", + alias="snapshotsAreVisible", + ) + space: Optional[ResourcePoolSpace] = Field(default=None, description="Space information") + state: Optional[StrictStr] = Field( + default=None, + description='State of the Resource Pool (possible values: ["pending", "creating", "created", "updating", "error", "deleting"])', + ) + __properties: ClassVar[List[str]] = [ + "availabilityZone", + "countShares", + "createdAt", + "id", + "ipAcl", + "labels", + "mountPath", + "name", + "performanceClass", + "performanceClassDowngradableAt", + "sizeReducibleAt", + "snapshotSchedule", + "snapshotsAreVisible", + "space", + "state", + ] + + @field_validator("created_at", mode="before") + def created_at_change_year_zero_to_one(cls, value): + """Workaround which prevents year 0 issue""" + if isinstance(value, str): + # Check for year "0000" at the beginning of the string + # This assumes common date formats like YYYY-MM-DDTHH:MM:SS+00:00 or YYYY-MM-DDTHH:MM:SSZ + if value.startswith("0000-01-01T") and re.match( + r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\+\d{2}:\d{2}|Z)$", value + ): + # Workaround: Replace "0000" with "0001" + return "0001" + value[4:] # Take "0001" and append the rest of the string + return value + + @field_validator("performance_class_downgradable_at", mode="before") + def performance_class_downgradable_at_change_year_zero_to_one(cls, value): + """Workaround which prevents year 0 issue""" + if isinstance(value, str): + # Check for year "0000" at the beginning of the string + # This assumes common date formats like YYYY-MM-DDTHH:MM:SS+00:00 or YYYY-MM-DDTHH:MM:SSZ + if value.startswith("0000-01-01T") and re.match( + r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\+\d{2}:\d{2}|Z)$", value + ): + # Workaround: Replace "0000" with "0001" + return "0001" + value[4:] # Take "0001" and append the rest of the string + return value + + @field_validator("size_reducible_at", mode="before") + def size_reducible_at_change_year_zero_to_one(cls, value): + """Workaround which prevents year 0 issue""" + if isinstance(value, str): + # Check for year "0000" at the beginning of the string + # This assumes common date formats like YYYY-MM-DDTHH:MM:SS+00:00 or YYYY-MM-DDTHH:MM:SSZ + if value.startswith("0000-01-01T") and re.match( + r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\+\d{2}:\d{2}|Z)$", value + ): + # Workaround: Replace "0000" with "0001" + return "0001" + value[4:] # Take "0001" and append the rest of the string + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ResourcePool from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of performance_class + if self.performance_class: + _dict["performanceClass"] = self.performance_class.to_dict() + # override the default output from pydantic by calling `to_dict()` of snapshot_schedule + if self.snapshot_schedule: + _dict["snapshotSchedule"] = self.snapshot_schedule.to_dict() + # override the default output from pydantic by calling `to_dict()` of space + if self.space: + _dict["space"] = self.space.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ResourcePool from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "availabilityZone": obj.get("availabilityZone"), + "countShares": obj.get("countShares"), + "createdAt": obj.get("createdAt"), + "id": obj.get("id"), + "ipAcl": obj.get("ipAcl"), + "labels": obj.get("labels"), + "mountPath": obj.get("mountPath"), + "name": obj.get("name"), + "performanceClass": ( + ResourcePoolPerformanceClass.from_dict(obj["performanceClass"]) + if obj.get("performanceClass") is not None + else None + ), + "performanceClassDowngradableAt": obj.get("performanceClassDowngradableAt"), + "sizeReducibleAt": obj.get("sizeReducibleAt"), + "snapshotSchedule": ( + SnapshotSchedule.from_dict(obj["snapshotSchedule"]) + if obj.get("snapshotSchedule") is not None + else None + ), + "snapshotsAreVisible": obj.get("snapshotsAreVisible"), + "space": ResourcePoolSpace.from_dict(obj["space"]) if obj.get("space") is not None else None, + "state": obj.get("state"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/resource_pool_performance_class.py b/services/sfs/src/stackit/sfs/models/resource_pool_performance_class.py new file mode 100644 index 000000000..635fbba83 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/resource_pool_performance_class.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing_extensions import Self + + +class ResourcePoolPerformanceClass(BaseModel): + """ + ResourcePoolPerformanceClass + """ # noqa: E501 + + name: Optional[StrictStr] = Field(default=None, description="Name of the performance class") + peak_iops: Optional[StrictInt] = Field( + default=None, + description="Max. IOPS of the Resource Pool. 
This is shared among all Shares in the Resource Pool.", + alias="peakIops", + ) + throughput: Optional[StrictInt] = Field(default=None, description="Throughput of the Resource Pool.") + __properties: ClassVar[List[str]] = ["name", "peakIops", "throughput"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ResourcePoolPerformanceClass from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ResourcePoolPerformanceClass from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + {"name": obj.get("name"), "peakIops": obj.get("peakIops"), "throughput": obj.get("throughput")} + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/resource_pool_snapshot.py b/services/sfs/src/stackit/sfs/models/resource_pool_snapshot.py new file mode 100644 index 000000000..47cafb08b --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/resource_pool_snapshot.py @@ -0,0 +1,136 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from datetime import datetime +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing_extensions import Self + + +class ResourcePoolSnapshot(BaseModel): + """ + ResourcePoolSnapshot + """ # noqa: E501 + + comment: Optional[StrictStr] = Field( + default=None, description="(optional) A comment to add more information about a snapshot" + ) + created_at: Optional[datetime] = Field(default=None, alias="createdAt") + logical_size_gigabytes: Optional[StrictInt] = Field( + default=None, + description="Represents the user-visible data size at the time of the snapshot in Gibibytes (e.g. 
what’s in the snapshot)", + alias="logicalSizeGigabytes", + ) + resource_pool_id: Optional[StrictStr] = Field( + default=None, description="ID of the Resource Pool of the Snapshot", alias="resourcePoolId" + ) + size_gigabytes: Optional[StrictInt] = Field( + default=None, + description="Reflects the actual storage footprint in the backend at snapshot time in Gibibytes (e.g. how much storage from the Resource Pool does it use).", + alias="sizeGigabytes", + ) + snapshot_name: Optional[StrictStr] = Field( + default=None, description="Name of the Resource Pool Snapshot", alias="snapshotName" + ) + __properties: ClassVar[List[str]] = [ + "comment", + "createdAt", + "logicalSizeGigabytes", + "resourcePoolId", + "sizeGigabytes", + "snapshotName", + ] + + @field_validator("created_at", mode="before") + def created_at_change_year_zero_to_one(cls, value): + """Workaround which prevents year 0 issue""" + if isinstance(value, str): + # Check for year "0000" at the beginning of the string + # This assumes common date formats like YYYY-MM-DDTHH:MM:SS+00:00 or YYYY-MM-DDTHH:MM:SSZ + if value.startswith("0000-01-01T") and re.match( + r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\+\d{2}:\d{2}|Z)$", value + ): + # Workaround: Replace "0000" with "0001" + return "0001" + value[4:] # Take "0001" and append the rest of the string + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ResourcePoolSnapshot from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
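+
+        Sketch of the nullable-field rule (assumed values):
+
+            ResourcePoolSnapshot(snapshot_name="s1").to_dict()
+            # -> {"snapshotName": "s1"}; the unset "comment" is omitted
+            ResourcePoolSnapshot(snapshot_name="s1", comment=None).to_dict()
+            # -> {"snapshotName": "s1", "comment": None}; an explicitly
+            #    assigned None is kept for the nullable "comment" field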
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if comment (nullable) is None + # and model_fields_set contains the field + if self.comment is None and "comment" in self.model_fields_set: + _dict["comment"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ResourcePoolSnapshot from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "comment": obj.get("comment"), + "createdAt": obj.get("createdAt"), + "logicalSizeGigabytes": obj.get("logicalSizeGigabytes"), + "resourcePoolId": obj.get("resourcePoolId"), + "sizeGigabytes": obj.get("sizeGigabytes"), + "snapshotName": obj.get("snapshotName"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/resource_pool_space.py b/services/sfs/src/stackit/sfs/models/resource_pool_space.py new file mode 100644 index 000000000..05f03d8f4 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/resource_pool_space.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set, Union + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + StrictFloat, + StrictInt, +) +from typing_extensions import Self + + +class ResourcePoolSpace(BaseModel): + """ + ResourcePoolSpace + """ # noqa: E501 + + available_gigabytes: Optional[Union[StrictFloat, StrictInt]] = Field( + default=None, + description="Available space of the Resource Pool (only available when retrieving a single Resource Pool by ID)", + alias="availableGigabytes", + ) + size_gigabytes: Optional[StrictInt] = Field( + default=None, description="Size of the Resource Pool in Gibibytes.", alias="sizeGigabytes" + ) + used_gigabytes: Optional[Union[StrictFloat, StrictInt]] = Field( + default=None, + description="Used space of the Resource Pool (only available when retrieving a single Resource Pool by ID)", + alias="usedGigabytes", + ) + __properties: ClassVar[List[str]] = ["availableGigabytes", "sizeGigabytes", "usedGigabytes"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ResourcePoolSpace from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if available_gigabytes (nullable) is None + # and model_fields_set contains the field + if self.available_gigabytes is None and "available_gigabytes" in self.model_fields_set: + _dict["availableGigabytes"] = None + + # set to None if used_gigabytes (nullable) is None + # and model_fields_set contains the field + if self.used_gigabytes is None and "used_gigabytes" in self.model_fields_set: + _dict["usedGigabytes"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ResourcePoolSpace from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "availableGigabytes": obj.get("availableGigabytes"), + "sizeGigabytes": obj.get("sizeGigabytes"), + "usedGigabytes": obj.get("usedGigabytes"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/share.py b/services/sfs/src/stackit/sfs/models/share.py new file mode 100644 index 000000000..3230d3a7e --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/share.py @@ -0,0 +1,151 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from datetime import datetime +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing_extensions import Self + +from stackit.sfs.models.share_export_policy import ShareExportPolicy + + +class Share(BaseModel): + """ + Share + """ # noqa: E501 + + created_at: Optional[datetime] = Field(default=None, alias="createdAt") + export_policy: Optional[ShareExportPolicy] = Field( + default=None, + description="Details of the Share Export Policy to use in the Share. Note that if this is not set, the Share can only be mounted in read only by clients with IPs matching the IP ACL of the Resource Pool hosting this Share", + alias="exportPolicy", + ) + id: Optional[StrictStr] = Field(default=None, description="ID of the Share") + labels: Optional[Dict[str, StrictStr]] = Field( + default=None, + description="An optional object that represents the labels associated with the share keys are validated using the following regex '^[\\\\p{Ll}][\\\\p{Ll}\\\\p{N}_-]*$' and cannot be empty values are validated using the following regex '^[\\\\p{Ll}\\\\p{N}_-]*$'", + ) + mount_path: Optional[StrictStr] = Field( + default=None, description="Mount path of the Share, used to mount the Share", alias="mountPath" + ) + name: Optional[StrictStr] = Field(default=None, description="Name of the Share") + space_hard_limit_gigabytes: Optional[StrictInt] = Field( + default=None, + description="Space hard limit for the Share. If zero, the Share will have access to the full space of the Resource Pool it lives in. 
(unit: gibibytes)",
+        alias="spaceHardLimitGigabytes",
+    )
+    state: Optional[StrictStr] = Field(
+        default=None,
+        description='State of the Share (possible values: ["pending", "creating", "created", "error", "deleting"])',
+    )
+    __properties: ClassVar[List[str]] = [
+        "createdAt",
+        "exportPolicy",
+        "id",
+        "labels",
+        "mountPath",
+        "name",
+        "spaceHardLimitGigabytes",
+        "state",
+    ]
+
+    @field_validator("created_at", mode="before")
+    def created_at_change_year_zero_to_one(cls, value):
+        """Workaround which prevents year 0 issue"""
+        if isinstance(value, str):
+            # Check for year "0000" at the beginning of the string
+            # This assumes common date formats like YYYY-MM-DDTHH:MM:SS+00:00 or YYYY-MM-DDTHH:MM:SSZ
+            if value.startswith("0000-01-01T") and re.match(
+                r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\+\d{2}:\d{2}|Z)$", value
+            ):
+                # Workaround: Replace "0000" with "0001"
+                return "0001" + value[4:]  # Take "0001" and append the rest of the string
+        return value
+
+    model_config = ConfigDict(
+        populate_by_name=True,
+        validate_assignment=True,
+        protected_namespaces=(),
+    )
+
+    def to_str(self) -> str:
+        """Returns the string representation of the model using alias"""
+        return pprint.pformat(self.model_dump(by_alias=True))
+
+    def to_json(self) -> str:
+        """Returns the JSON representation of the model using alias"""
+        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+        return json.dumps(self.to_dict())
+
+    @classmethod
+    def from_json(cls, json_str: str) -> Optional[Self]:
+        """Create an instance of Share from a JSON string"""
+        return cls.from_dict(json.loads(json_str))
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Return the dictionary representation of the model using alias.
+
+        This has the following differences from calling pydantic's
+        `self.model_dump(by_alias=True)`:
+
+        * `None` is only added to the output dict for nullable fields that
+          were set at model initialization. Other fields with value `None`
+          are ignored.
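+
+        For example (values illustrative), a nested export policy is
+        serialized through its own `to_dict()`:
+
+            share = Share.from_dict({"name": "media", "exportPolicy": {"name": "default", "rules": []}})
+            share.to_dict()
+            # -> {"name": "media", "exportPolicy": {"name": "default", "rules": []}}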
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of export_policy + if self.export_policy: + _dict["exportPolicy"] = self.export_policy.to_dict() + # set to None if export_policy (nullable) is None + # and model_fields_set contains the field + if self.export_policy is None and "export_policy" in self.model_fields_set: + _dict["exportPolicy"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Share from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "createdAt": obj.get("createdAt"), + "exportPolicy": ( + ShareExportPolicy.from_dict(obj["exportPolicy"]) if obj.get("exportPolicy") is not None else None + ), + "id": obj.get("id"), + "labels": obj.get("labels"), + "mountPath": obj.get("mountPath"), + "name": obj.get("name"), + "spaceHardLimitGigabytes": obj.get("spaceHardLimitGigabytes"), + "state": obj.get("state"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/share_export_policy.py b/services/sfs/src/stackit/sfs/models/share_export_policy.py new file mode 100644 index 000000000..acf622f77 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/share_export_policy.py @@ -0,0 +1,131 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from datetime import datetime +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing_extensions import Self + +from stackit.sfs.models.share_export_policy_rule import ShareExportPolicyRule + + +class ShareExportPolicy(BaseModel): + """ + ShareExportPolicy + """ # noqa: E501 + + created_at: Optional[datetime] = Field(default=None, alias="createdAt") + id: Optional[StrictStr] = Field(default=None, description="ID of the Share Export Policy") + labels: Optional[Dict[str, StrictStr]] = Field( + default=None, + description="An optional object that represents the labels associated with the share export policy keys are validated using the following regex '^[\\\\p{Ll}][\\\\p{Ll}\\\\p{N}_-]*$' and cannot be empty values are validated using the following regex '^[\\\\p{Ll}\\\\p{N}_-]*$'", + ) + name: Optional[StrictStr] = Field(default=None, description="Name of the Share Export Policy") + rules: Optional[List[ShareExportPolicyRule]] = Field( + default=None, + description='List of rules of the Share Export Policy. 
The order of the rules within the array does not matter - what matters is the field "order" within each rule', + ) + shares_using_export_policy: Optional[StrictInt] = Field( + default=None, description="Number of Shares using this Share Export Policy", alias="sharesUsingExportPolicy" + ) + __properties: ClassVar[List[str]] = ["createdAt", "id", "labels", "name", "rules", "sharesUsingExportPolicy"] + + @field_validator("created_at", mode="before") + def created_at_change_year_zero_to_one(cls, value): + """Workaround which prevents year 0 issue""" + if isinstance(value, str): + # Check for year "0000" at the beginning of the string + # This assumes common date formats like YYYY-MM-DDTHH:MM:SS+00:00 or YYYY-MM-DDTHH:MM:SSZ + if value.startswith("0000-01-01T") and re.match( + r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\+\d{2}:\d{2}|Z)$", value + ): + # Workaround: Replace "0000" with "0001" + return "0001" + value[4:] # Take "0001" and append the rest of the string + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ShareExportPolicy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in rules (list) + _items = [] + if self.rules: + for _item in self.rules: + if _item: + _items.append(_item.to_dict()) + _dict["rules"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ShareExportPolicy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "createdAt": obj.get("createdAt"), + "id": obj.get("id"), + "labels": obj.get("labels"), + "name": obj.get("name"), + "rules": ( + [ShareExportPolicyRule.from_dict(_item) for _item in obj["rules"]] + if obj.get("rules") is not None + else None + ), + "sharesUsingExportPolicy": obj.get("sharesUsingExportPolicy"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/share_export_policy_rule.py b/services/sfs/src/stackit/sfs/models/share_export_policy_rule.py new file mode 100644 index 000000000..0489dd4b9 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/share_export_policy_rule.py @@ -0,0 +1,153 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. 
+ + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from datetime import datetime +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + StrictBool, + StrictInt, + StrictStr, + field_validator, +) +from typing_extensions import Self + + +class ShareExportPolicyRule(BaseModel): + """ + ShareExportPolicyRule + """ # noqa: E501 + + created_at: Optional[datetime] = Field(default=None, alias="createdAt") + description: Optional[StrictStr] = Field(default=None, description="Description of the Rule (optional)") + id: Optional[StrictStr] = Field(default=None, description="ID of the Share Export Policy Rule") + ip_acl: Optional[List[StrictStr]] = Field( + default=None, + description='IP access control list, where IPs must have a subnet mask (e.g. "172.16.0.0/24" for a range of IPs, or "172.16.0.251/32" for a specific IP) This array cannot be empty', + alias="ipAcl", + ) + order: Optional[StrictInt] = Field( + default=None, + description="Order of the rule within a Share Export Policy. The order is used so that when a client IP matches multiple rules, the first rule is applied This field must be set", + ) + read_only: Optional[StrictBool] = Field( + default=None, + description="Flag to indicate if client IPs matching this rule can only mount the share in read only mode Default: false", + alias="readOnly", + ) + set_uuid: Optional[StrictBool] = Field( + default=None, description="Flag to honor set UUID Default: false", alias="setUuid" + ) + super_user: Optional[StrictBool] = Field( + default=None, + description="Flag to indicate if client IPs matching this rule have root access on the Share Default: true", + alias="superUser", + ) + __properties: ClassVar[List[str]] = [ + "createdAt", + "description", + "id", + "ipAcl", + "order", + "readOnly", + "setUuid", + "superUser", + ] + + @field_validator("created_at", mode="before") + def created_at_change_year_zero_to_one(cls, value): + """Workaround which prevents year 0 issue""" + if isinstance(value, str): + # Check for year "0000" at the beginning of the string + # This assumes common date formats like YYYY-MM-DDTHH:MM:SS+00:00 or YYYY-MM-DDTHH:MM:SSZ + if value.startswith("0000-01-01T") and re.match( + r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\+\d{2}:\d{2}|Z)$", value + ): + # Workaround: Replace "0000" with "0001" + return "0001" + value[4:] # Take "0001" and append the rest of the string + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ShareExportPolicyRule from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if description (nullable) is None + # and model_fields_set contains the field + if self.description is None and "description" in self.model_fields_set: + _dict["description"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ShareExportPolicyRule from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "createdAt": obj.get("createdAt"), + "description": obj.get("description"), + "id": obj.get("id"), + "ipAcl": obj.get("ipAcl"), + "order": obj.get("order"), + "readOnly": obj.get("readOnly"), + "setUuid": obj.get("setUuid"), + "superUser": obj.get("superUser"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/snapshot_schedule.py b/services/sfs/src/stackit/sfs/models/snapshot_schedule.py new file mode 100644 index 000000000..c39999338 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/snapshot_schedule.py @@ -0,0 +1,85 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Self + + +class SnapshotSchedule(BaseModel): + """ + SnapshotSchedule + """ # noqa: E501 + + cron: Optional[StrictStr] = Field( + default=None, + description="Cron of the Snapshot Schedule (follows the cron schedule expression in Unix-like systems)", + ) + name: Optional[StrictStr] = Field(default=None, description="Name of the Snapshot Schedule") + __properties: ClassVar[List[str]] = ["cron", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SnapshotSchedule from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
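+
+        For example (values illustrative):
+
+            schedule = SnapshotSchedule(name="nightly", cron="0 2 * * *")
+            schedule.to_dict()
+            # -> {"cron": "0 2 * * *", "name": "nightly"}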
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SnapshotSchedule from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({"cron": obj.get("cron"), "name": obj.get("name")}) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/status.py b/services/sfs/src/stackit/sfs/models/status.py new file mode 100644 index 000000000..42535aa38 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/status.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing_extensions import Self + +from stackit.sfs.models.google_protobuf_any import GoogleProtobufAny + + +class Status(BaseModel): + """ + The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors). + """ # noqa: E501 + + code: Optional[StrictInt] = Field( + default=None, + description="The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code].", + ) + details: Optional[List[GoogleProtobufAny]] = Field( + default=None, + description="A list of messages that carry the error details. There is a common set of message types for APIs to use.", + ) + message: Optional[StrictStr] = Field( + default=None, + description="A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client.", + ) + __properties: ClassVar[List[str]] = ["code", "details", "message"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Status from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in details (list) + _items = [] + if self.details: + for _item in self.details: + if _item: + _items.append(_item.to_dict()) + _dict["details"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Status from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "code": obj.get("code"), + "details": ( + [GoogleProtobufAny.from_dict(_item) for _item in obj["details"]] + if obj.get("details") is not None + else None + ), + "message": obj.get("message"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/update_resource_pool_payload.py b/services/sfs/src/stackit/sfs/models/update_resource_pool_payload.py new file mode 100644 index 000000000..2e259d654 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/update_resource_pool_payload.py @@ -0,0 +1,133 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + StrictBool, + StrictInt, + StrictStr, +) +from typing_extensions import Self + + +class UpdateResourcePoolPayload(BaseModel): + """ + UpdateResourcePoolPayload + """ # noqa: E501 + + ip_acl: Optional[List[StrictStr]] = Field( + default=None, + description='(optional) List of IPs that can mount the Resource Pool in read-only; IPs must have a subnet mask (e.g. "172.16.0.0/24" for a range of IPs, or "172.16.0.250/32" for a specific IP)', + alias="ipAcl", + ) + labels: Optional[Dict[str, StrictStr]] = Field( + default=None, + description="An optional object that represents the labels associated with the resource pool keys are validated using the following regex '^[\\\\p{Ll}][\\\\p{Ll}\\\\p{N}_-]*$' and cannot be empty values are validated using the following regex '^[\\\\p{Ll}\\\\p{N}_-]*$'", + ) + performance_class: Optional[StrictStr] = Field( + default=None, description="(optional) Name of the performance class", alias="performanceClass" + ) + size_gigabytes: Optional[StrictInt] = Field( + default=None, description="(optional) Size of the Resource Pool (unit: gigabytes)", alias="sizeGigabytes" + ) + snapshot_schedule_name: Optional[StrictStr] = Field( + default=None, + description="(optional) Name of the Snapshot Schedule to use If not set, the Snapshot Schedule is not updated If set to an empty string, the Snapshot Schedule is removed", + alias="snapshotScheduleName", + ) + snapshots_are_visible: Optional[StrictBool] = Field( + default=None, + description="Whether the .snapshot directory is visible when mounting the resource pool. Setting this value to false might prevent you from accessing the snapshots (e.g. for security reasons). Additionally, the access to the snapshots is always controlled by the export policy of the resource pool. 
That means, if snapshots are visible and the export policy allows for reading the resource pool, then it also allows reading the snapshot of all shares.", + alias="snapshotsAreVisible", + ) + __properties: ClassVar[List[str]] = [ + "ipAcl", + "labels", + "performanceClass", + "sizeGigabytes", + "snapshotScheduleName", + "snapshotsAreVisible", + ] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UpdateResourcePoolPayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if size_gigabytes (nullable) is None + # and model_fields_set contains the field + if self.size_gigabytes is None and "size_gigabytes" in self.model_fields_set: + _dict["sizeGigabytes"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UpdateResourcePoolPayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "ipAcl": obj.get("ipAcl"), + "labels": obj.get("labels"), + "performanceClass": obj.get("performanceClass"), + "sizeGigabytes": obj.get("sizeGigabytes"), + "snapshotScheduleName": obj.get("snapshotScheduleName"), + "snapshotsAreVisible": obj.get("snapshotsAreVisible"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/update_resource_pool_response.py b/services/sfs/src/stackit/sfs/models/update_resource_pool_response.py new file mode 100644 index 000000000..6cdf251fa --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/update_resource_pool_response.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
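+
+    A minimal sketch (values illustrative) of the snapshot-schedule
+    semantics documented on UpdateResourcePoolPayload: an empty string
+    removes the schedule, while omitting the field leaves it unchanged.
+
+        payload = UpdateResourcePoolPayload(snapshot_schedule_name="", snapshots_are_visible=False)
+        payload.to_dict()
+        # -> {"snapshotScheduleName": "", "snapshotsAreVisible": False}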
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.resource_pool import ResourcePool + + +class UpdateResourcePoolResponse(BaseModel): + """ + UpdateResourcePoolResponse + """ # noqa: E501 + + resource_pool: Optional[ResourcePool] = Field( + default=None, description="Updated Resource Pool", alias="resourcePool" + ) + __properties: ClassVar[List[str]] = ["resourcePool"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UpdateResourcePoolResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of resource_pool + if self.resource_pool: + _dict["resourcePool"] = self.resource_pool.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UpdateResourcePoolResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "resourcePool": ( + ResourcePool.from_dict(obj["resourcePool"]) if obj.get("resourcePool") is not None else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/update_share_export_policy_body_rule.py b/services/sfs/src/stackit/sfs/models/update_share_export_policy_body_rule.py new file mode 100644 index 000000000..8c63fb20e --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/update_share_export_policy_body_rule.py @@ -0,0 +1,124 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + StrictBool, + StrictInt, + StrictStr, +) +from typing_extensions import Self + + +class UpdateShareExportPolicyBodyRule(BaseModel): + """ + UpdateShareExportPolicyBodyRule + """ # noqa: E501 + + description: Optional[StrictStr] = Field(default=None, description="Description of the Rule (optional)") + ip_acl: Optional[List[StrictStr]] = Field( + default=None, + description='IP access control list, where IPs must have a subnet mask (e.g. "172.16.0.0/24" for a range of IPs, or "172.16.0.250/32" for a specific IP) This array cannot be empty', + alias="ipAcl", + ) + order: Optional[StrictInt] = Field( + default=None, + description="Order of the rule within a Share Export Policy. The order is used so that when a client IP matches multiple rules, the first rule is applied This field must be set", + ) + read_only: Optional[StrictBool] = Field( + default=False, + description="Flag to indicate if client IPs matching this rule can only mount the share in read only mode Default: false", + alias="readOnly", + ) + set_uuid: Optional[StrictBool] = Field( + default=False, description="Flag to honor set UUID Default: false", alias="setUuid" + ) + super_user: Optional[StrictBool] = Field( + default=True, + description="Flag to indicate if client IPs matching this rule have root access on the Share Default: true", + alias="superUser", + ) + __properties: ClassVar[List[str]] = ["description", "ipAcl", "order", "readOnly", "setUuid", "superUser"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UpdateShareExportPolicyBodyRule from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if description (nullable) is None + # and model_fields_set contains the field + if self.description is None and "description" in self.model_fields_set: + _dict["description"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UpdateShareExportPolicyBodyRule from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "description": obj.get("description"), + "ipAcl": obj.get("ipAcl"), + "order": obj.get("order"), + "readOnly": obj.get("readOnly") if obj.get("readOnly") is not None else False, + "setUuid": obj.get("setUuid") if obj.get("setUuid") is not None else False, + "superUser": obj.get("superUser") if obj.get("superUser") is not None else True, + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/update_share_export_policy_payload.py b/services/sfs/src/stackit/sfs/models/update_share_export_policy_payload.py new file mode 100644 index 000000000..0c3c02c64 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/update_share_export_policy_payload.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Self + +from stackit.sfs.models.update_share_export_policy_body_rule import ( + UpdateShareExportPolicyBodyRule, +) + + +class UpdateShareExportPolicyPayload(BaseModel): + """ + UpdateShareExportPolicyPayload + """ # noqa: E501 + + labels: Optional[Dict[str, StrictStr]] = Field( + default=None, + description="An optional object that represents the labels associated with the share export policy keys are validated using the following regex '^[\\\\p{Ll}][\\\\p{Ll}\\\\p{N}_-]*$' and cannot be empty values are validated using the following regex '^[\\\\p{Ll}\\\\p{N}_-]*$'", + ) + rules: Optional[List[UpdateShareExportPolicyBodyRule]] = Field( + default=None, + description='List of rules of the Share Export Policy. The order of the rules within the array does not matter - what matters is the field "order" within each rule. The whole set of rules needs to be sent in the same request, e.g. if 1 rule is sent, the share export policy will have just that 1 rule, as opposed to extending the existing set of rules with the 1 rule that was sent Important note: the array of rules passed always overwrites the rule array (i.e. 
sending an empty array removes all rules)', + ) + __properties: ClassVar[List[str]] = ["labels", "rules"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UpdateShareExportPolicyPayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in rules (list) + _items = [] + if self.rules: + for _item in self.rules: + if _item: + _items.append(_item.to_dict()) + _dict["rules"] = _items + # set to None if rules (nullable) is None + # and model_fields_set contains the field + if self.rules is None and "rules" in self.model_fields_set: + _dict["rules"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UpdateShareExportPolicyPayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "labels": obj.get("labels"), + "rules": ( + [UpdateShareExportPolicyBodyRule.from_dict(_item) for _item in obj["rules"]] + if obj.get("rules") is not None + else None + ), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/update_share_export_policy_response.py b/services/sfs/src/stackit/sfs/models/update_share_export_policy_response.py new file mode 100644 index 000000000..050e91358 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/update_share_export_policy_response.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
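+
+    A minimal sketch (values illustrative) of the overwrite semantics
+    documented on UpdateShareExportPolicyPayload: the rules array always
+    replaces the existing set, so an empty array removes all rules.
+
+        payload = UpdateShareExportPolicyPayload(rules=[])
+        payload.to_dict()
+        # -> {"rules": []}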
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.share_export_policy import ShareExportPolicy + + +class UpdateShareExportPolicyResponse(BaseModel): + """ + UpdateShareExportPolicyResponse + """ # noqa: E501 + + share_export_policy: Optional[ShareExportPolicy] = Field(default=None, alias="shareExportPolicy") + __properties: ClassVar[List[str]] = ["shareExportPolicy"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UpdateShareExportPolicyResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of share_export_policy + if self.share_export_policy: + _dict["shareExportPolicy"] = self.share_export_policy.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UpdateShareExportPolicyResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "shareExportPolicy": ( + ShareExportPolicy.from_dict(obj["shareExportPolicy"]) + if obj.get("shareExportPolicy") is not None + else None + ) + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/update_share_payload.py b/services/sfs/src/stackit/sfs/models/update_share_payload.py new file mode 100644 index 000000000..4f5c21059 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/update_share_payload.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing_extensions import Self + + +class UpdateSharePayload(BaseModel): + """ + UpdateSharePayload + """ # noqa: E501 + + export_policy_name: Optional[StrictStr] = Field( + default=None, + description='Name of the Share Export Policy to use in the Share. 
The behavior depends on the value: - If not set (null): Keep the existing export policy (if any) - If set to empty string (""): Remove the existing export policy - If set to a policy name: Update to use the specified policy, creating a new association if none exists', + alias="exportPolicyName", + ) + labels: Optional[Dict[str, StrictStr]] = Field( + default=None, + description="An optional object that represents the labels associated with the share keys are validated using the following regex '^[\\\\p{Ll}][\\\\p{Ll}\\\\p{N}_-]*$' and cannot be empty values are validated using the following regex '^[\\\\p{Ll}\\\\p{N}_-]*$'", + ) + space_hard_limit_gigabytes: Optional[StrictInt] = Field( + default=None, + description="Space hard limit for the Share. If zero, the Share will have access to the full space of the Resource Pool it lives in. (unit: gibibytes)", + alias="spaceHardLimitGigabytes", + ) + __properties: ClassVar[List[str]] = ["exportPolicyName", "labels", "spaceHardLimitGigabytes"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UpdateSharePayload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if export_policy_name (nullable) is None + # and model_fields_set contains the field + if self.export_policy_name is None and "export_policy_name" in self.model_fields_set: + _dict["exportPolicyName"] = None + + # set to None if space_hard_limit_gigabytes (nullable) is None + # and model_fields_set contains the field + if self.space_hard_limit_gigabytes is None and "space_hard_limit_gigabytes" in self.model_fields_set: + _dict["spaceHardLimitGigabytes"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UpdateSharePayload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "exportPolicyName": obj.get("exportPolicyName"), + "labels": obj.get("labels"), + "spaceHardLimitGigabytes": obj.get("spaceHardLimitGigabytes"), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/update_share_response.py b/services/sfs/src/stackit/sfs/models/update_share_response.py new file mode 100644 index 000000000..24dbfa501 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/update_share_response.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. 
+ + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self + +from stackit.sfs.models.share import Share + + +class UpdateShareResponse(BaseModel): + """ + UpdateShareResponse + """ # noqa: E501 + + share: Optional[Share] = Field(default=None, description="Updated Share") + __properties: ClassVar[List[str]] = ["share"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UpdateShareResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of share + if self.share: + _dict["share"] = self.share.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UpdateShareResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({"share": Share.from_dict(obj["share"]) if obj.get("share") is not None else None}) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/validation_error.py b/services/sfs/src/stackit/sfs/models/validation_error.py new file mode 100644 index 000000000..215f2cba2 --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/validation_error.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Self + +from stackit.sfs.models.validation_error_field import ValidationErrorField + + +class ValidationError(BaseModel): + """ + ValidationError + """ # noqa: E501 + + error_description: Optional[Any] = Field( + default=None, description="Human-readable string of the error that occured" + ) + title: StrictStr = Field(description="Human-readable description of the error that occurred.") + type: StrictStr = Field( + description="URI Uniquely identifies the error type. It will be in the format of storage.stackit.cloud/ e.g. storage.stackit.cloud/validation-error" + ) + fields: List[ValidationErrorField] + __properties: ClassVar[List[str]] = ["error_description", "title", "type", "fields"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ValidationError from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in fields (list) + _items = [] + if self.fields: + for _item in self.fields: + if _item: + _items.append(_item.to_dict()) + _dict["fields"] = _items + # set to None if error_description (nullable) is None + # and model_fields_set contains the field + if self.error_description is None and "error_description" in self.model_fields_set: + _dict["error_description"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ValidationError from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "error_description": obj.get("error_description"), + "title": obj.get("title"), + "type": obj.get("type"), + "fields": ( + [ValidationErrorField.from_dict(_item) for _item in obj["fields"]] + if obj.get("fields") is not None + else None + ), + } + ) + return _obj diff --git a/services/sfs/src/stackit/sfs/models/validation_error_field.py b/services/sfs/src/stackit/sfs/models/validation_error_field.py new file mode 100644 index 000000000..daf2e1f0c --- /dev/null +++ b/services/sfs/src/stackit/sfs/models/validation_error_field.py @@ -0,0 +1,84 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. 
+ + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +from __future__ import annotations + +import json +import pprint +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Self + + +class ValidationErrorField(BaseModel): + """ + ValidationErrorField + """ # noqa: E501 + + var_field: StrictStr = Field( + description="The name of the field in the request that has failed the validation", alias="field" + ) + reason: StrictStr = Field(description="The reason why the validation failed") + __properties: ClassVar[List[str]] = ["field", "reason"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ValidationErrorField from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ValidationErrorField from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({"field": obj.get("field"), "reason": obj.get("reason")}) + return _obj diff --git a/services/sfs/src/stackit/sfs/py.typed b/services/sfs/src/stackit/sfs/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/services/sfs/src/stackit/sfs/rest.py b/services/sfs/src/stackit/sfs/rest.py new file mode 100644 index 000000000..ae68b45bc --- /dev/null +++ b/services/sfs/src/stackit/sfs/rest.py @@ -0,0 +1,148 @@ +# coding: utf-8 + +""" + STACKIT File Storage (SFS) + + API used to create and manage NFS Shares. + + The version of the OpenAPI document: 1beta.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import io +import json +import re + +import requests +from stackit.core.authorization import Authorization +from stackit.core.configuration import Configuration + +from stackit.sfs.exceptions import ApiException, ApiValueError + + +RESTResponseType = requests.Response + + +class RESTResponse(io.IOBase): + + def __init__(self, resp) -> None: + self.response = resp + self.status = resp.status_code + self.reason = resp.reason + self.data = None + + def read(self): + if self.data is None: + self.data = self.response.content + return self.data + + def getheaders(self): + """Returns a dictionary of the response headers.""" + return self.response.headers + + def getheader(self, name, default=None): + """Returns a given response header.""" + return self.response.headers.get(name, default) + + +class RESTClientObject: + def __init__(self, config: Configuration) -> None: + self.session = config.custom_http_session if config.custom_http_session else requests.Session() + authorization = Authorization(config) + self.session.auth = authorization.auth_method + + def request(self, method, url, headers=None, body=None, post_params=None, _request_timeout=None): + """Perform requests. + + :param method: http request method + :param url: http request url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + """ + method = method.upper() + if method not in ["GET", "HEAD", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"]: + raise ValueError("Method %s not allowed", method) + + if post_params and body: + raise ApiValueError("body parameter cannot be used with post_params parameter.") + + post_params = post_params or {} + headers = headers or {} + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]: + + # no content type provided or payload is json + content_type = headers.get("Content-Type") + if not content_type or re.search("json", content_type, re.IGNORECASE): + request_body = None + if body is not None: + request_body = json.dumps(body) + r = self.session.request( + method, + url, + data=request_body, + headers=headers, + ) + elif content_type == "application/x-www-form-urlencoded": + r = self.session.request( + method, + url, + params=post_params, + headers=headers, + ) + elif content_type == "multipart/form-data": + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. + del headers["Content-Type"] + # Ensures that dict objects are serialized + post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a, b) for a, b in post_params] + r = self.session.request( + method, + url, + files=post_params, + headers=headers, + ) + # Pass a `string` parameter directly in the body to support + # other content types than JSON when `body` argument is + # provided in serialized form. 
+                elif isinstance(body, str) or isinstance(body, bytes):
+                    r = self.session.request(
+                        method,
+                        url,
+                        data=body,
+                        headers=headers,
+                        timeout=_request_timeout,
+                    )
+                elif content_type.startswith("text/") and isinstance(body, bool):
+                    request_body = "true" if body else "false"
+                    r = self.session.request(method, url, data=request_body, headers=headers, timeout=_request_timeout)
+                else:
+                    # Cannot generate the request from given parameters
+                    msg = """Cannot prepare a request message for provided
+                             arguments. Please check that your arguments match
+                             declared content type."""
+                    raise ApiException(status=0, reason=msg)
+            # For `GET`, `HEAD`
+            else:
+                r = self.session.request(
+                    method,
+                    url,
+                    params={},
+                    headers=headers,
+                    timeout=_request_timeout,
+                )
+        except requests.exceptions.SSLError as e:
+            msg = "\n".join([type(e).__name__, str(e)])
+            raise ApiException(status=0, reason=msg)
+
+        return RESTResponse(r)
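
A quick sanity check of the generated ValidationError / ValidationErrorField pair above. This is a minimal sketch, not part of the generated patch: the title, field name, and reason strings are illustrative assumptions, and only the storage.stackit.cloud/validation-error type value comes from the field description in the model.

    from stackit.sfs.models.validation_error import ValidationError
    from stackit.sfs.models.validation_error_field import ValidationErrorField

    # Build an error by hand; `field="name"` uses the JSON alias that maps to
    # the `var_field` attribute (populate_by_name also accepts var_field=).
    err = ValidationError(
        title="Request validation failed",  # assumed value
        type="storage.stackit.cloud/validation-error",
        fields=[ValidationErrorField(field="name", reason="must not be empty")],
    )

    # to_dict() emits aliases and drops the unset nullable error_description
    # instead of serializing it as null (see the to_dict docstring above).
    d = err.to_dict()
    assert d["fields"][0]["field"] == "name"
    assert "error_description" not in d

    # from_json() maps the JSON key "field" back onto var_field.
    same = ValidationError.from_json(err.to_json())
    assert same.fields[0].var_field == "name"

Note that to_json() deliberately delegates to to_dict() rather than model_dump_json, precisely so the alias and nullable handling shown here survives the round trip.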
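The rest.py layer can be exercised the same way. A minimal sketch, assuming outbound network access; the URL is illustrative only, and since a real RESTClientObject additionally needs a stackit-core Configuration for authentication, only the RESTResponse wrapper is shown here:

    import requests

    from stackit.sfs.rest import RESTResponse

    # Wrap a plain requests response; read() caches response.content so
    # repeated calls do not touch the underlying response again.
    resp = RESTResponse(requests.get("https://example.com", timeout=10))
    body = resp.read()
    print(resp.status, resp.reason)               # mapped from status_code / reason
    print(resp.getheader("Content-Type", "n/a"))  # single header with a default
    print(len(body), "bytes,", len(resp.getheaders()), "headers")

RESTClientObject.request() returns exactly this wrapper, so the rest of the generated client consumes status, read(), and getheaders() rather than a raw requests.Response.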