From 9b4f7a3617cdc2dcbec20eb39f63f4732505678d Mon Sep 17 00:00:00 2001 From: Henk Berendsen <61596108+hb140502@users.noreply.github.com> Date: Fri, 24 Feb 2023 11:01:05 +0100 Subject: [PATCH 01/32] AWS synchronisation button (#8) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. * Removed unnecessary docstrings. --- website/projects/admin.py | 8 ++++++++ website/projects/awssync.py | 15 +++++++++++++++ .../templates/admin/projects/change_list.html | 1 + website/projects/tests/test_admin.py | 6 ++++++ website/projects/tests/test_awssync.py | 14 ++++++++++++++ 5 files changed, 44 insertions(+) create mode 100644 website/projects/awssync.py create mode 100644 website/projects/tests/test_awssync.py diff --git a/website/projects/admin.py b/website/projects/admin.py index 0a39d439..7fae8a6d 100644 --- a/website/projects/admin.py +++ b/website/projects/admin.py @@ -12,6 +12,7 @@ from mailing_lists.models import MailingList +from projects.awssync import AWSSync from projects.forms import ProjectAdminForm, RepositoryInlineForm from projects.githubsync import GitHubSync from projects.models import Client, Project, Repository @@ -171,6 +172,12 @@ def synchronise_current_projects_to_GitHub(self, request): ], ) + def synchronise_to_AWS(self, request): + """Synchronise to Amazon Web Services.""" + sync = AWSSync() + sync.button_pressed() + return redirect("admin:projects_project_changelist") + def get_urls(self): """Get admin urls.""" urls = super().get_urls() @@ -180,6 +187,7 @@ def get_urls(self): self.admin_site.admin_view(self.synchronise_current_projects_to_GitHub), 
name="synchronise_to_github", ), + path("sync-to-aws/", self.admin_site.admin_view(self.synchronise_to_AWS), name="synchronise_to_aws"), ] return custom_urls + urls diff --git a/website/projects/awssync.py b/website/projects/awssync.py new file mode 100644 index 00000000..6ebc8e1a --- /dev/null +++ b/website/projects/awssync.py @@ -0,0 +1,15 @@ +class AWSSync: + """Synchronise with Amazon Web Services.""" + + def __init__(self): + """Create an AWSSync instance.""" + print("Created AWSSync instance") + + def button_pressed(self): + """ + Print debug message to show that the button has been pressed. + + :return: True if function executes successfully + """ + print("Pressed button") + return True diff --git a/website/projects/templates/admin/projects/change_list.html b/website/projects/templates/admin/projects/change_list.html index 82da6c2e..9e2f1890 100644 --- a/website/projects/templates/admin/projects/change_list.html +++ b/website/projects/templates/admin/projects/change_list.html @@ -4,6 +4,7 @@ {% block object-tools-items %}
  • Synchronize projects of the current semester to GitHub + Synchronize projects of the current semester to AWS
  • {{ block.super }} {% endblock %} diff --git a/website/projects/tests/test_admin.py b/website/projects/tests/test_admin.py index d8778d0c..25f0328a 100644 --- a/website/projects/tests/test_admin.py +++ b/website/projects/tests/test_admin.py @@ -84,6 +84,7 @@ def setUp(self): self.sync_mock.users_removed = 1 self.sync_mock.repos_archived = 1 self.github_mock = MagicMock(return_value=self.sync_mock) + self.aws_mock = MagicMock() messages.error = MagicMock() messages.warning = MagicMock() messages.success = MagicMock() @@ -233,6 +234,11 @@ def test_synchronise_current_projects_to_GitHub(self): self.assertNotIn(self.project_archived, args[1]) self.project_admin.synchronise_to_GitHub = original_sync_action + def test_synchronise_to_AWS(self): + with patch("projects.admin.AWSSync", self.aws_mock): + self.project_admin.synchronise_to_AWS(self.request) + self.aws_mock.assert_called_once() + def test_archive_all_repositories(self): self.project_admin.archive_all_repositories(self.request, Project.objects.all()) self.repo1.refresh_from_db() diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py new file mode 100644 index 00000000..7983dc09 --- /dev/null +++ b/website/projects/tests/test_awssync.py @@ -0,0 +1,14 @@ +from django.test import TestCase + +from projects import awssync + + +class AWSSyncTest(TestCase): + """Test AWSSync class.""" + + def setUp(self): + self.sync = awssync.AWSSync() + + def test_button_pressed(self): + return_value = self.sync.button_pressed() + self.assertTrue(return_value) From 2d4b50f78a77f7e1b44d3534be8fc46dd5c4350c Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Fri, 24 Feb 2023 12:07:16 +0100 Subject: [PATCH 02/32] Add boto3 and moto dependencies (#11) --- poetry.lock | 296 +++++++++++++++++++++++++++++++++++++++++++++---- pyproject.toml | 2 + 2 files changed, 278 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6381833a..9e1c0056 
100644 --- a/poetry.lock +++ b/poetry.lock @@ -49,6 +49,38 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "boto3" +version = "1.26.78" +description = "The AWS SDK for Python" +category = "main" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +botocore = ">=1.29.78,<1.30.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.6.0,<0.7.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.29.78" +description = "Low-level, data-driven core of boto 3." +category = "main" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.16.9)"] + [[package]] name = "cachetools" version = "5.2.0" @@ -85,7 +117,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode-backport = ["unicodedata2"] +unicode_backport = ["unicodedata2"] [[package]] name = "click" @@ -151,7 +183,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] +dev = ["PyTest (<5)", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "pytest", "pytest-cov", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] [[package]] name = "django" @@ -280,7 +312,6 @@ python-versions = "*" [package.dependencies] pycodestyle = "*" -setuptools = "*" [[package]] name = "freezegun" @@ -343,7 +374,7 @@ six = ">=1.9.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] -enterprise-cert = ["cryptography 
(==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] @@ -408,6 +439,28 @@ category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "libsass" version = "0.21.0" @@ -419,6 +472,14 @@ python-versions = "*" [package.dependencies] six = "*" +[[package]] +name = "markupsafe" +version = "2.1.2" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "mccabe" version = "0.7.0" @@ -427,6 +488,49 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "moto" +version = "4.1.3" +description = "" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.12.201" +cryptography = ">=3.3.1" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.13.0" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.2.8)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +apigatewayv2 = ["PyYAML 
(>=5.1)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=2.5.1)"] +batch = ["docker (>=2.5.1)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +ds = ["sshpubkeys (>=3.1.0)"] +dynamodb = ["docker (>=2.5.1)"] +dynamodbstreams = ["docker (>=2.5.1)"] +ebs = ["sshpubkeys (>=3.1.0)"] +ec2 = ["sshpubkeys (>=3.1.0)"] +efs = ["sshpubkeys (>=3.1.0)"] +eks = ["sshpubkeys (>=3.1.0)"] +glue = ["pyparsing (>=3.0.7)"] +iotdata = ["jsondiff (>=1.1.2)"] +route53resolver = ["sshpubkeys (>=3.1.0)"] +s3 = ["PyYAML (>=5.1)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +ssm = ["PyYAML (>=5.1)"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -613,7 +717,7 @@ python-versions = ">=3.6" cffi = ">=1.4.1" [package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] @@ -631,7 +735,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "dev" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" @@ -662,7 +766,7 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = 
["chardet (>=3.0.2,<6)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" @@ -679,6 +783,23 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "responses" +version = "0.22.0" +description = "A utility library for mocking out the `requests` Python library." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +requests = ">=2.22.0,<3.0" +toml = "*" +types-toml = "*" +urllib3 = ">=1.25.10" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "types-requests"] + [[package]] name = "rjsmin" version = "1.2.0" @@ -699,17 +820,18 @@ python-versions = ">=3.6,<4" pyasn1 = ">=0.1.3" [[package]] -name = "setuptools" -version = "65.5.1" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" +name = "s3transfer" +version = "0.6.0" +description = "An Amazon S3 Transfer Manager" +category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">= 3.7" + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", 
"filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] [[package]] name = "six" @@ -743,6 +865,14 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + [[package]] name = "tomli" version = "2.0.1" @@ -751,6 +881,14 @@ category = "dev" optional = false python-versions = ">=3.7" +[[package]] +name = "types-toml" +version = "0.10.8.5" +description = "Typing stubs for toml" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "tzdata" version = "2022.6" @@ -788,6 +926,20 @@ category = "main" optional = true python-versions = "*" +[[package]] +name = "werkzeug" +version = "2.2.3" +description = "The comprehensive WSGI web application library." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog"] + [[package]] name = "wrapt" version = "1.14.1" @@ -796,13 +948,21 @@ category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +category = "main" +optional = false +python-versions = ">=3.4" + [extras] -production = ["uwsgi", "uWSGI", "psycopg2-binary"] +production = ["uwsgi", "psycopg2-binary"] [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "cb906894eb8ca0d6f28ab7ea2ca335aff0ff3f43f32e0cabc9ab323703715abf" +content-hash = "d812c41bd73a271e800f7a4969553f2b8b5a748e8d2f435c8ef5b1d953451f72" [metadata.files] absl-py = [ @@ -841,6 +1001,14 @@ black = [ {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, ] +boto3 = [ + {file = "boto3-1.26.78-py3-none-any.whl", hash = "sha256:0c593017fa49dbc34dcdbd5659208f2daf293a499d5f4d7e61978cd6b5d72a97"}, + {file = "boto3-1.26.78.tar.gz", hash = "sha256:488bf63d65864ab7fcdf9337c5aa4d825d444e253738a60f80789916bacc47dc"}, +] +botocore = [ + {file = "botocore-1.29.78-py3-none-any.whl", hash = "sha256:656ac8822a1b6c887a8efe1172bcefa9c9c450face26dc39998a249e8c340a23"}, + {file = "botocore-1.29.78.tar.gz", hash = "sha256:2bee6ed037590ef1e4884d944486232871513915f12a8590c63e3bb6046479bf"}, +] cachetools = [ {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, @@ -1084,6 +1252,14 @@ idna = [ {file = 
"idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +jmespath = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] libsass = [ {file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"}, {file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"}, @@ -1096,10 +1272,66 @@ libsass = [ {file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"}, {file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"}, ] +markupsafe = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = 
"MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = 
"MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, +] mccabe = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +moto = [ + {file = "moto-4.1.3-py2.py3-none-any.whl", hash = "sha256:dcd1d06662982cf3c94f36d6348251ccdcf62a1c5de5650425cb4e6f260ae7a0"}, + {file = "moto-4.1.3.tar.gz", hash = "sha256:c8200ccaa9440c2e9daa0bd5e0bd768a719db5a2c82ea8d782f0e3fa09a3c5e2"}, +] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, @@ -1226,7 
+1458,9 @@ protobuf = [ {file = "protobuf-4.21.9-cp38-cp38-win_amd64.whl", hash = "sha256:84ea107016244dfc1eecae7684f7ce13c788b9a644cd3fca5b77871366556444"}, {file = "protobuf-4.21.9-cp39-cp39-win32.whl", hash = "sha256:f9eae277dd240ae19bb06ff4e2346e771252b0e619421965504bd1b1bba7c5fa"}, {file = "protobuf-4.21.9-cp39-cp39-win_amd64.whl", hash = "sha256:6e312e280fbe3c74ea9e080d9e6080b636798b5e3939242298b591064470b06b"}, + {file = "protobuf-4.21.9-py2.py3-none-any.whl", hash = "sha256:7eb8f2cc41a34e9c956c256e3ac766cf4e1a4c9c925dc757a41a01be3e852965"}, {file = "protobuf-4.21.9-py3-none-any.whl", hash = "sha256:48e2cd6b88c6ed3d5877a3ea40df79d08374088e89bedc32557348848dff250b"}, + {file = "protobuf-4.21.9.tar.gz", hash = "sha256:61f21493d96d2a77f9ca84fefa105872550ab5ef71d21c458eb80edcf4885a99"}, ] psycopg2-binary = [ {file = "psycopg2-binary-2.9.5.tar.gz", hash = "sha256:33e632d0885b95a8b97165899006c40e9ecdc634a529dca7b991eb7de4ece41c"}, @@ -1252,6 +1486,8 @@ psycopg2-binary = [ {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e67b3c26e9b6d37b370c83aa790bbc121775c57bfb096c2e77eacca25fd0233b"}, {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5fc447058d083b8c6ac076fc26b446d44f0145308465d745fba93a28c14c9e32"}, {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d892bfa1d023c3781a3cab8dd5af76b626c483484d782e8bd047c180db590e4c"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-win32.whl", hash = "sha256:2abccab84d057723d2ca8f99ff7b619285d40da6814d50366f61f0fc385c3903"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:bef7e3f9dc6f0c13afdd671008534be5744e0e682fb851584c8c3a025ec09720"}, {file = "psycopg2_binary-2.9.5-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:6e63814ec71db9bdb42905c925639f319c80e7909fb76c3b84edc79dadef8d60"}, {file = 
"psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:212757ffcecb3e1a5338d4e6761bf9c04f750e7d027117e74aa3cd8a75bb6fbd"}, {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8a9bcab7b6db2e3dbf65b214dfc795b4c6b3bb3af922901b6a67f7cb47d5f8"}, @@ -1403,6 +1639,10 @@ requests-oauthlib = [ {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, ] +responses = [ + {file = "responses-0.22.0-py3-none-any.whl", hash = "sha256:dcf294d204d14c436fddcc74caefdbc5764795a40ff4e6a7740ed8ddbf3294be"}, + {file = "responses-0.22.0.tar.gz", hash = "sha256:396acb2a13d25297789a5866b4881cf4e46ffd49cc26c43ab1117f40b973102e"}, +] rjsmin = [ {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e18fe1a610fb105273bb369f61c2b0bd9e66a3f0792e27e4cac44e42ace1968b"}, {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:6c395ffc130332cca744f081ed5efd5699038dcb7a5d30c3ff4bc6adb5b30a62"}, @@ -1429,9 +1669,9 @@ rsa = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, ] -setuptools = [ - {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, - {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, +s3transfer = [ + {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, + {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, ] 
six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1449,10 +1689,18 @@ text-unidecode = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +types-toml = [ + {file = "types-toml-0.10.8.5.tar.gz", hash = "sha256:bf80fce7d2d74be91148f47b88d9ae5adeb1024abef22aa2fdbabc036d6b8b3c"}, + {file = "types_toml-0.10.8.5-py3-none-any.whl", hash = "sha256:2432017febe43174af0f3c65f03116e3d3cf43e7e1406b8200e106da8cf98992"}, +] tzdata = [ {file = "tzdata-2022.6-py2.py3-none-any.whl", hash = "sha256:04a680bdc5b15750c39c12a448885a51134a27ec9af83667663f0b3a1bf3f342"}, {file = "tzdata-2022.6.tar.gz", hash = "sha256:91f11db4503385928c15598c98573e3af07e7229181bee5375bd30f1695ddcae"}, @@ -1468,6 +1716,10 @@ urllib3 = [ uwsgi = [ {file = "uwsgi-2.0.21.tar.gz", hash = "sha256:35a30d83791329429bc04fe44183ce4ab512fcf6968070a7bfba42fc5a0552a9"}, ] +werkzeug = [ + {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, + {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, +] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, @@ -1534,3 +1786,7 @@ wrapt = [ {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, ] +xmltodict = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] diff --git a/pyproject.toml b/pyproject.toml index ca487767..7c75d701 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,8 @@ uWSGI = {version = "^2.0.19", optional = true} admin-totals = "^1.0.1" django-bootstrap5 = "^22.1" django-easy-admin-object-actions = "^1.1.0" +boto3 = "^1.26.78" +moto = "^4.1.3" [tool.poetry.extras] production = ["uwsgi", "psycopg2-binary"] From a23790698aaee29faedd10c9c8bb3511b8e59a24 Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Sun, 26 Feb 2023 12:23:23 +0100 Subject: [PATCH 03/32] Add logger and replace prints with logs --- website/projects/awssync.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index 6ebc8e1a..c7f2dd14 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -1,9 +1,12 @@ +import logging + class AWSSync: """Synchronise with Amazon Web Services.""" def __init__(self): """Create an AWSSync instance.""" - print("Created AWSSync instance") + self.logger = logging.getLogger("django.aws") + self.logger.info("Created AWSSync instance.") def button_pressed(self): """ @@ -11,5 +14,5 @@ def button_pressed(self): 
:return: True if function executes successfully """ - print("Pressed button") + self.logger.info("Pressed button") return True From 02c2243d35a7e7cbf8be2d0633dde427011525dd Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Sun, 26 Feb 2023 12:32:14 +0100 Subject: [PATCH 04/32] Add function to create AWS organization --- website/projects/awssync.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index c7f2dd14..36722c28 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -1,11 +1,18 @@ import logging +import boto3 + +from botocore.exceptions import ClientError + + class AWSSync: """Synchronise with Amazon Web Services.""" def __init__(self): """Create an AWSSync instance.""" self.logger = logging.getLogger("django.aws") + self.org_info = None + self.fail = False self.logger.info("Created AWSSync instance.") def button_pressed(self): @@ -16,3 +23,16 @@ def button_pressed(self): """ self.logger.info("Pressed button") return True + + def create_aws_organization(self): + """Create an AWS organization with the current user as the management account.""" + client = boto3.client("organizations") + try: + response = client.create_organization(FeatureSet="ALL") + self.org_info = response["Organization"] + self.logger.info("Created an AWS organization and saved organization info.") + except ClientError as error: + self.fail = True + self.logger.error("Something went wrong creating an AWS organization.") + self.logger.debug(f"{error}") + self.logger.debug(f"{error.response}") From 6b55b191808a418e7014474b4e6164d532d4348f Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Sun, 26 Feb 2023 12:54:36 +0100 Subject: [PATCH 05/32] Add unit tests for creating AWS organization --- website/projects/tests/test_awssync.py | 48 ++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git 
a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index 7983dc09..6822fc14 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -1,5 +1,13 @@ +from unittest.mock import patch + +import boto3 + +from botocore.exceptions import ClientError + from django.test import TestCase +from moto import mock_organizations + from projects import awssync @@ -12,3 +20,43 @@ def setUp(self): def test_button_pressed(self): return_value = self.sync.button_pressed() self.assertTrue(return_value) + + def mock_api(self, operation_name, kwarg): + if operation_name == "CreateOrganization": + raise ClientError( + { + "Error": { + "Message": "The AWS account is already a member of an organization.", + "Code": "AlreadyInOrganizationException", + }, + "ResponseMetadata": { + "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "HTTPStatusCode": 400, + "HTTPHeaders": { + "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "content-type": "application/x-amz-json-1.1", + "content-length": "111", + "date": "Sun, 01 Jan 2023 00:00:00 GMT", + "connection": "close", + }, + "RetryAttempts": 0, + }, + "Message": "The AWS account is already a member of an organization.", + }, + "create_organization", + ) + + @mock_organizations + def test_create_aws_organization(self): + moto_client = boto3.client("organizations") + org = self.sync + org.create_aws_organization() + describe_org = moto_client.describe_organization()["Organization"] + self.assertEqual(describe_org, org.org_info) + + @patch("botocore.client.BaseClient._make_api_call", mock_api) + def test_create_aws_organization__exception(self): + org = self.sync + org.create_aws_organization() + self.assertTrue(org.fail) + self.assertIsNone(org.org_info) From 2bb9d9fcb55487f413b2a791a4da1e7ea97617f0 Mon Sep 17 00:00:00 2001 From: Henk Berendsen <61596108+hb140502@users.noreply.github.com> Date: Fri, 3 Mar 2023 10:29:03 +0100 Subject: [PATCH 06/32] Deliverable 
sprint 1 (#19) * AWS synchronisation button (#8) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. * Removed unnecessary docstrings. * Add boto3 and moto dependencies (#11) * Add logger and replace prints with logs * Add function to create AWS organization * Add unit tests for creating AWS organization * bugfix (#619) Co-authored-by: nvoers --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com> Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com> Co-authored-by: nvoers --- poetry.lock | 296 ++++++++++++++++-- pyproject.toml | 2 + website/projects/admin.py | 8 + website/projects/awssync.py | 38 +++ .../templates/admin/projects/change_list.html | 1 + website/projects/tests/test_admin.py | 6 + website/projects/tests/test_awssync.py | 62 ++++ website/room_reservation/views.py | 2 +- 8 files changed, 394 insertions(+), 21 deletions(-) create mode 100644 website/projects/awssync.py create mode 100644 website/projects/tests/test_awssync.py diff --git a/poetry.lock b/poetry.lock index 6381833a..9e1c0056 100644 --- a/poetry.lock +++ b/poetry.lock @@ -49,6 +49,38 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "boto3" +version = "1.26.78" +description = "The AWS SDK for Python" +category = "main" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +botocore = ">=1.29.78,<1.30.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.6.0,<0.7.0" + 
+[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.29.78" +description = "Low-level, data-driven core of boto 3." +category = "main" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.16.9)"] + [[package]] name = "cachetools" version = "5.2.0" @@ -85,7 +117,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode-backport = ["unicodedata2"] +unicode_backport = ["unicodedata2"] [[package]] name = "click" @@ -151,7 +183,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] +dev = ["PyTest (<5)", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "pytest", "pytest-cov", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] [[package]] name = "django" @@ -280,7 +312,6 @@ python-versions = "*" [package.dependencies] pycodestyle = "*" -setuptools = "*" [[package]] name = "freezegun" @@ -343,7 +374,7 @@ six = ">=1.9.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] @@ -408,6 +439,28 @@ category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "libsass" version = "0.21.0" @@ -419,6 +472,14 @@ python-versions = "*" [package.dependencies] six = "*" +[[package]] +name = "markupsafe" +version = "2.1.2" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "mccabe" version = "0.7.0" @@ -427,6 +488,49 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "moto" +version = "4.1.3" +description = "" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.12.201" +cryptography = ">=3.3.1" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.13.0" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.2.8)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +apigatewayv2 = ["PyYAML (>=5.1)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=2.5.1)"] +batch = ["docker (>=2.5.1)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] 
(>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +ds = ["sshpubkeys (>=3.1.0)"] +dynamodb = ["docker (>=2.5.1)"] +dynamodbstreams = ["docker (>=2.5.1)"] +ebs = ["sshpubkeys (>=3.1.0)"] +ec2 = ["sshpubkeys (>=3.1.0)"] +efs = ["sshpubkeys (>=3.1.0)"] +eks = ["sshpubkeys (>=3.1.0)"] +glue = ["pyparsing (>=3.0.7)"] +iotdata = ["jsondiff (>=1.1.2)"] +route53resolver = ["sshpubkeys (>=3.1.0)"] +s3 = ["PyYAML (>=5.1)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +ssm = ["PyYAML (>=5.1)"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -613,7 +717,7 @@ python-versions = ">=3.6" cffi = ">=1.4.1" [package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] @@ -631,7 +735,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "dev" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" @@ -662,7 +766,7 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" @@ -679,6 +783,23 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "responses" +version = "0.22.0" +description = "A utility library for mocking out the `requests` Python library." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +requests = ">=2.22.0,<3.0" +toml = "*" +types-toml = "*" +urllib3 = ">=1.25.10" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "types-requests"] + [[package]] name = "rjsmin" version = "1.2.0" @@ -699,17 +820,18 @@ python-versions = ">=3.6,<4" pyasn1 = ">=0.1.3" [[package]] -name = "setuptools" -version = "65.5.1" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" +name = "s3transfer" +version = "0.6.0" +description = "An Amazon S3 Transfer Manager" +category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">= 3.7" + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] [[package]] name = "six" @@ -743,6 +865,14 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "toml" +version = 
"0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + [[package]] name = "tomli" version = "2.0.1" @@ -751,6 +881,14 @@ category = "dev" optional = false python-versions = ">=3.7" +[[package]] +name = "types-toml" +version = "0.10.8.5" +description = "Typing stubs for toml" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "tzdata" version = "2022.6" @@ -788,6 +926,20 @@ category = "main" optional = true python-versions = "*" +[[package]] +name = "werkzeug" +version = "2.2.3" +description = "The comprehensive WSGI web application library." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog"] + [[package]] name = "wrapt" version = "1.14.1" @@ -796,13 +948,21 @@ category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +category = "main" +optional = false +python-versions = ">=3.4" + [extras] -production = ["uwsgi", "uWSGI", "psycopg2-binary"] +production = ["uwsgi", "psycopg2-binary"] [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "cb906894eb8ca0d6f28ab7ea2ca335aff0ff3f43f32e0cabc9ab323703715abf" +content-hash = "d812c41bd73a271e800f7a4969553f2b8b5a748e8d2f435c8ef5b1d953451f72" [metadata.files] absl-py = [ @@ -841,6 +1001,14 @@ black = [ {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, ] +boto3 = [ + {file = "boto3-1.26.78-py3-none-any.whl", hash = "sha256:0c593017fa49dbc34dcdbd5659208f2daf293a499d5f4d7e61978cd6b5d72a97"}, + {file = 
"boto3-1.26.78.tar.gz", hash = "sha256:488bf63d65864ab7fcdf9337c5aa4d825d444e253738a60f80789916bacc47dc"}, +] +botocore = [ + {file = "botocore-1.29.78-py3-none-any.whl", hash = "sha256:656ac8822a1b6c887a8efe1172bcefa9c9c450face26dc39998a249e8c340a23"}, + {file = "botocore-1.29.78.tar.gz", hash = "sha256:2bee6ed037590ef1e4884d944486232871513915f12a8590c63e3bb6046479bf"}, +] cachetools = [ {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, @@ -1084,6 +1252,14 @@ idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +jmespath = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] libsass = [ {file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"}, {file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"}, @@ -1096,10 +1272,66 @@ libsass = [ {file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"}, {file = "libsass-0.21.0.tar.gz", hash = 
"sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"}, ] +markupsafe = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = 
"MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = 
"MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = 
"MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, +] mccabe = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +moto = [ + {file = "moto-4.1.3-py2.py3-none-any.whl", hash = "sha256:dcd1d06662982cf3c94f36d6348251ccdcf62a1c5de5650425cb4e6f260ae7a0"}, + {file = "moto-4.1.3.tar.gz", hash = "sha256:c8200ccaa9440c2e9daa0bd5e0bd768a719db5a2c82ea8d782f0e3fa09a3c5e2"}, +] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, @@ -1226,7 +1458,9 @@ protobuf = [ {file = "protobuf-4.21.9-cp38-cp38-win_amd64.whl", hash = "sha256:84ea107016244dfc1eecae7684f7ce13c788b9a644cd3fca5b77871366556444"}, {file = "protobuf-4.21.9-cp39-cp39-win32.whl", hash = "sha256:f9eae277dd240ae19bb06ff4e2346e771252b0e619421965504bd1b1bba7c5fa"}, {file = "protobuf-4.21.9-cp39-cp39-win_amd64.whl", hash = "sha256:6e312e280fbe3c74ea9e080d9e6080b636798b5e3939242298b591064470b06b"}, + {file = "protobuf-4.21.9-py2.py3-none-any.whl", hash = "sha256:7eb8f2cc41a34e9c956c256e3ac766cf4e1a4c9c925dc757a41a01be3e852965"}, {file = "protobuf-4.21.9-py3-none-any.whl", hash = "sha256:48e2cd6b88c6ed3d5877a3ea40df79d08374088e89bedc32557348848dff250b"}, + {file = "protobuf-4.21.9.tar.gz", hash = "sha256:61f21493d96d2a77f9ca84fefa105872550ab5ef71d21c458eb80edcf4885a99"}, ] psycopg2-binary = [ {file = "psycopg2-binary-2.9.5.tar.gz", hash = "sha256:33e632d0885b95a8b97165899006c40e9ecdc634a529dca7b991eb7de4ece41c"}, @@ -1252,6 +1486,8 @@ psycopg2-binary = [ {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e67b3c26e9b6d37b370c83aa790bbc121775c57bfb096c2e77eacca25fd0233b"}, {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:5fc447058d083b8c6ac076fc26b446d44f0145308465d745fba93a28c14c9e32"}, {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d892bfa1d023c3781a3cab8dd5af76b626c483484d782e8bd047c180db590e4c"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-win32.whl", hash = "sha256:2abccab84d057723d2ca8f99ff7b619285d40da6814d50366f61f0fc385c3903"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:bef7e3f9dc6f0c13afdd671008534be5744e0e682fb851584c8c3a025ec09720"}, {file = "psycopg2_binary-2.9.5-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:6e63814ec71db9bdb42905c925639f319c80e7909fb76c3b84edc79dadef8d60"}, {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:212757ffcecb3e1a5338d4e6761bf9c04f750e7d027117e74aa3cd8a75bb6fbd"}, {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8a9bcab7b6db2e3dbf65b214dfc795b4c6b3bb3af922901b6a67f7cb47d5f8"}, @@ -1403,6 +1639,10 @@ requests-oauthlib = [ {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, ] +responses = [ + {file = "responses-0.22.0-py3-none-any.whl", hash = "sha256:dcf294d204d14c436fddcc74caefdbc5764795a40ff4e6a7740ed8ddbf3294be"}, + {file = "responses-0.22.0.tar.gz", hash = "sha256:396acb2a13d25297789a5866b4881cf4e46ffd49cc26c43ab1117f40b973102e"}, +] rjsmin = [ {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e18fe1a610fb105273bb369f61c2b0bd9e66a3f0792e27e4cac44e42ace1968b"}, {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:6c395ffc130332cca744f081ed5efd5699038dcb7a5d30c3ff4bc6adb5b30a62"}, @@ -1429,9 +1669,9 @@ rsa = [ {file = 
"rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, ] -setuptools = [ - {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, - {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, +s3transfer = [ + {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, + {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1449,10 +1689,18 @@ text-unidecode = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +types-toml = [ + {file = "types-toml-0.10.8.5.tar.gz", hash = "sha256:bf80fce7d2d74be91148f47b88d9ae5adeb1024abef22aa2fdbabc036d6b8b3c"}, + {file = "types_toml-0.10.8.5-py3-none-any.whl", hash = "sha256:2432017febe43174af0f3c65f03116e3d3cf43e7e1406b8200e106da8cf98992"}, +] tzdata = [ {file = 
"tzdata-2022.6-py2.py3-none-any.whl", hash = "sha256:04a680bdc5b15750c39c12a448885a51134a27ec9af83667663f0b3a1bf3f342"}, {file = "tzdata-2022.6.tar.gz", hash = "sha256:91f11db4503385928c15598c98573e3af07e7229181bee5375bd30f1695ddcae"}, @@ -1468,6 +1716,10 @@ urllib3 = [ uwsgi = [ {file = "uwsgi-2.0.21.tar.gz", hash = "sha256:35a30d83791329429bc04fe44183ce4ab512fcf6968070a7bfba42fc5a0552a9"}, ] +werkzeug = [ + {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, + {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, +] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, @@ -1534,3 +1786,7 @@ wrapt = [ {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, ] +xmltodict = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] diff --git a/pyproject.toml b/pyproject.toml index ca487767..7c75d701 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,8 @@ uWSGI = {version = "^2.0.19", optional = true} admin-totals = "^1.0.1" django-bootstrap5 = "^22.1" django-easy-admin-object-actions = "^1.1.0" +boto3 = "^1.26.78" +moto = "^4.1.3" [tool.poetry.extras] production = ["uwsgi", "psycopg2-binary"] diff --git a/website/projects/admin.py b/website/projects/admin.py index 0a39d439..7fae8a6d 100644 --- 
a/website/projects/admin.py +++ b/website/projects/admin.py @@ -12,6 +12,7 @@ from mailing_lists.models import MailingList +from projects.awssync import AWSSync from projects.forms import ProjectAdminForm, RepositoryInlineForm from projects.githubsync import GitHubSync from projects.models import Client, Project, Repository @@ -171,6 +172,12 @@ def synchronise_current_projects_to_GitHub(self, request): ], ) + def synchronise_to_AWS(self, request): + """Synchronise to Amazon Web Services.""" + sync = AWSSync() + sync.button_pressed() + return redirect("admin:projects_project_changelist") + def get_urls(self): """Get admin urls.""" urls = super().get_urls() @@ -180,6 +187,7 @@ def get_urls(self): self.admin_site.admin_view(self.synchronise_current_projects_to_GitHub), name="synchronise_to_github", ), + path("sync-to-aws/", self.admin_site.admin_view(self.synchronise_to_AWS), name="synchronise_to_aws"), ] return custom_urls + urls diff --git a/website/projects/awssync.py b/website/projects/awssync.py new file mode 100644 index 00000000..36722c28 --- /dev/null +++ b/website/projects/awssync.py @@ -0,0 +1,38 @@ +import logging + +import boto3 + +from botocore.exceptions import ClientError + + +class AWSSync: + """Synchronise with Amazon Web Services.""" + + def __init__(self): + """Create an AWSSync instance.""" + self.logger = logging.getLogger("django.aws") + self.org_info = None + self.fail = False + self.logger.info("Created AWSSync instance.") + + def button_pressed(self): + """ + Print debug message to show that the button has been pressed. 
+ + :return: True if function executes successfully + """ + self.logger.info("Pressed button") + return True + + def create_aws_organization(self): + """Create an AWS organization with the current user as the management account.""" + client = boto3.client("organizations") + try: + response = client.create_organization(FeatureSet="ALL") + self.org_info = response["Organization"] + self.logger.info("Created an AWS organization and saved organization info.") + except ClientError as error: + self.fail = True + self.logger.error("Something went wrong creating an AWS organization.") + self.logger.debug(f"{error}") + self.logger.debug(f"{error.response}") diff --git a/website/projects/templates/admin/projects/change_list.html b/website/projects/templates/admin/projects/change_list.html index 82da6c2e..9e2f1890 100644 --- a/website/projects/templates/admin/projects/change_list.html +++ b/website/projects/templates/admin/projects/change_list.html @@ -4,6 +4,7 @@ {% block object-tools-items %}
  • Synchronize projects of the current semester to GitHub + Synchronize projects of the current semester to AWS
  • {{ block.super }} {% endblock %} diff --git a/website/projects/tests/test_admin.py b/website/projects/tests/test_admin.py index d8778d0c..25f0328a 100644 --- a/website/projects/tests/test_admin.py +++ b/website/projects/tests/test_admin.py @@ -84,6 +84,7 @@ def setUp(self): self.sync_mock.users_removed = 1 self.sync_mock.repos_archived = 1 self.github_mock = MagicMock(return_value=self.sync_mock) + self.aws_mock = MagicMock() messages.error = MagicMock() messages.warning = MagicMock() messages.success = MagicMock() @@ -233,6 +234,11 @@ def test_synchronise_current_projects_to_GitHub(self): self.assertNotIn(self.project_archived, args[1]) self.project_admin.synchronise_to_GitHub = original_sync_action + def test_synchronise_to_AWS(self): + with patch("projects.admin.AWSSync", self.aws_mock): + self.project_admin.synchronise_to_AWS(self.request) + self.aws_mock.assert_called_once() + def test_archive_all_repositories(self): self.project_admin.archive_all_repositories(self.request, Project.objects.all()) self.repo1.refresh_from_db() diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py new file mode 100644 index 00000000..6822fc14 --- /dev/null +++ b/website/projects/tests/test_awssync.py @@ -0,0 +1,62 @@ +from unittest.mock import patch + +import boto3 + +from botocore.exceptions import ClientError + +from django.test import TestCase + +from moto import mock_organizations + +from projects import awssync + + +class AWSSyncTest(TestCase): + """Test AWSSync class.""" + + def setUp(self): + self.sync = awssync.AWSSync() + + def test_button_pressed(self): + return_value = self.sync.button_pressed() + self.assertTrue(return_value) + + def mock_api(self, operation_name, kwarg): + if operation_name == "CreateOrganization": + raise ClientError( + { + "Error": { + "Message": "The AWS account is already a member of an organization.", + "Code": "AlreadyInOrganizationException", + }, + "ResponseMetadata": { + "RequestId": 
"ffffffff-ffff-ffff-ffff-ffffffffffff", + "HTTPStatusCode": 400, + "HTTPHeaders": { + "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "content-type": "application/x-amz-json-1.1", + "content-length": "111", + "date": "Sun, 01 Jan 2023 00:00:00 GMT", + "connection": "close", + }, + "RetryAttempts": 0, + }, + "Message": "The AWS account is already a member of an organization.", + }, + "create_organization", + ) + + @mock_organizations + def test_create_aws_organization(self): + moto_client = boto3.client("organizations") + org = self.sync + org.create_aws_organization() + describe_org = moto_client.describe_organization()["Organization"] + self.assertEqual(describe_org, org.org_info) + + @patch("botocore.client.BaseClient._make_api_call", mock_api) + def test_create_aws_organization__exception(self): + org = self.sync + org.create_aws_organization() + self.assertTrue(org.fail) + self.assertIsNone(org.org_info) diff --git a/website/room_reservation/views.py b/website/room_reservation/views.py index f617d2ee..eeef5b36 100644 --- a/website/room_reservation/views.py +++ b/website/room_reservation/views.py @@ -134,7 +134,7 @@ def get_context_data(self, **kwargs): } for reservation in Reservation.objects.filter( start_time__date__gte=timezone.now() - self.time_window_past, - start_time__date__lt=timezone.now() + self.time_window_future, + start_time__date__lte=timezone.now() + self.time_window_future, ) ] ) From 784c16c4e6416e63fc1ce7c9bded4035d121deb1 Mon Sep 17 00:00:00 2001 From: Jer111 <82157107+Jer111@users.noreply.github.com> Date: Fri, 3 Mar 2023 10:49:13 +0100 Subject: [PATCH 07/32] Added logger setlevel (#20) --- website/projects/awssync.py | 1 + 1 file changed, 1 insertion(+) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index 36722c28..e0f44734 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -11,6 +11,7 @@ class AWSSync: def __init__(self): """Create an AWSSync instance.""" self.logger = 
logging.getLogger("django.aws") + self.logger.setLevel(logging.DEBUG) self.org_info = None self.fail = False self.logger.info("Created AWSSync instance.") From 8c83d5d915d03a998f20cf4c18a83dd15665ecba Mon Sep 17 00:00:00 2001 From: Henk Berendsen <61596108+hb140502@users.noreply.github.com> Date: Fri, 3 Mar 2023 12:38:38 +0100 Subject: [PATCH 08/32] Updated deliverable sprint 1 (#22) * AWS synchronisation button (#8) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. * Removed unnecessary docstrings. * Add boto3 and moto dependencies (#11) * Add logger and replace prints with logs * Add function to create AWS organization * Add unit tests for creating AWS organization * bugfix (#619) Co-authored-by: nvoers * Added logger setlevel (#20) --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com> Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com> Co-authored-by: nvoers Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com> --- website/projects/awssync.py | 1 + 1 file changed, 1 insertion(+) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index 36722c28..e0f44734 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -11,6 +11,7 @@ class AWSSync: def __init__(self): """Create an AWSSync instance.""" self.logger = logging.getLogger("django.aws") + self.logger.setLevel(logging.DEBUG) self.org_info = None self.fail = False self.logger.info("Created AWSSync instance.") From 
baf6f285374b161cd7f8d5b4c9a663192ecd7332 Mon Sep 17 00:00:00 2001 From: Jer111 <82157107+Jer111@users.noreply.github.com> Date: Tue, 14 Mar 2023 10:35:27 +0100 Subject: [PATCH 09/32] Db sync (#16) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. * Added get mailinglist to the awssync file * Added first version of get_all_managers * Added test case for mailing lists * Removed some prints * reformatted using black * flake8 intentation added * flake8 intentation correction * Removed get manager * Linting added * unused import removed * Added get_teamid_from_email * Changed function email with teamid * Updated get_emails_with_teamids, working now * Added test for get_emails_with_ids * Added linting * linting * Added more test * Linting in awssync and its test file * Moved the imports around * moved the imports around * Black linting * switched imports around * Switched imports around part 2 * Switched imports around part 3 * Switched imports around part 4 * Fixed when no project exist for mailing list * Added some more tests * Removed exeption try expect * Black linting * Changed get_email_with_teamid to new format * Changed get_emails_with_teamids to go over project * Added tests for get_emails_with_teamids * changed info for get_emails_with_teamids --------- Co-authored-by: Henk --- website/projects/awssync.py | 45 +++++++++++++++++++++ website/projects/tests/test_awssync.py | 55 ++++++++++++++++++++++++++ 2 files changed, 100 insertions(+) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index e0f44734..90017cbc 100644 --- 
a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -1,9 +1,17 @@ +"""Framework for synchronisation with Amazon Web Services (AWS).""" + import logging import boto3 from botocore.exceptions import ClientError +from courses.models import Semester + +from mailing_lists.models import MailingList + +from projects.models import Project + class AWSSync: """Synchronise with Amazon Web Services.""" @@ -23,8 +31,45 @@ def button_pressed(self): :return: True if function executes successfully """ self.logger.info("Pressed button") + self.logger.info(self.get_emails_with_teamids()) return True + def get_all_mailing_lists(self): + """ + Get all mailing lists from the database. + + :return: List of mailing lists + """ + mailing_lists = MailingList.objects.all() + mailing_list_names = [ml.email_address for ml in mailing_lists] + return mailing_list_names + + def get_emails_with_teamids(self): + """ + Create a list of dictionaries containing email, slug and semester. + + Slug and semester combined are together an uniqueness constraint. 
+ + :return: list of dictionaries of email, slug and semester + """ + email_ids = [] + + for project in ( + Project.objects.filter(mailinglist__isnull=False) + .filter(semester=Semester.objects.get_or_create_current_semester()) + .values("slug", "semester", "mailinglist") + ): + project_slug = project["slug"] + project_semester = str(Semester.objects.get(pk=project["semester"])) + project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address + email_dict = { + "project_email": project_email, + "project_slug": project_slug, + "project_semester": project_semester, + } + email_ids.append(email_dict) + return email_ids + def create_aws_organization(self): """Create an AWS organization with the current user as the management account.""" client = boto3.client("organizations") diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index 6822fc14..6cf4ab3d 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -1,3 +1,5 @@ +"""Tests for awssync.py.""" + from unittest.mock import patch import boto3 @@ -8,19 +10,72 @@ from moto import mock_organizations +from courses.models import Semester + +from mailing_lists.models import MailingList + from projects import awssync +from projects.models import Project class AWSSyncTest(TestCase): """Test AWSSync class.""" def setUp(self): + """Set up testing environment.""" self.sync = awssync.AWSSync() + self.semester = Semester.objects.create(year=2023, season=Semester.SPRING) + self.mailing_list = MailingList.objects.create(address="test1") + self.project = Project.objects.create(id=1, name="test1", semester=self.semester, slug="test1") + self.mailing_list.projects.add(self.project) def test_button_pressed(self): + """Test button_pressed function.""" return_value = self.sync.button_pressed() self.assertTrue(return_value) + def test_get_all_mailing_lists(self): + """Test get_all_mailing_lists function.""" + mailing_lists = 
self.sync.get_all_mailing_lists() + self.assertIsInstance(mailing_lists, list) + + def test_get_emails_with_teamids_normal(self): + """Test get_emails_with_teamids function.""" + email_id = self.sync.get_emails_with_teamids() + self.assertIsInstance(email_id, list) + self.assertIsInstance(email_id[0], dict) + expected_result = [ + {"project_email": "test1@giphouse.nl", "project_slug": "test1", "project_semester": "Spring 2023"} + ] + self.assertEqual(email_id, expected_result) + + def test_get_emails_with_teamids_no_project(self): + """Test get_emails_with_teamids function.""" + MailingList.objects.all().delete() + self.mailing_list = MailingList.objects.create(address="test2") + email_id = self.sync.get_emails_with_teamids() + self.assertIsInstance(email_id, list) + self.assertEqual(email_id, []) + + def test_get_emails_with_teamids_no_mailing_list(self): + """Test get_emails_with_teamids function.""" + MailingList.objects.all().delete() + Project.objects.all().delete() + email_id = self.sync.get_emails_with_teamids() + self.assertIsInstance(email_id, list) + self.assertEqual(email_id, []) + + def test_get_emails_with_teamids_different_semester(self): + """Test get_emails_with_teamids function.""" + MailingList.objects.all().delete() + new_semester = Semester.objects.create(year=2022, season=Semester.FALL) + self.mailing_list = MailingList.objects.create(address="test2") + self.project = Project.objects.create(id=2, name="test2", semester=new_semester, slug="test2") + self.mailing_list.projects.add(self.project) + email_id = self.sync.get_emails_with_teamids() + self.assertIsInstance(email_id, list) + self.assertEqual(email_id, []) + def mock_api(self, operation_name, kwarg): if operation_name == "CreateOrganization": raise ClientError( From 65d1048aadf2ff33cd2ac5c67411a2b4dbc4c752 Mon Sep 17 00:00:00 2001 From: mitchellboes <49476235+mitchellboes@users.noreply.github.com> Date: Tue, 14 Mar 2023 12:20:59 +0100 Subject: [PATCH 10/32] Db sync (#25) * Added button to 
/admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. * Added get mailinglist to the awssync file * Added first version of get_all_managers * Added test case for mailing lists * Removed some prints * reformatted using black * flake8 intentation added * flake8 intentation correction * Removed get manager * Linting added * unused import removed * Added get_teamid_from_email * Changed function email with teamid * Updated get_emails_with_teamids, working now * Added test for get_emails_with_ids * Added linting * linting * Added more test * Linting in awssync and its test file * Moved the imports around * moved the imports around * Black linting * switched imports around * Switched imports around part 2 * Switched imports around part 3 * Switched imports around part 4 * Fixed when no project exist for mailing list * Added some more tests * Removed exeption try expect * Black linting * Changed get_email_with_teamid to new format * Changed get_emails_with_teamids to go over project * Added tests for get_emails_with_teamids * changed info for get_emails_with_teamids * Changed email data dict to struct * added test for TypeError exception for eq operator * resolved linting errors * changed comment to correct datatype * dramatically improved test class name --------- Co-authored-by: Henk Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com> Co-authored-by: Jer111 --- website/projects/awssync.py | 33 +++++++++++++++++++------- website/projects/tests/test_awssync.py | 21 ++++++++++++---- 2 files changed, 42 insertions(+), 12 deletions(-) diff --git a/website/projects/awssync.py 
b/website/projects/awssync.py index 90017cbc..ac9bddda 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -13,6 +13,26 @@ from projects.models import Project +class SyncData: + """Structure for AWS giphouse sync data.""" + + def __init__(self, project_email, project_slug, project_semester): + """Create SyncData instance.""" + self.project_email = project_email + self.project_slug = project_slug + self.project_semester = project_semester + + def __eq__(self, other): + """Overload equals for SyncData type.""" + if not isinstance(other, SyncData): + raise TypeError("Must compare to object of type SyncData") + return ( + self.project_email == other.project_email + and self.project_slug == other.project_slug + and self.project_semester == other.project_semester + ) + + class AWSSync: """Synchronise with Amazon Web Services.""" @@ -46,11 +66,11 @@ def get_all_mailing_lists(self): def get_emails_with_teamids(self): """ - Create a list of dictionaries containing email, slug and semester. + Create a list of SyncData struct containing email, slug and semester. Slug and semester combined are together an uniqueness constraint. 
- :return: list of dictionaries of email, slug and semester + :return: list of SyncData structs with email, slug and semester """ email_ids = [] @@ -62,12 +82,9 @@ def get_emails_with_teamids(self): project_slug = project["slug"] project_semester = str(Semester.objects.get(pk=project["semester"])) project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address - email_dict = { - "project_email": project_email, - "project_slug": project_slug, - "project_semester": project_semester, - } - email_ids.append(email_dict) + + sync_data = SyncData(project_email, project_slug, project_semester) + email_ids.append(sync_data) return email_ids def create_aws_organization(self): diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index 6cf4ab3d..f9aa1353 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -18,6 +18,20 @@ from projects.models import Project +class SyncDataTest(TestCase): + """Test SyncData class (struct).""" + + def setUp(self): + """setup test environment.""" + self.sync = awssync.SyncData + + def test_throw_type_error_SyncData_class(self): + """Test Type Error when equals is called on wrong type.""" + with self.assertRaises(TypeError) as context: + self.sync("", "", "") == [] + self.assertTrue("Must compare to object of type SyncData" in str(context.exception)) + + class AWSSyncTest(TestCase): """Test AWSSync class.""" @@ -42,11 +56,10 @@ def test_get_all_mailing_lists(self): def test_get_emails_with_teamids_normal(self): """Test get_emails_with_teamids function.""" email_id = self.sync.get_emails_with_teamids() + self.assertIsInstance(email_id, list) - self.assertIsInstance(email_id[0], dict) - expected_result = [ - {"project_email": "test1@giphouse.nl", "project_slug": "test1", "project_semester": "Spring 2023"} - ] + self.assertIsInstance(email_id[0], awssync.SyncData) + expected_result = [awssync.SyncData("test1@giphouse.nl", "test1", "Spring 2023")] 
self.assertEqual(email_id, expected_result) def test_get_emails_with_teamids_no_project(self): From c562c6940877bb9d07f0792b4e7a0e4c038f4370 Mon Sep 17 00:00:00 2001 From: mitchellboes <49476235+mitchellboes@users.noreply.github.com> Date: Tue, 14 Mar 2023 14:56:12 +0100 Subject: [PATCH 11/32] Added function to generate which users have to be invited after the sync button is pressed (#23) * Added 'generate_aws_sync_list' function and tests * solved black errors * changed generate_aws_sync_list to use SyncData structure --- website/projects/awssync.py | 9 ++++++++ website/projects/tests/test_awssync.py | 32 ++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index ac9bddda..29499b58 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -99,3 +99,12 @@ def create_aws_organization(self): self.logger.error("Something went wrong creating an AWS organization.") self.logger.debug(f"{error}") self.logger.debug(f"{error.response}") + + def generate_aws_sync_list(self, giphouse_data, aws_data): + """ + Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS. + + This includes their ID and email address, to be able to put users in the correct AWS orginization later. 
+ """ + sync_list = [x for x in giphouse_data if x not in aws_data] + return sync_list diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index f9aa1353..f48eaf4e 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -128,3 +128,35 @@ def test_create_aws_organization__exception(self): org.create_aws_organization() self.assertTrue(org.fail) self.assertIsNone(org.org_info) + + +class AWSSyncListTest(TestCase): + """Test AWSSyncList class.""" + + def setUp(self): + self.sync = awssync.AWSSync() + self.syncData = awssync.SyncData + + self.test1 = self.syncData("test1@test1.test1", "test1", "test1") + self.test2 = self.syncData("test2@test2.test2", "test2", "test2") + self.test3 = self.syncData("test3@test3.test3", "test3", "test3") + + def test_AWS_sync_list_both_empty(self): + gip_list = [] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_empty_AWS(self): + gip_list = [self.test1, self.test2] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list) + + def test_AWS_sync_list_empty_GiP(self): + gip_list = [] + aws_list = [self.test1, self.test2] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_both_full(self): + gip_list = [self.test1, self.test2] + aws_list = [self.test2, self.test3] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1]) From 2bf3048b88fdba006005fae87ffbfccb62dace51 Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Tue, 4 Apr 2023 12:44:04 +0200 Subject: [PATCH 12/32] Create and attach SCP policies (#29) * Add functions for creating and attaching SCP policies * Improve test cases --- website/projects/awssync.py | 42 ++++++++++++ website/projects/tests/test_awssync.py | 92 ++++++++++++++++++++++++++ 2 files changed, 134 
insertions(+) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index 29499b58..688bbd8a 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -1,5 +1,6 @@ """Framework for synchronisation with Amazon Web Services (AWS).""" +import json import logging import boto3 @@ -108,3 +109,44 @@ def generate_aws_sync_list(self, giphouse_data, aws_data): """ sync_list = [x for x in giphouse_data if x not in aws_data] return sync_list + + def create_scp_policy(self, policy_name, policy_description, policy_content): + """ + Create a SCP policy. + + :param policy_name: The policy name. + :param policy_description: The policy description. + :param policy_content: The policy configuration as a dictionary. The policy is automatically + converted to JSON format, including escaped quotation marks. + :return: Details of newly created policy as a dict on success and NoneType object otherwise. + """ + client = boto3.client("organizations") + try: + response = client.create_policy( + Content=json.dumps(policy_content), + Description=policy_description, + Name=policy_name, + Type="SERVICE_CONTROL_POLICY", + ) + except ClientError as error: + self.fail = True + self.logger.error("Something went wrong creating an SCP policy.") + self.logger.error(error) + else: + return response["Policy"] + + def attach_scp_policy(self, policy_id, target_id): + """ + Attaches a SCP policy to a target (root, OU, or member account). + + :param policy_id: The ID of the policy to be attached. + :param target_id: The ID of the target root, OU, or member account. 
+ """ + client = boto3.client("organizations") + try: + client.attach_policy(PolicyId=policy_id, TargetId=target_id) + except ClientError as error: + self.fail = True + self.logger.error("Something went wrong attaching an SCP policy to a target.") + self.logger.debug(f"{error}") + self.logger.debug(f"{error.response}") diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index f48eaf4e..83823aa6 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -1,5 +1,6 @@ """Tests for awssync.py.""" +import json from unittest.mock import patch import boto3 @@ -114,6 +115,32 @@ def mock_api(self, operation_name, kwarg): "create_organization", ) + if operation_name == "CreatePolicy": + raise ClientError( + { + "Error": { + "Message": """The provided policy document does not meet the + requirements of the specified policy type.""", + "Code": "MalformedPolicyDocumentException", + }, + "ResponseMetadata": { + "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "HTTPStatusCode": 400, + "HTTPHeaders": { + "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "content-type": "application/x-amz-json-1.1", + "content-length": "147", + "date": "Sun, 01 Jan 2023 00:00:00 GMT", + "connection": "close", + }, + "RetryAttempts": 0, + }, + "Message": """The provided policy document does not meet the + requirements of the specified policy type.""", + }, + "create_policy", + ) + @mock_organizations def test_create_aws_organization(self): moto_client = boto3.client("organizations") @@ -129,6 +156,71 @@ def test_create_aws_organization__exception(self): self.assertTrue(org.fail) self.assertIsNone(org.org_info) + @mock_organizations + def test_create_scp_policy(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." 
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + + self.assertFalse(self.sync.fail) + self.assertEqual(policy["PolicySummary"]["Name"], policy_name) + self.assertEqual(policy["PolicySummary"]["Description"], policy_description) + self.assertEqual(policy["Content"], json.dumps(policy_content)) + + @mock_organizations + def test_create_scp_policy__exception(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = { + "Version": "2012-10-17", + "Statement": [{"Effect": "NonExistentEffect", "Action": "*", "Resource": "*"}], + } + with patch("botocore.client.BaseClient._make_api_call", self.mock_api): + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + + self.assertTrue(self.sync.fail) + self.assertIsNone(policy) + + @mock_organizations + def test_attach_scp_policy(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." 
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + + policy_id = policy["PolicySummary"]["Id"] + root_id = moto_client.list_roots()["Roots"][0]["Id"] + self.sync.attach_scp_policy(policy_id, root_id) + + current_scp_policies = moto_client.list_policies_for_target(TargetId=root_id, Filter="SERVICE_CONTROL_POLICY") + current_scp_policy_ids = [scp_policy["Id"] for scp_policy in current_scp_policies["Policies"]] + + self.assertIn(policy_id, current_scp_policy_ids) + self.assertFalse(self.sync.fail) + + @mock_organizations + def test_attach_scp_policy__exception(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + + policy_id = policy["PolicySummary"]["Id"] + root_id = self.sync.org_info["Id"] # Retrieves organization ID, not root ID, resulting in ClientError. 
+ self.sync.attach_scp_policy(policy_id, root_id) + + self.assertTrue(self.sync.fail) + class AWSSyncListTest(TestCase): """Test AWSSyncList class.""" From 9661415499801b756a90f237fb7afeabe456dca4 Mon Sep 17 00:00:00 2001 From: Jer111 <82157107+Jer111@users.noreply.github.com> Date: Tue, 11 Apr 2023 12:27:41 +0200 Subject: [PATCH 13/32] 12 moto helper (#36) * merged with development and added create_c_i_OU * Added some tests for create_c_i_OU * Added some tests for create_c_i_ou * Linting * Changed the mock_api call back to orginal * Added create_team_ou with tests * Fix problems with moto testing * Worked on tests and added apitalkerclass * Make test asserts more meaningful * black * Added tests for create_ou's without parts * Added one test that gets all children under OU * Fix linting * Changed return of response create team ou did not save the name of the team OU * Fix test create team OU * Resolved linting issues * Fix flake8 * remove create_team_ou --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> Co-authored-by: Fouad Lamsettef --- website/projects/awssync.py | 28 ++++ website/projects/tests/test_awssync.py | 190 ++++++++++++++++--------- website/room_reservation/views.py | 2 +- 3 files changed, 148 insertions(+), 72 deletions(-) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index 688bbd8a..dca36633 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -42,6 +42,7 @@ def __init__(self): self.logger = logging.getLogger("django.aws") self.logger.setLevel(logging.DEBUG) self.org_info = None + self.iterationOU_info = None self.fail = False self.logger.info("Created AWSSync instance.") @@ -101,6 +102,33 @@ def create_aws_organization(self): self.logger.debug(f"{error}") self.logger.debug(f"{error.response}") + def create_course_iteration_OU(self, iteration_id): + """ + Create an OU for the course iteration. 
+ + :param iteration_id: The ID of the course iteration + + :return: The ID of the OU + """ + client = boto3.client("organizations") + if self.org_info is None: + self.logger.info("No organization info found. Creating an AWS organization.") + self.fail = True + else: + try: + response = client.create_organizational_unit( + ParentId=self.org_info["Id"], + Name=f"Course Iteration {iteration_id}", + ) + self.logger.info(f"Created an OU for course iteration {iteration_id}.") + self.iterationOU_info = response["OrganizationalUnit"] + return response["OrganizationalUnit"]["Id"] + except ClientError as error: + self.fail = True + self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_id}.") + self.logger.debug(f"{error}") + self.logger.debug(f"{error.response}") + def generate_aws_sync_list(self, giphouse_data, aws_data): """ Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS. diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index 83823aa6..5fc1f583 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -5,6 +5,7 @@ import boto3 +import botocore from botocore.exceptions import ClientError from django.test import TestCase @@ -43,12 +44,53 @@ def setUp(self): self.mailing_list = MailingList.objects.create(address="test1") self.project = Project.objects.create(id=1, name="test1", semester=self.semester, slug="test1") self.mailing_list.projects.add(self.project) + self.mock_org = mock_organizations() + self.mock_org.start() + + def tearDown(self): + self.mock_org.stop() def test_button_pressed(self): """Test button_pressed function.""" return_value = self.sync.button_pressed() self.assertTrue(return_value) + def test_create_aws_organization(self): + moto_client = boto3.client("organizations") + org = self.sync + org.create_aws_organization() + describe_org = 
moto_client.describe_organization()["Organization"] + self.assertEqual(describe_org, org.org_info) + + def test_create_aws_organization__exception(self): + org = self.sync + with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): + org.create_aws_organization() + self.assertTrue(org.fail) + self.assertIsNone(org.org_info) + + def test_create_course_iteration_OU(self): + moto_client = boto3.client("organizations") + org = self.sync + org.create_aws_organization() + org.create_course_iteration_OU(1) + describe_unit = moto_client.describe_organizational_unit(OrganizationalUnitId=org.iterationOU_info["Id"])[ + "OrganizationalUnit" + ] + self.assertEqual(describe_unit, org.iterationOU_info) + + def test_create_course_iteration_OU_without_organization(self): + org = self.sync + org.create_course_iteration_OU(1) + self.assertTrue(org.fail) + + def test_create_course_iteration_OU__exception(self): + org = self.sync + org.create_aws_organization() + with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): + org.create_course_iteration_OU(1) + self.assertTrue(org.fail) + def test_get_all_mailing_lists(self): """Test get_all_mailing_lists function.""" mailing_lists = self.sync.get_all_mailing_lists() @@ -90,73 +132,6 @@ def test_get_emails_with_teamids_different_semester(self): self.assertIsInstance(email_id, list) self.assertEqual(email_id, []) - def mock_api(self, operation_name, kwarg): - if operation_name == "CreateOrganization": - raise ClientError( - { - "Error": { - "Message": "The AWS account is already a member of an organization.", - "Code": "AlreadyInOrganizationException", - }, - "ResponseMetadata": { - "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "HTTPStatusCode": 400, - "HTTPHeaders": { - "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "content-type": "application/x-amz-json-1.1", - "content-length": "111", - "date": "Sun, 01 Jan 2023 00:00:00 GMT", - "connection": "close", - }, - 
"RetryAttempts": 0, - }, - "Message": "The AWS account is already a member of an organization.", - }, - "create_organization", - ) - - if operation_name == "CreatePolicy": - raise ClientError( - { - "Error": { - "Message": """The provided policy document does not meet the - requirements of the specified policy type.""", - "Code": "MalformedPolicyDocumentException", - }, - "ResponseMetadata": { - "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "HTTPStatusCode": 400, - "HTTPHeaders": { - "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "content-type": "application/x-amz-json-1.1", - "content-length": "147", - "date": "Sun, 01 Jan 2023 00:00:00 GMT", - "connection": "close", - }, - "RetryAttempts": 0, - }, - "Message": """The provided policy document does not meet the - requirements of the specified policy type.""", - }, - "create_policy", - ) - - @mock_organizations - def test_create_aws_organization(self): - moto_client = boto3.client("organizations") - org = self.sync - org.create_aws_organization() - describe_org = moto_client.describe_organization()["Organization"] - self.assertEqual(describe_org, org.org_info) - - @patch("botocore.client.BaseClient._make_api_call", mock_api) - def test_create_aws_organization__exception(self): - org = self.sync - org.create_aws_organization() - self.assertTrue(org.fail) - self.assertIsNone(org.org_info) - - @mock_organizations def test_create_scp_policy(self): self.sync.create_aws_organization() @@ -170,7 +145,6 @@ def test_create_scp_policy(self): self.assertEqual(policy["PolicySummary"]["Description"], policy_description) self.assertEqual(policy["Content"], json.dumps(policy_content)) - @mock_organizations def test_create_scp_policy__exception(self): self.sync.create_aws_organization() @@ -180,13 +154,12 @@ def test_create_scp_policy__exception(self): "Version": "2012-10-17", "Statement": [{"Effect": "NonExistentEffect", "Action": "*", "Resource": "*"}], } - with 
patch("botocore.client.BaseClient._make_api_call", self.mock_api): + with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) self.assertTrue(self.sync.fail) self.assertIsNone(policy) - @mock_organizations def test_attach_scp_policy(self): moto_client = boto3.client("organizations") self.sync.create_aws_organization() @@ -206,7 +179,6 @@ def test_attach_scp_policy(self): self.assertIn(policy_id, current_scp_policy_ids) self.assertFalse(self.sync.fail) - @mock_organizations def test_attach_scp_policy__exception(self): self.sync.create_aws_organization() @@ -252,3 +224,79 @@ def test_AWS_sync_list_both_full(self): gip_list = [self.test1, self.test2] aws_list = [self.test2, self.test3] self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1]) + + +class AWSAPITalkerTest(TestCase): + def mock_api(self, operation_name, kwarg): + if operation_name == "CreateOrganization": + raise ClientError( + { + "Error": { + "Message": "The AWS account is already a member of an organization.", + "Code": "AlreadyInOrganizationException", + }, + "ResponseMetadata": { + "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "HTTPStatusCode": 400, + "HTTPHeaders": { + "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "content-type": "application/x-amz-json-1.1", + "content-length": "111", + "date": "Sun, 01 Jan 2023 00:00:00 GMT", + "connection": "close", + }, + "RetryAttempts": 0, + }, + "Message": "The AWS account is already a member of an organization.", + }, + "create_organization", + ) + if operation_name == "CreateOrganizationalUnit": + raise ClientError( + { + "Error": { + "Message": "The OU already exists.", + "Code": "ParentNotFoundException", + }, + "ResponseMetadata": { + "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "HTTPStatusCode": 400, + "HTTPHeaders": { + "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", 
+ "content-type": "application/x-amz-json-1.1", + "content-length": "111", + "date": "Sun, 01 Jan 2023 00:00:00 GMT", + "connection": "close", + }, + "RetryAttempts": 0, + }, + "Message": "The OU already exists.", + }, + "create_organizational_unit", + ) + if operation_name == "CreatePolicy": + raise ClientError( + { + "Error": { + "Message": """The provided policy document does not meet the + requirements of the specified policy type.""", + "Code": "MalformedPolicyDocumentException", + }, + "ResponseMetadata": { + "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "HTTPStatusCode": 400, + "HTTPHeaders": { + "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "content-type": "application/x-amz-json-1.1", + "content-length": "147", + "date": "Sun, 01 Jan 2023 00:00:00 GMT", + "connection": "close", + }, + "RetryAttempts": 0, + }, + "Message": """The provided policy document does not meet the + requirements of the specified policy type.""", + }, + "create_policy", + ) + return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg) diff --git a/website/room_reservation/views.py b/website/room_reservation/views.py index eeef5b36..f617d2ee 100644 --- a/website/room_reservation/views.py +++ b/website/room_reservation/views.py @@ -134,7 +134,7 @@ def get_context_data(self, **kwargs): } for reservation in Reservation.objects.filter( start_time__date__gte=timezone.now() - self.time_window_past, - start_time__date__lte=timezone.now() + self.time_window_future, + start_time__date__lt=timezone.now() + self.time_window_future, ) ] ) From 45e70df8f0681247bd5a923760fb87956f3658d1 Mon Sep 17 00:00:00 2001 From: mitchellboes <49476235+mitchellboes@users.noreply.github.com> Date: Thu, 13 Apr 2023 11:17:25 +0200 Subject: [PATCH 14/32] Add checks for edge cases between AWS and Giphouse databases (#37) * added double user check (partly) * added some checks and made two new fancy classes for the storage of AWS tree dictionaries * added tests * added equals 
for AWSTree and Iteration objects * test stupid error * does it work now? * resolved merge conflicts with rebasing on development * cleaned up code based on pull request comments --- website/projects/awssync.py | 116 +++++++++++++++++- website/projects/tests/test_awssync.py | 162 +++++++++++++++++++++++++ 2 files changed, 277 insertions(+), 1 deletion(-) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index dca36633..517cd083 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -1,4 +1,5 @@ """Framework for synchronisation with Amazon Web Services (AWS).""" +from __future__ import annotations import json import logging @@ -33,6 +34,60 @@ def __eq__(self, other): and self.project_semester == other.project_semester ) + def __repr__(self): + """Overload to string function for SyncData type.""" + return f"SyncData('{self.project_email}', '{self.project_slug}', '{self.project_semester}')" + + +class Iteration: + """Datatype for AWS data in the Course iteration OU.""" + + def __init__(self, name, ou_id, members: list[SyncData]): + """Initialize Iteration object.""" + self.name = name + self.ou_id = ou_id + self.members = members + + def __repr__(self): + """Overload to string function for Iteration datatype.""" + return f"Iteration('{self.name}', '{self.ou_id}', {self.members})" + + def __eq__(self, other: Iteration) -> bool: + """Overload equals operator for Iteration objects.""" + if not isinstance(other, Iteration): + raise TypeError("Must compare to object of type Iteration") + return self.name == other.name and self.ou_id == other.ou_id and self.members == other.members + + +class AWSTree: + """Tree structure for AWS data.""" + + def __init__(self, name, ou_id, iterations: list[Iteration]): + """Initialize AWSTree object.""" + self.name = name + self.ou_id = ou_id + self.iterations = iterations + + def __repr__(self): + """Overload to string function for AWSTree object.""" + return f"AWSTree('{self.name}', 
'{self.ou_id}', {self.iterations})" + + def __eq__(self, other: AWSTree) -> bool: + """Overload equals operator for AWSTree objects.""" + if not isinstance(other, AWSTree): + raise TypeError("Must compare to object of type AWSTree") + return self.name == other.name and self.ou_id == other.ou_id and self.iterations == other.iterations + + def awstree_to_syncdata_list(self): + """Convert AWSTree to list of SyncData elements.""" + awslist = [] + + for iteration in self.iterations: + for member in iteration.members: + awslist.append(member) + + return awslist + class AWSSync: """Synchronise with Amazon Web Services.""" @@ -129,7 +184,7 @@ def create_course_iteration_OU(self, iteration_id): self.logger.debug(f"{error}") self.logger.debug(f"{error.response}") - def generate_aws_sync_list(self, giphouse_data, aws_data): + def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]): """ Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS. 
@@ -178,3 +233,62 @@ def attach_scp_policy(self, policy_id, target_id): self.logger.error("Something went wrong attaching an SCP policy to a target.") self.logger.debug(f"{error}") self.logger.debug(f"{error.response}") + + # TODO: check if this function is really needed + + def check_for_double_member_email(self, aws_list: list[SyncData], sync_list: list[SyncData]): + """Check if no users are assigned to multiple projects.""" + sync_emails = [x.project_email for x in sync_list] + aws_emails = [x.project_email for x in aws_list] + + duplicates = [email for email in sync_emails if email in aws_emails] + + for duplicate in duplicates: + error = f"Email address {duplicate} is already in the list of members in AWS" + self.logger.info("An email clash occured while syncing.") + self.logger.debug(error) + + if duplicates != []: + return True + return False + + def check_current_ou_exists(self, AWSdata: AWSTree): + """ + Check if the the OU (organizational unit) for the current semester already exists in AWS. + + Get data in tree structure (dictionary) defined in the function that retrieves the AWS data + """ + current = Semester.objects.get_or_create_current_semester() + + for iteration in AWSdata.iterations: + if current == iteration.name: + return (True, iteration.ou_id) + + return (False, None) + + # TODO: Do we want to check for this? 
+ def check_members_in_correct_iteration(self, AWSdata: AWSTree): + """Check if the data from the member tag matches the semester OU it is in.""" + incorrect_emails = [] + for iteration in AWSdata.iterations: + for member in iteration.members: + if member.project_semester != iteration.name: + incorrect_emails.append(member.project_email) + + if incorrect_emails != []: + return (False, incorrect_emails) + + return (True, None) + + def check_double_iteration_names(self, AWSdata: AWSTree): + """Check if there are multiple OU's with the same name in AWS.""" + names = [iteration.name for iteration in AWSdata.iterations] + doubles = [] + + for name in names: + if names.count(name) != 1 and name not in doubles: + doubles.append(name) + + if doubles != []: + return (True, doubles) + return (False, None) diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index 5fc1f583..a172612d 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -300,3 +300,165 @@ def mock_api(self, operation_name, kwarg): "create_policy", ) return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg) + + +class AWSTreeChecksTest(TestCase): + """Test checks done on AWSTree data struncture.""" + + def setUp(self): + self.sync = awssync.AWSSync() + self.awstree = awssync.AWSTree("Name", "1234", []) + self.iteration = awssync.Iteration("Name", "1234", []) + self.sync_data = awssync.SyncData("email@example.com", "Project X", "Spring 2020") + + self.sync_list = [ + awssync.SyncData("email1@example.com", "Spring 2022", "Project A"), + awssync.SyncData("email2@example.com", "Fall 2022", "Project B"), + awssync.SyncData("email3@example.com", "Spring 2022", "Project C"), + ] + self.aws_list = [ + awssync.SyncData("email4@example.com", "Fall 2021", "Project D"), + awssync.SyncData("email5@example.com", "Spring 2022", "Project E"), + awssync.SyncData("email6@example.com", "Fall 2022", "Project F"), + ] + + 
self.treelist = [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + awssync.SyncData("email3@example.com", "project3", "Spring 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ] + + self.aws_tree1 = awssync.AWSTree( + "AWS Tree", + "12345", + [ + awssync.Iteration( + "Fall 2020", + "54321", + [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + awssync.Iteration( + "Spring 2021", + "98765", + [ + awssync.SyncData("email3@example.com", "project3", "Spring 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree2 = awssync.AWSTree( + "AWS Tree", + "12345", + [ + awssync.Iteration( + "Fall 2020", + "54321", + [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + awssync.Iteration( + "Spring 2021", + "98765", + [ + awssync.SyncData("email3@example.com", "project3", "Fall 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree3 = awssync.AWSTree( + "AWS Tree", + "12345", + [ + awssync.Iteration( + "Fall 2020", + "54321", + [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + awssync.Iteration( + "Fall 2020", + "98765", + [ + awssync.SyncData("email3@example.com", "project3", "Fall 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + def test_repr_AWSTree(self): + self.assertEquals(str(self.awstree), "AWSTree('Name', '1234', [])") + + def test_repr_Iteration(self): + self.assertEquals(str(self.iteration), "Iteration('Name', '1234', [])") + + def test_repr_SyncData(self): + self.assertEquals(str(self.sync_data), 
"SyncData('email@example.com', 'Project X', 'Spring 2020')") + + def test_awstree_to_syncdata_list(self): + self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist) + + def test_check_for_double_member_email(self): + # Test when there are no duplicate emails + self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) + + # Test when there is a duplicate email + self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G")) + self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) + + def test_check_current_ou_exists(self): + # Test when current semester OU does not exist + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"): + self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022") + val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) + self.assertEqual((val1, val2), (False, None)) + + # Test when current semester OU exists + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"): + self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021") + val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) + self.assertEqual((val1, val2), (True, "98765")) + + def test_check_members_in_correct_iteration(self): + # Test when correct + val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree1) + self.assertEqual((val1, val2), (True, None)) + + # Test when incorrect + val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2) + self.assertEqual((val1, val2), (False, ["email3@example.com"])) + + def test_check_double_iteration_names(self): + # Test when correct + val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1) + self.assertEqual((val1, val2), (False, None)) + + # Test when double + val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3) + 
self.assertEqual((val1, val2), (True, ["Fall 2020"])) + + def test_AWSTree_equals(self): + self.assertEqual(self.aws_tree1, self.aws_tree1) + self.assertNotEqual(self.aws_tree1, self.aws_tree2) + with self.assertRaises(TypeError): + awssync.AWSTree("", "", []) == [] + self.assertRaises(TypeError) + + def test_Iteration_equals(self): + self.assertEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[0]) + self.assertNotEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[1]) + with self.assertRaises(TypeError): + awssync.Iteration("", "", []) == [] + self.assertRaises(TypeError) From e8086b12227159d986a1be57e103e9341d66d5c3 Mon Sep 17 00:00:00 2001 From: flam123 Date: Fri, 14 Apr 2023 10:32:23 +0200 Subject: [PATCH 15/32] Extraction of AWS data * Added function for extracting data * Added mock format * finished function (testing needed) * Linting fix * fix no return of tree * Fix AttributeError * Unwrap tuple instead of accessing by element to increase code readability * Fixed for new struct * Implementation bug fixes * added tests (not done) * Removed classes for merge * Added function for extracting data * Added mock format * finished function (testing needed) * Linting fix * fix no return of tree * Fix AttributeError * Unwrap tuple instead of accessing by element to increase code readability * Fixed for new struct * Implementation bug fixes * added tests (not done) * Linting fix * git fixes * Black fix * pydocstyle fix * Black fix again * removed flake8 'fix' * Final flake8 fix * Final final flake8 fix * spelling error fix --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> --- website/projects/awssync.py | 47 ++++++++++++++++++++++++-- website/projects/tests/test_awssync.py | 46 +++++++++++++++++++++++++ 2 files changed, 90 insertions(+), 3 deletions(-) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index 517cd083..b4fb8d84 100644 --- a/website/projects/awssync.py +++ 
b/website/projects/awssync.py @@ -188,7 +188,7 @@ def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[S """ Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS. - This includes their ID and email address, to be able to put users in the correct AWS orginization later. + This includes their ID and email address, to be able to put users in the correct AWS organization later. """ sync_list = [x for x in giphouse_data if x not in aws_data] return sync_list @@ -199,8 +199,8 @@ def create_scp_policy(self, policy_name, policy_description, policy_content): :param policy_name: The policy name. :param policy_description: The policy description. - :param policy_content: The policy configuration as a dictionary. The policy is automatically - converted to JSON format, including escaped quotation marks. + :param policy_content: The policy configuration as a dictionary. + The policy is automatically converted to JSON format, including escaped quotation marks. :return: Details of newly created policy as a dict on success and NoneType object otherwise. """ client = boto3.client("organizations") @@ -292,3 +292,44 @@ def check_double_iteration_names(self, AWSdata: AWSTree): if doubles != []: return (True, doubles) return (False, None) + + def extract_aws_setup(self, parent_ou_id): + """ + Give a list of all the children of the parent OU. + + :param parent_ou_id: The ID of the parent OU. 
+ """ + client = boto3.client("organizations") + try: + response = client.list_organizational_units_for_parent(ParentId=parent_ou_id) + aws_tree = AWSTree("root", parent_ou_id, []) + for iteration in response["OrganizationalUnits"]: + ou_id = iteration["Id"] + ou_name = iteration["Name"] + response = client.list_accounts_for_parent(ParentId=ou_id) + children = response["Accounts"] + syncData = [] + for child in children: + account_id = child["Id"] + account_email = child["Email"] + response = client.list_tags_for_resource(ResourceId=account_id) + tags = response["Tags"] + merged_tags = {d["Key"]: d["Value"] for d in tags} + self.logger.debug(merged_tags) + if all(key in merged_tags for key in ["project_slug", "project_semester"]): + syncData.append( + SyncData(account_email, merged_tags["project_slug"], merged_tags["project_semester"]) + ) + else: + self.logger.error( + "Could not find project_slug or project_semester tag for account with ID: " + account_id + ) + self.fail = True + + aws_tree.iterations.append(Iteration(ou_name, ou_id, syncData)) + return aws_tree + except ClientError as error: + self.fail = True + self.logger.error("Something went wrong extracting the AWS setup.") + self.logger.debug(f"{error}") + self.logger.debug(f"{error.response}") diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index a172612d..8a0c1b12 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -193,6 +193,52 @@ def test_attach_scp_policy__exception(self): self.assertTrue(self.sync.fail) + @mock_organizations + def test_get_aws_data(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + root_id = moto_client.list_roots()["Roots"][0]["Id"] + + response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1") + OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] + response_account_1 = moto_client.create_account( + 
Email="account_1@gmail.com", + AccountName="account_1", + Tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}], + ) + account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] + moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id) + + aws_tree = self.sync.extract_aws_setup(root_id) + iteration_test = awssync.Iteration("OU_1", OU_1_id, [awssync.SyncData("account_1@gmail.com", "test1", "2021")]) + aws_tree_test = awssync.AWSTree("root", root_id, [iteration_test]) + self.assertEquals(aws_tree, aws_tree_test) + + @mock_organizations + def test_get_aws_data_no_root(self): + boto3.client("organizations") + self.sync.create_aws_organization() + self.sync.extract_aws_setup("NonExistentRootID") + self.assertTrue(self.sync.fail) + + @mock_organizations + def test_get_aws_data_no_slugs(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + root_id = moto_client.list_roots()["Roots"][0]["Id"] + + response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1") + OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] + response_account_1 = moto_client.create_account( + Email="account_1@gmail.com", + AccountName="account_1", + Tags=[], + ) + account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] + moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id) + self.sync.extract_aws_setup(root_id) + self.assertTrue(self.sync.fail) + class AWSSyncListTest(TestCase): """Test AWSSyncList class.""" From d99c9c2a7bf29ef990e9f9fe636bfd94db5fd8f4 Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Sat, 22 Apr 2023 13:09:33 +0000 Subject: [PATCH 16/32] AWS synchronization pipeline (and integration bug fixes) (#42) * Create and attach SCP policies (#29) * Add functions for creating and attaching SCP policies * Improve test cases * Add 
function template * Fix 'a/an' grammar mistake * Add pipeline preconditions * Add pipeline preconditions tests * Add checks for required API actions * Add test cases for checking required API actions * Added implementation of creating and attaching policy in the pipeline * Remove double API actions * Added implementation of creating and moving accounts in the pipeline. This should still be divided into smaller functions. * Increase code readability * Stop checking account request status after known failure * Fixed small typos and added account details to a debug message about account creation failure * Added tests for pipeline policy and fixed typos in debug messages. * Split creating and moving accounts into multiple functions, and handle exceptions * added update_course_itation_ou with tests * updated pipeline_update_current_course_iteration * Add test cases for creating and moving member accounts * Bug fixes for pipeline dependencies that arose from integration sprint 2 tasks * Revised pipeline policy function and corresponding tests so that it should not fail after first pipeline run * Change duplicate policy attachment to soft-fail; replace organization ID with root ID --------- Co-authored-by: Henk Co-authored-by: Jer111 --- website/projects/awssync.py | 408 ++++++++++++++- website/projects/tests/test_awssync.py | 673 ++++++++++++++++++++++++- 2 files changed, 1062 insertions(+), 19 deletions(-) diff --git a/website/projects/awssync.py b/website/projects/awssync.py index b4fb8d84..96e71327 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -3,10 +3,12 @@ import json import logging +import time import boto3 from botocore.exceptions import ClientError +from botocore.exceptions import NoCredentialsError from courses.models import Semester @@ -94,11 +96,72 @@ class AWSSync: def __init__(self): """Create an AWSSync instance.""" + self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 5 + self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3 + self.logger = 
logging.getLogger("django.aws") self.logger.setLevel(logging.DEBUG) self.org_info = None self.iterationOU_info = None + self.policy_id = "p-examplepolicyid111" self.fail = False + self.required_aws_actions = [ + # "organizations:AcceptHandshake", + "organizations:AttachPolicy", + # "organizations:CancelHandshake", + # "organizations:CloseAccount", + "organizations:CreateAccount", + # "organizations:CreateGovCloudAccount", + "organizations:CreateOrganization", + "organizations:CreateOrganizationalUnit", + "organizations:CreatePolicy", + # "organizations:DeclineHandshake", + # "organizations:DeleteOrganization", + "organizations:DeleteOrganizationalUnit", + "organizations:DeletePolicy", + "organizations:DeleteResourcePolicy", + # "organizations:DeregisterDelegatedAdministrator", + "organizations:DescribeAccount", + "organizations:DescribeCreateAccountStatus", + "organizations:DescribeEffectivePolicy", + # "organizations:DescribeHandshake", + "organizations:DescribeOrganization", + "organizations:DescribeOrganizationalUnit", + "organizations:DescribePolicy", + "organizations:DescribeResourcePolicy", + "organizations:DetachPolicy", + # "organizations:DisableAWSServiceAccess", + "organizations:DisablePolicyType", + # "organizations:EnableAWSServiceAccess", + # "organizations:EnableAllFeatures", + "organizations:EnablePolicyType", + # "organizations:InviteAccountToOrganization", + # "organizations:LeaveOrganization", + # "organizations:ListAWSServiceAccessForOrganization", + "organizations:ListAccounts", + "organizations:ListAccountsForParent", + "organizations:ListChildren", + "organizations:ListCreateAccountStatus", + # "organizations:ListDelegatedAdministrators", + # "organizations:ListDelegatedServicesForAccount", + # "organizations:ListHandshakesForAccount", + # "organizations:ListHandshakesForOrganization", + "organizations:ListOrganizationalUnitsForParent", + "organizations:ListParents", + "organizations:ListPolicies", + "organizations:ListPoliciesForTarget", + 
"organizations:ListRoots", + "organizations:ListTagsForResource", + "organizations:ListTargetsForPolicy", + "organizations:MoveAccount", + "organizations:PutResourcePolicy", + # "organizations:RegisterDelegatedAdministrator", + # "organizations:RemoveAccountFromOrganization", + "organizations:TagResource", + "organizations:UntagResource", + "organizations:UpdateOrganizationalUnit", + "organizations:UpdatePolicy", + ] self.logger.info("Created AWSSync instance.") def button_pressed(self): @@ -108,7 +171,7 @@ def button_pressed(self): :return: True if function executes successfully """ self.logger.info("Pressed button") - self.logger.info(self.get_emails_with_teamids()) + self.logger.debug(f"Pipeline result: {self.pipeline()}") return True def get_all_mailing_lists(self): @@ -157,11 +220,11 @@ def create_aws_organization(self): self.logger.debug(f"{error}") self.logger.debug(f"{error.response}") - def create_course_iteration_OU(self, iteration_id): + def create_course_iteration_OU(self, iteration_name): """ Create an OU for the course iteration. 
- :param iteration_id: The ID of the course iteration + :param iteration_name: The name of the course iteration OU :return: The ID of the OU """ @@ -171,16 +234,17 @@ def create_course_iteration_OU(self, iteration_id): self.fail = True else: try: + root_id = client.list_roots()["Roots"][0]["Id"] response = client.create_organizational_unit( - ParentId=self.org_info["Id"], - Name=f"Course Iteration {iteration_id}", + ParentId=root_id, + Name=iteration_name, ) - self.logger.info(f"Created an OU for course iteration {iteration_id}.") + self.logger.info(f"Created an OU for course iteration {iteration_name}.") self.iterationOU_info = response["OrganizationalUnit"] return response["OrganizationalUnit"]["Id"] except ClientError as error: self.fail = True - self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_id}.") + self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_name}.") self.logger.debug(f"{error}") self.logger.debug(f"{error.response}") @@ -195,7 +259,7 @@ def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[S def create_scp_policy(self, policy_name, policy_description, policy_content): """ - Create a SCP policy. + Create an SCP policy. :param policy_name: The policy name. :param policy_description: The policy description. @@ -220,7 +284,7 @@ def create_scp_policy(self, policy_name, policy_description, policy_content): def attach_scp_policy(self, policy_id, target_id): """ - Attaches a SCP policy to a target (root, OU, or member account). + Attaches an SCP policy to a target (root, OU, or member account). :param policy_id: The ID of the policy to be attached. :param target_id: The ID of the target root, OU, or member account. 
@@ -229,12 +293,331 @@ def attach_scp_policy(self, policy_id, target_id): try: client.attach_policy(PolicyId=policy_id, TargetId=target_id) except ClientError as error: - self.fail = True + if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException": + self.fail = True self.logger.error("Something went wrong attaching an SCP policy to a target.") self.logger.debug(f"{error}") self.logger.debug(f"{error.response}") - # TODO: check if this function is really needed + def check_aws_api_connection(self): + """ + Check whether boto3 can connect to AWS API with current credentials. + + :returns: First tuple element always exists and indicates success. + Second tuple element is contains information about the entity + who made the successful API call and None otherwise. + """ + client_sts = boto3.client("sts") + try: + caller_identity_info = client_sts.get_caller_identity() + except (NoCredentialsError, ClientError) as error: + self.logger.info("Establishing AWS API connection failed.") + self.logger.debug(error) + return False, None + else: + self.logger.info("Establishing AWS API connection succeeded.") + + return True, caller_identity_info + + def check_iam_policy(self, iam_user_arn, desired_actions): + """ + Check for the specified IAM user ARN whether the actions in list \ + desired_actions are allowed according to its IAM policy. + + :param iam_user_arn: ARN of the IAM user being checked. + :param iam_actions: List of AWS API actions to check. + :returns: True iff all actions in desired_actions are allowed. 
+ """ + client_iam = boto3.client("iam") + + try: + response = client_iam.simulate_principal_policy(PolicySourceArn=iam_user_arn, ActionNames=desired_actions) + except ClientError as error: + self.logger.info("AWS API actions check failed.") + self.logger.debug(error) + return False + + success = True + for evaluation_result in response["EvaluationResults"]: + action_name = evaluation_result["EvalActionName"] + if evaluation_result["EvalDecision"] != "allowed": + self.logger.debug(f"The AWS API action {action_name} is denied for IAM user {iam_user_arn}.") + success = False + + if success: + self.logger.info("AWS API actions check succeeded.") + + return success + + def check_organization_existence(self): + """ + Check whether an AWS organization exists for the AWS API caller's account. + + :returns: First tuple element always exists and indicates success. + Second tuple element is describes properties of the organization and None otherwise. + """ + client_organizations = boto3.client("organizations") + + try: + response_org = client_organizations.describe_organization() + except ClientError as error: + self.logger.info("AWS organization existence check failed.") + self.logger.debug(error) + return False, None + else: + self.logger.info("AWS organization existence check succeeded.") + + return True, response_org["Organization"] + + def check_is_management_account(self, api_caller_info, organization_info): + """ + Check whether caller of AWS API has organization's management account ID. + + :returns: True iff the current organization's management account ID equals the AWS API caller's account ID. 
+ """ + management_account_id = organization_info["MasterAccountId"] + api_caller_account_id = api_caller_info["Account"] + is_management_account = management_account_id == api_caller_account_id + + if is_management_account: + self.logger.info("Management account check succeeded.") + else: + self.logger.info("Management account check failed.") + self.logger.debug(f"The organization's management account ID is: '{management_account_id}'.") + self.logger.debug(f"The AWS API caller account ID is: '{api_caller_account_id}'.") + + return is_management_account + + def check_scp_enabled(self, organization_info): + """ + Check whether the SCP policy type is an enabled feature for the AWS organization. + + :returns: True iff the SCP policy type feature is enabled for the organization. + """ + scp_is_enabled = False + for policy in organization_info["AvailablePolicyTypes"]: + if policy["Type"] == "SERVICE_CONTROL_POLICY" and policy["Status"] == "ENABLED": + scp_is_enabled = True + break + + if not scp_is_enabled: + self.logger.info("The SCP policy type is disabled for the organization.") + self.logger.debug(organization_info["AvailablePolicyTypes"]) + else: + self.logger.info("Organization SCP policy status check succeeded.") + + return scp_is_enabled + + def pipeline_preconditions(self): + """ + Check all crucial pipeline preconditions. + + 1. Locatable boto3 credentials and successful AWS API connection + 2. Check allowed AWS API actions based on IAM policy of caller + 3. Existing organization for AWS API caller + 4. AWS API caller acts under same account ID as organization's management account ID + 5. SCP policy type feature enabled for organization + + :return: True iff all pipeline preconditions are met. 
+ """ + check_api_connection, api_caller_info = self.check_aws_api_connection() + if not check_api_connection: + return False + + check_api_actions = self.check_iam_policy(api_caller_info["Arn"], self.required_aws_actions) + if not check_api_actions: + return False + + check_org_existence, organization_info = self.check_organization_existence() + self.org_info = organization_info + if not check_org_existence: + return False + + check_acc_management = self.check_is_management_account(api_caller_info, organization_info) + if not check_acc_management: + return False + + check_scp_enabled = self.check_scp_enabled(organization_info) + if not check_scp_enabled: + return False + + return True + + def pipeline_policy(self, ou_id): + """ + Create an SCP policy and attaches it to the organizational unit of the current semester. + + :param ou_id: ID of the organizational unit for the current semester. + :return: True iff the policy to be attached to the OU already exists and is successfully attached. + """ + client = boto3.client("organizations") + try: + client.describe_policy(PolicyId=self.policy_id) + except ClientError as error: + self.logger.debug(error) + return False + + self.attach_scp_policy(self.policy_id, ou_id) + if self.fail: + return False + return True + + def pipeline_create_account(self, sync_data): + """ + Create a single new AWS member account in the organization of the API caller. + + The status of the member account request is repeatedly checked based on the class' attributes: + self.ACCOUNT_REQUEST_INTERVAL_SECONDS: thread sleeping time before each status check + self.ACCOUNT_REQUEST_MAX_ATTEMPTS: maximum number of times to thread sleep and check + + :param email: The e-mail address of the new member account. + :param username: The username of the new member account. + :returns: (True, account_id) on success and otherwise (False, failure_reason). + """ + client = boto3.client("organizations") + + # Request new member account. 
+ try: + response_create = client.create_account( + Email=sync_data.project_email, + AccountName=sync_data.project_slug, + IamUserAccessToBilling="DENY", + Tags=[ + {"Key": "project_slug", "Value": sync_data.project_slug}, + {"Key": "project_semester", "Value": sync_data.project_semester}, + ], + ) + except ClientError as error: + self.logger.debug(error) + return False, "CLIENTERROR_CREATE_ACCOUNT" + + # Repeatedly check status of new member account request. + request_id = response_create["CreateAccountStatus"]["Id"] + for _ in range(1, self.ACCOUNT_REQUEST_MAX_ATTEMPTS + 1): + time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS) + + try: + response_status = client.describe_create_account_status(CreateAccountRequestId=request_id) + except ClientError as error: + self.logger.debug(error) + return False, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS" + + request_state = response_status["CreateAccountStatus"]["State"] + if request_state == "FAILED": + return False, response_status["CreateAccountStatus"]["FailureReason"] + elif request_state == "SUCCEEDED": + return True, response_status["CreateAccountStatus"]["AccountId"] + + return False, "STILL_IN_PROGRESS" + + def pipeline_create_and_move_accounts(self, new_member_accounts, root_id, destination_ou_id): + """ + Create multiple accounts in the organization of the API caller and move them from the root to a destination OU. + + :param new_member_accounts: List of SyncData objects. + :param root_id: The organization's root ID. + :param destination_ou_id: The organization's destination OU ID. + :returns: True iff **all** new member accounts were created and moved successfully. 
+ """ + client = boto3.client("organizations") + overall_success = True + + for new_member in new_member_accounts: + success, response = self.pipeline_create_account(new_member) + if success: + account_id = response + try: + root_id = client.list_roots()["Roots"][0]["Id"] + client.move_account( + AccountId=account_id, SourceParentId=root_id, DestinationParentId=destination_ou_id + ) + except ClientError as error: + self.logger.debug(error) + overall_success = False + else: + failure_reason = response + self.logger.debug(failure_reason) + overall_success = False + + return overall_success + + def pipeline_update_current_course_iteration_ou(self, aws_tree): + """ + Update the AWS tree with the new course iteration OU's. + + :param aws_tree: The AWS tree to be checked. + :returns: True, iteration_id on success and otherwise False, failure_reason. + """ + is_current_iteration, iteration_ou_id = self.check_current_ou_exists(aws_tree) + + if not is_current_iteration: + iteration_name = str(Semester.objects.get_or_create_current_semester()) + iteration_ou_id = self.create_course_iteration_OU(iteration_name) + + if not self.fail: + return True, iteration_ou_id + else: + return False, "ITERATION_OU_CREATION_FAILED" + + def pipeline(self): + """ + Single pipeline that integrates all buildings blocks for the AWS integration process. + + :return: True iff all pipeline stages successfully executed. + """ + # Check preconditions. + if not self.pipeline_preconditions(): + return False + + # Get synchronization data. 
+ client = boto3.client("organizations") + try: + root_id = client.list_roots()["Roots"][0]["Id"] + except ClientError as error: + self.logger.debug("Failed to retrieve root ID of organization.") + self.logger.debug(error) + return False + + aws_tree = self.extract_aws_setup(root_id) + if self.fail: + self.logger.debug("Extracting AWS setup failed.") + return False + + aws_sync_data = aws_tree.awstree_to_syncdata_list() + giphouse_sync_data = self.get_emails_with_teamids() + merged_sync_data = self.generate_aws_sync_list(giphouse_sync_data, aws_sync_data) + + # Check edge cases. + if self.check_for_double_member_email(aws_sync_data, merged_sync_data): + return False + + success, incorrect_emails = self.check_members_in_correct_iteration(aws_tree) + if not success: + self.logger.debug(f"Got incorrectly placed AWS member accounts: {incorrect_emails}.") + return False + + failure, double_iteration_names = self.check_double_iteration_names(aws_tree) + if failure: + self.logger.debug(f"Found double iteration names: {double_iteration_names}.") + return False + + # Check/create course iteration OU. + current_course_iteration_exists, response = self.pipeline_update_current_course_iteration_ou(aws_tree) + if not current_course_iteration_exists: + failure_reason = response + self.logger.debug(failure_reason) + return False + course_iteration_ou_id = response + + # Create and attach SCP policy to course iteration OU. + if not self.pipeline_policy(course_iteration_ou_id): + return False + + # Create new member accounts and move to course iteration OU. 
+ if not self.pipeline_create_and_move_accounts(merged_sync_data, root_id, course_iteration_ou_id): + return False + + return True def check_for_double_member_email(self, aws_list: list[SyncData], sync_list: list[SyncData]): """Check if no users are assigned to multiple projects.""" @@ -258,7 +641,7 @@ def check_current_ou_exists(self, AWSdata: AWSTree): Get data in tree structure (dictionary) defined in the function that retrieves the AWS data """ - current = Semester.objects.get_or_create_current_semester() + current = str(Semester.objects.get_or_create_current_semester()) for iteration in AWSdata.iterations: if current == iteration.name: @@ -266,7 +649,6 @@ def check_current_ou_exists(self, AWSdata: AWSTree): return (False, None) - # TODO: Do we want to check for this? def check_members_in_correct_iteration(self, AWSdata: AWSTree): """Check if the data from the member tag matches the semester OU it is in.""" incorrect_emails = [] diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index 8a0c1b12..d2e7cec7 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -1,7 +1,7 @@ """Tests for awssync.py.""" import json -from unittest.mock import patch +from unittest.mock import MagicMock, patch import boto3 @@ -10,7 +10,7 @@ from django.test import TestCase -from moto import mock_organizations +from moto import mock_organizations, mock_sts from courses.models import Semester @@ -50,6 +50,9 @@ def setUp(self): def tearDown(self): self.mock_org.stop() + def simulateFailure(self): + self.sync.fail = True + def test_button_pressed(self): """Test button_pressed function.""" return_value = self.sync.button_pressed() @@ -73,7 +76,7 @@ def test_create_course_iteration_OU(self): moto_client = boto3.client("organizations") org = self.sync org.create_aws_organization() - org.create_course_iteration_OU(1) + org.create_course_iteration_OU("1") describe_unit = 
moto_client.describe_organizational_unit(OrganizationalUnitId=org.iterationOU_info["Id"])[ "OrganizationalUnit" ] @@ -81,14 +84,15 @@ def test_create_course_iteration_OU(self): def test_create_course_iteration_OU_without_organization(self): org = self.sync - org.create_course_iteration_OU(1) + org.create_course_iteration_OU("1") self.assertTrue(org.fail) def test_create_course_iteration_OU__exception(self): org = self.sync org.create_aws_organization() - with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): - org.create_course_iteration_OU(1) + with patch("boto3.client") as mocker: + mocker().list_roots.side_effect = ClientError({}, "list_roots") + org.create_course_iteration_OU("1") self.assertTrue(org.fail) def test_get_all_mailing_lists(self): @@ -193,6 +197,663 @@ def test_attach_scp_policy__exception(self): self.assertTrue(self.sync.fail) + @mock_sts + def test_check_aws_api_connection(self): + success, caller_identity_info = self.sync.check_aws_api_connection() + + self.assertTrue(success) + self.assertIsNotNone(caller_identity_info) + + @mock_sts + def test_check_aws_api_connection__exception(self): + with patch("boto3.client") as mocker: + mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity") + mocker.return_value = mocker + success, caller_identity_info = self.sync.check_aws_api_connection() + + self.assertFalse(success) + self.assertIsNone(caller_identity_info) + + # IAM simulate_principal_policy is not covered by moto. 
+ def test_check_iam_policy(self): + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + # success == True + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + success = self.sync.check_iam_policy(iam_user_arn, desired_actions) + self.assertTrue(success) + + # success == False + mock_evaluation_results["EvaluationResults"][0]["EvalDecision"] = "implicitDeny" + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + success = self.sync.check_iam_policy(iam_user_arn, desired_actions) + self.assertFalse(success) + + def test_check_iam_policy__exception(self): + iam_user_arn = "daddy" + desired_actions = [] + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.side_effect = ClientError({}, "simulate_principal_policy") + success = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + self.assertFalse(success) + + def test_check_organization_existence(self): + moto_client = boto3.client("organizations") + organization_create_info = moto_client.create_organization(FeatureSet="ALL")["Organization"] + success, organization_describe_info = self.sync.check_organization_existence() + + self.assertTrue(success) + self.assertEqual(organization_create_info, organization_describe_info) + + def test_check_organization_existence__exception(self): + with patch("boto3.client") as mocker: + mocker.describe_organization.side_effect = ClientError({}, "describe_organization") + mocker.return_value = mocker + success, organization_info = self.sync.check_organization_existence() + + self.assertFalse(success) + self.assertIsNone(organization_info) + + @mock_sts + def test_check_is_management_account(self): + moto_client = 
boto3.client("organizations") + + moto_client.create_organization(FeatureSet="ALL")["Organization"] + _, caller_identity_info = self.sync.check_aws_api_connection() + _, organization_info = self.sync.check_organization_existence() + + # is_management_account == True + success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info) + self.assertTrue(success_acc) + + # is_management_account == False + caller_identity_info["Account"] = "daddy" + success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info) + self.assertFalse(success_acc) + + def test_check_scp_enabled(self): + moto_client = boto3.client("organizations") + + # SCP enabled. + organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"] + scp_is_enabled = self.sync.check_scp_enabled(organization_info) + self.assertTrue(scp_is_enabled) + + # SCP semi-disabled (pending). + organization_info["AvailablePolicyTypes"][0]["Status"] = "PENDING_DISABLE" + scp_is_enabled = self.sync.check_scp_enabled(organization_info) + self.assertFalse(scp_is_enabled) + + # SCP disabled (empty list). + organization_info["AvailablePolicyTypes"] = [] + scp_is_enabled = self.sync.check_scp_enabled(organization_info) + self.assertFalse(scp_is_enabled) + + @mock_sts + def test_pipeline_preconditions__all_success(self): + # Create organization. + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + # Mock return value of simulate_principal_policy. 
+ iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + mocker.return_value = check_iam_policy + success = self.sync.pipeline_preconditions() + + self.assertTrue(success) + + @mock_sts + def test_pipeline_preconditions__no_connection(self): + with patch("boto3.client") as mocker: + mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity") + mocker.return_value = mocker + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + def test_pipeline_preconditions__no_iam(self): + # Mock return value of simulate_principal_policy. + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "implicitDeny", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_api_actions = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + mocker.return_value = check_api_actions + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + @mock_sts + def test_pipeline_preconditions__no_organization(self): + # Mock return value of simulate_principal_policy. 
+ iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + mocker.return_value = check_iam_policy + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + @mock_sts + def test_pipeline_preconditions__no_management(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL") + + # Mock return value of simulate_principal_policy. + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. 
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam: + mocker_iam.return_value = check_iam_policy + with patch("projects.awssync.AWSSync.check_aws_api_connection") as mocker_api: + mocker_api.return_value = True, {"Account": "daddy", "Arn": "01234567890123456789"} + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + @mock_sts + def test_pipeline_preconditions__no_scp(self): + moto_client = boto3.client("organizations") + + organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"] + + # Mock return value of simulate_principal_policy. + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam: + mocker_iam.return_value = check_iam_policy + + # Mock return value of check_organization_existence with no SCP policy enabled. + organization_info["AvailablePolicyTypes"] = [] + with patch("projects.awssync.AWSSync.check_organization_existence") as mocker: + mocker.return_value = True, organization_info + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + """ + def test_pipeline_create_scp_policy(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." 
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + + policy = self.sync.pipeline_create_scp_policy() + + self.assertFalse(self.sync.fail) + self.assertEqual(policy["PolicySummary"]["Name"], policy_name) + self.assertEqual(policy["PolicySummary"]["Description"], policy_description) + self.assertEqual(policy["Content"], json.dumps(policy_content)) + + def test_pipeline_create_scp_policy__exception(self): + self.sync.create_aws_organization() + + with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): + policy = self.sync.pipeline_create_scp_policy() + + self.assertTrue(self.sync.fail) + self.assertIsNone(policy) + """ + + def test_pipeline_policy(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + self.sync.policy_id = policy["PolicySummary"]["Id"] + + ou_id = self.sync.create_course_iteration_OU("Test") + + success = self.sync.pipeline_policy(ou_id) + self.assertTrue(success) + + def test_pipeline_policy__exception(self): + self.sync.create_aws_organization() + + ou_id = self.sync.create_course_iteration_OU("Test") + + success = self.sync.pipeline_policy(ou_id) + self.assertFalse(success) + + def test_pipeline_policy__failure_attach(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." 
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + self.sync.policy_id = policy["PolicySummary"]["Id"] + + ou_id = self.sync.create_course_iteration_OU("Test") + + self.sync.attach_scp_policy = MagicMock(side_effect=self.simulateFailure()) + + success = self.sync.pipeline_policy(ou_id) + self.assertFalse(success) + + @mock_sts + def test_pipeline(self): + moto_client = boto3.client("organizations") + + # pipeline_preconditions() == False + success = self.sync.pipeline() + self.assertFalse(success) + + # pipeline_preconditions() == True + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + self.sync.policy_id = policy["PolicySummary"]["Id"] + + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + mocker.return_value = check_iam_policy + success = self.sync.pipeline() + + self.assertTrue(success) + + def test_pipeline__exception_list_roots(self): + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + with patch("boto3.client") as mocker: + mocker().list_roots.side_effect = ClientError({}, "list_roots") + success = self.sync.pipeline() + + 
self.assertFalse(success) + + def test_pipeline__edge_case_double_emails(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + aws_tree = awssync.AWSTree( + "Root", + "123", + [ + awssync.Iteration( + "Spring 2023", + "456", + [ + awssync.SyncData("email1@example.com", "project1", "Spring 2023"), + ], + ) + ], + ) + + gip_teams = [ + awssync.SyncData("email1@example.com", "project1", "Spring 2023"), + awssync.SyncData("email1@example.com", "project2", "Spring 2023"), + ] + + self.sync.pipeline_preconditions = MagicMock(return_value=True) + self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) + self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__edge_case_incorrectly_placed(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + aws_tree = awssync.AWSTree( + "Root", + "123", + [ + awssync.Iteration( + "Fall 2023", + "456", + [ + awssync.SyncData("email1@example.com", "project1", "Spring 2023"), + ], + ) + ], + ) + + gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")] + + self.sync.pipeline_preconditions = MagicMock(return_value=True) + self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) + self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__edge_case_double_iteration_names(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + aws_tree = awssync.AWSTree( + "Root", + "123", + [ + awssync.Iteration( + "Spring 
2023", "456", [awssync.SyncData("email1@example.com", "project1", "Spring 2023")] + ), + awssync.Iteration("Spring 2023", "789", []), + ], + ) + + gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")] + + self.sync.pipeline_preconditions = MagicMock(return_value=True) + self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) + self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__failed_creating_iteration_ou(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + self.sync.pipeline_preconditions = MagicMock(return_value=True) + with patch("boto3.client") as mocker: + mocker().create_organizational_unit.side_effect = ClientError({}, "create_organizational_unit") + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__exception_attaching_policy(self): + self.sync.create_aws_organization() + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + with patch("boto3.client") as mocker: + mocker().attach_policy.side_effect = ClientError( + {"Error": {"Code": "PolicyTypeNotEnabledException"}}, "attach_policy" + ) + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__already_attached_policy(self): + self.sync.create_aws_organization() + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + with patch("boto3.client") as mocker: + mocker().attach_policy.side_effect = ClientError( + {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy" + ) + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__failed_create_and_move_account(self): + self.sync.create_aws_organization() + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + 
with patch("boto3.client") as mocker: + mocker().move_account.side_effect = ClientError({}, "move_account") + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__exception_extract_aws_setup(self): + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + with patch("boto3.client") as mocker: + mocker().list_organizational_units_for_parent.side_effect = ClientError( + {}, "list_organizational_units_for_parent" + ) + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline_update_current_course_iteration_ou___failure_check_current_ou(self): + + self.sync.check_current_ou_exists = MagicMock(return_value=(False, None)) + + self.sync.create_aws_organization() + success, id = self.sync.pipeline_update_current_course_iteration_ou(None) + self.assertTrue(success) + self.assertFalse(id is None) + + def test_pipeline_update_current_course_iteration_ou___success(self): + + self.sync.check_current_ou_exists = MagicMock(return_value=(True, "1234")) + + self.sync.create_aws_organization() + success, id = self.sync.pipeline_update_current_course_iteration_ou(None) + self.assertTrue(success) + self.assertEquals(id, "1234") + + def test_pipeline_update_current_course_iteration_ou___failure_create_ou(self): + + self.sync.check_current_ou_exists = MagicMock(return_value=(False, None)) + self.sync.create_course_iteration_OU = MagicMock(side_effect=self.simulateFailure()) + + self.sync.create_aws_organization() + success, failure_reason = self.sync.pipeline_update_current_course_iteration_ou(None) + + self.assertFalse(success) + self.assertEquals(failure_reason, "ITERATION_OU_CREATION_FAILED") + self.assertTrue(self.sync.fail) + + def test_pipeline_create_account(self): + self.sync.create_aws_organization() + + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertTrue(success) + self.assertIsNotNone(response) + + def 
test_pipeline_create_account__exception_create_account(self): + self.sync.create_aws_organization() + + with patch("boto3.client") as mocker: + mocker().create_account.side_effect = ClientError({}, "create_account") + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertFalse(success) + self.assertEquals(response, "CLIENTERROR_CREATE_ACCOUNT") + + def test_pipeline_create_account__exception_describe_account_status(self): + self.sync.create_aws_organization() + + with patch("boto3.client") as mocker: + mocker().describe_create_account_status.side_effect = ClientError({}, "describe_create_account_status") + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertFalse(success) + self.assertEquals(response, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS") + + def test_pipeline_create_account__state_failed(self): + self.sync.create_aws_organization() + + with patch("boto3.client") as mocker: + response = {"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}} + mocker().describe_create_account_status.return_value = response + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertFalse(success) + self.assertEquals(response, "EMAIL_ALREADY_EXISTS") + + def test_pipeline_create_account__state_in_progress(self): + self.sync.create_aws_organization() + + with patch("boto3.client") as mocker: + response = { + "CreateAccountStatus": { + "State": "IN_PROGRESS", + } + } + mocker().describe_create_account_status.return_value = response + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertFalse(success) + self.assertEquals(response, "STILL_IN_PROGRESS") + + def test_pipeline_create_and_move_accounts(self): + moto_client = 
boto3.client("organizations") + self.sync.create_aws_organization() + + new_member_accounts = [ + awssync.SyncData("alice@example.com", "alice", "Spring 2023"), + awssync.SyncData("bob@example.com", "bob", "Spring 2023"), + ] + root_id = moto_client.list_roots()["Roots"][0]["Id"] + course_iteration_id = self.sync.create_course_iteration_OU("Spring 2023") + + success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) + self.assertTrue(success) + + def test_pipeline_create_and_move_accounts__email_exists(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + + new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")] + root_id = moto_client.list_roots()["Roots"][0]["Id"] + course_iteration_id = self.sync.create_course_iteration_OU("2023Fall") + + with patch("projects.awssync.AWSSync.pipeline_create_account") as mocker: + mocker.return_value = False, "EMAIL_ALREADY_EXISTS" + success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) + + self.assertFalse(success) + + def test_pipeline_create_and_move_accounts__exception_move_account(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + + new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")] + root_id = moto_client.list_roots()["Roots"][0]["Id"] + course_iteration_id = self.sync.create_course_iteration_OU("2023Fall") + + self.sync.pipeline_create_account = MagicMock(return_value=(True, 1234)) + with patch("boto3.client") as mocker: + mocker().move_account.side_effect = ClientError({}, "move_account") + success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) + + self.assertFalse(success) + @mock_organizations def test_get_aws_data(self): moto_client = boto3.client("organizations") From 5ae9396671f5946d5d76074aa5cf54f6720528b2 Mon Sep 17 00:00:00 2001 
From: =?UTF-8?q?Filip=20=C5=81ysak?= <92109241+FilipLysak001@users.noreply.github.com> Date: Sat, 22 Apr 2023 15:38:50 +0200 Subject: [PATCH 17/32] Sprint 2 deliverable (#43) * AWS synchronisation button (#8) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. * Removed unnecessary docstrings. * Add boto3 and moto dependencies (#11) * Add logger and replace prints with logs * Add function to create AWS organization * Add unit tests for creating AWS organization * bugfix (#619) Co-authored-by: nvoers * Added logger setlevel (#20) * Db sync (#16) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. 
* Added get mailinglist to the awssync file * Added first version of get_all_managers * Added test case for mailing lists * Removed some prints * reformatted using black * flake8 intentation added * flake8 intentation correction * Removed get manager * Linting added * unused import removed * Added get_teamid_from_email * Changed function email with teamid * Updated get_emails_with_teamids, working now * Added test for get_emails_with_ids * Added linting * linting * Added more test * Linting in awssync and its test file * Moved the imports around * moved the imports around * Black linting * switched imports around * Switched imports around part 2 * Switched imports around part 3 * Switched imports around part 4 * Fixed when no project exist for mailing list * Added some more tests * Removed exeption try expect * Black linting * Changed get_email_with_teamid to new format * Changed get_emails_with_teamids to go over project * Added tests for get_emails_with_teamids * changed info for get_emails_with_teamids --------- Co-authored-by: Henk * Db sync (#25) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. 
* Added get mailinglist to the awssync file * Added first version of get_all_managers * Added test case for mailing lists * Removed some prints * reformatted using black * flake8 intentation added * flake8 intentation correction * Removed get manager * Linting added * unused import removed * Added get_teamid_from_email * Changed function email with teamid * Updated get_emails_with_teamids, working now * Added test for get_emails_with_ids * Added linting * linting * Added more test * Linting in awssync and its test file * Moved the imports around * moved the imports around * Black linting * switched imports around * Switched imports around part 2 * Switched imports around part 3 * Switched imports around part 4 * Fixed when no project exist for mailing list * Added some more tests * Removed exeption try expect * Black linting * Changed get_email_with_teamid to new format * Changed get_emails_with_teamids to go over project * Added tests for get_emails_with_teamids * changed info for get_emails_with_teamids * Changed email data dict to struct * added test for TypeError exception for eq operator * resolved linting errors * changed comment to correct datatype * dramatically improved test class name --------- Co-authored-by: Henk Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com> Co-authored-by: Jer111 * Added function to generate which users have to be invited after the sync button is pressed (#23) * Added 'generate_aws_sync_list' function and tests * solved black errors * changed generate_aws_sync_list to use SyncData structure * Create and attach SCP policies (#29) * Add functions for creating and attaching SCP policies * Improve test cases * 12 moto helper (#36) * merged with development and added create_c_i_OU * Added some tests for create_c_i_OU * Added some tests for create_c_i_ou * Linting * Changed the mock_api call back to orginal * Added create_team_ou with tests * Fix problems with moto testing * Worked on tests and added apitalkerclass * Make 
test asserts more meaningful * black * Added tests for create_ou's without parts * Added one test that gets all children under OU * Fix linting * Changed return of response create team ou did not save the name of the team OU * Fix test create team OU * Resolved linting issues * Fix flake8 * remove create_team_ou --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> Co-authored-by: Fouad Lamsettef * Add checks for edge cases between AWS and Giphouse databases (#37) * added double user check (partly) * added some checks and made two new fancy classes for the storage of AWS tree dictionaries * added tests * added equals for AWSTree and Iteration objects * test stupid error * does it work now? * resolved merge conflicts with rebasing on development * cleaned up code based on pull request comments * Extraction of AWS data * Added function for extracting data * Added mock format * finished function (testing needed) * Linting fix * fix no return of tree * Fix AttributeError * Unwrap tuple instead of accessing by element to increase code readability * Fixed for new struct * Implementation bug fixes * added tests (not done) * Removed classes for merge * Added function for extracting data * Added mock format * finished function (testing needed) * Linting fix * fix no return of tree * Fix AttributeError * Unwrap tuple instead of accessing by element to increase code readability * Fixed for new struct * Implementation bug fixes * added tests (not done) * Linting fix * git fixes * Black fix * pydocstyle fix * Black fix again * removed flake8 'fix' * Final flake8 fix * Final final flake8 fix * spelling error fix --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> * AWS synchronization pipeline (and integration bug fixes) (#42) * Create and attach SCP policies (#29) * Add functions for creating and attaching SCP policies * Improve test cases * Add function template * Fix 'a/an' grammar mistake * Add pipeline preconditions * Add 
pipeline preconditions tests * Add checks for required API actions * Add test cases for checking required API actions * Added implementation of creating and attaching policy in the pipeline * Remove double API actions * Added implementation of creating and moving accounts in the pipeline. This should still be divided into smaller functions. * Increase code readability * Stop checking account request status after known failure * Fixed small typos and added account details to a debug message about account creation failure * Added tests for pipeline policy and fixed typos in debug messages. * Split creating and moving accounts into multiple functions, and handle exceptions * added update_course_itation_ou with tests * updated pipeline_update_current_course_iteration * Add test cases for creating and moving member accounts * Bug fixes for pipeline dependencies that arose from integration sprint 2 tasks * Revised pipeline policy function and corresponding tests so that it should not fail after first pipeline run * Change duplicate policy attachment to soft-fail; replace organization ID with root ID --------- Co-authored-by: Henk Co-authored-by: Jer111 --------- Co-authored-by: Henk Berendsen <61596108+hb140502@users.noreply.github.com> Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com> Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com> Co-authored-by: nvoers Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com> Co-authored-by: Henk Co-authored-by: mitchellboes <49476235+mitchellboes@users.noreply.github.com> Co-authored-by: Jer111 Co-authored-by: Fouad Lamsettef --- website/projects/awssync.py | 678 ++++++++++++++ website/projects/tests/test_awssync.py | 1139 +++++++++++++++++++++++- website/room_reservation/views.py | 2 +- 3 files changed, 1803 insertions(+), 16 deletions(-) diff --git a/website/projects/awssync.py b/website/projects/awssync.py 
index e0f44734..96e71327 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -1,8 +1,94 @@ +"""Framework for synchronisation with Amazon Web Services (AWS).""" +from __future__ import annotations + +import json import logging +import time import boto3 from botocore.exceptions import ClientError +from botocore.exceptions import NoCredentialsError + +from courses.models import Semester + +from mailing_lists.models import MailingList + +from projects.models import Project + + +class SyncData: + """Structure for AWS giphouse sync data.""" + + def __init__(self, project_email, project_slug, project_semester): + """Create SyncData instance.""" + self.project_email = project_email + self.project_slug = project_slug + self.project_semester = project_semester + + def __eq__(self, other): + """Overload equals for SyncData type.""" + if not isinstance(other, SyncData): + raise TypeError("Must compare to object of type SyncData") + return ( + self.project_email == other.project_email + and self.project_slug == other.project_slug + and self.project_semester == other.project_semester + ) + + def __repr__(self): + """Overload to string function for SyncData type.""" + return f"SyncData('{self.project_email}', '{self.project_slug}', '{self.project_semester}')" + + +class Iteration: + """Datatype for AWS data in the Course iteration OU.""" + + def __init__(self, name, ou_id, members: list[SyncData]): + """Initialize Iteration object.""" + self.name = name + self.ou_id = ou_id + self.members = members + + def __repr__(self): + """Overload to string function for Iteration datatype.""" + return f"Iteration('{self.name}', '{self.ou_id}', {self.members})" + + def __eq__(self, other: Iteration) -> bool: + """Overload equals operator for Iteration objects.""" + if not isinstance(other, Iteration): + raise TypeError("Must compare to object of type Iteration") + return self.name == other.name and self.ou_id == other.ou_id and self.members == other.members + + 
+class AWSTree: + """Tree structure for AWS data.""" + + def __init__(self, name, ou_id, iterations: list[Iteration]): + """Initialize AWSTree object.""" + self.name = name + self.ou_id = ou_id + self.iterations = iterations + + def __repr__(self): + """Overload to string function for AWSTree object.""" + return f"AWSTree('{self.name}', '{self.ou_id}', {self.iterations})" + + def __eq__(self, other: AWSTree) -> bool: + """Overload equals operator for AWSTree objects.""" + if not isinstance(other, AWSTree): + raise TypeError("Must compare to object of type AWSTree") + return self.name == other.name and self.ou_id == other.ou_id and self.iterations == other.iterations + + def awstree_to_syncdata_list(self): + """Convert AWSTree to list of SyncData elements.""" + awslist = [] + + for iteration in self.iterations: + for member in iteration.members: + awslist.append(member) + + return awslist class AWSSync: @@ -10,10 +96,72 @@ class AWSSync: def __init__(self): """Create an AWSSync instance.""" + self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 5 + self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3 + self.logger = logging.getLogger("django.aws") self.logger.setLevel(logging.DEBUG) self.org_info = None + self.iterationOU_info = None + self.policy_id = "p-examplepolicyid111" self.fail = False + self.required_aws_actions = [ + # "organizations:AcceptHandshake", + "organizations:AttachPolicy", + # "organizations:CancelHandshake", + # "organizations:CloseAccount", + "organizations:CreateAccount", + # "organizations:CreateGovCloudAccount", + "organizations:CreateOrganization", + "organizations:CreateOrganizationalUnit", + "organizations:CreatePolicy", + # "organizations:DeclineHandshake", + # "organizations:DeleteOrganization", + "organizations:DeleteOrganizationalUnit", + "organizations:DeletePolicy", + "organizations:DeleteResourcePolicy", + # "organizations:DeregisterDelegatedAdministrator", + "organizations:DescribeAccount", + "organizations:DescribeCreateAccountStatus", + 
"organizations:DescribeEffectivePolicy", + # "organizations:DescribeHandshake", + "organizations:DescribeOrganization", + "organizations:DescribeOrganizationalUnit", + "organizations:DescribePolicy", + "organizations:DescribeResourcePolicy", + "organizations:DetachPolicy", + # "organizations:DisableAWSServiceAccess", + "organizations:DisablePolicyType", + # "organizations:EnableAWSServiceAccess", + # "organizations:EnableAllFeatures", + "organizations:EnablePolicyType", + # "organizations:InviteAccountToOrganization", + # "organizations:LeaveOrganization", + # "organizations:ListAWSServiceAccessForOrganization", + "organizations:ListAccounts", + "organizations:ListAccountsForParent", + "organizations:ListChildren", + "organizations:ListCreateAccountStatus", + # "organizations:ListDelegatedAdministrators", + # "organizations:ListDelegatedServicesForAccount", + # "organizations:ListHandshakesForAccount", + # "organizations:ListHandshakesForOrganization", + "organizations:ListOrganizationalUnitsForParent", + "organizations:ListParents", + "organizations:ListPolicies", + "organizations:ListPoliciesForTarget", + "organizations:ListRoots", + "organizations:ListTagsForResource", + "organizations:ListTargetsForPolicy", + "organizations:MoveAccount", + "organizations:PutResourcePolicy", + # "organizations:RegisterDelegatedAdministrator", + # "organizations:RemoveAccountFromOrganization", + "organizations:TagResource", + "organizations:UntagResource", + "organizations:UpdateOrganizationalUnit", + "organizations:UpdatePolicy", + ] self.logger.info("Created AWSSync instance.") def button_pressed(self): @@ -23,8 +171,42 @@ def button_pressed(self): :return: True if function executes successfully """ self.logger.info("Pressed button") + self.logger.debug(f"Pipeline result: {self.pipeline()}") return True + def get_all_mailing_lists(self): + """ + Get all mailing lists from the database. 
+ + :return: List of mailing lists + """ + mailing_lists = MailingList.objects.all() + mailing_list_names = [ml.email_address for ml in mailing_lists] + return mailing_list_names + + def get_emails_with_teamids(self): + """ + Create a list of SyncData struct containing email, slug and semester. + + Slug and semester combined are together an uniqueness constraint. + + :return: list of SyncData structs with email, slug and semester + """ + email_ids = [] + + for project in ( + Project.objects.filter(mailinglist__isnull=False) + .filter(semester=Semester.objects.get_or_create_current_semester()) + .values("slug", "semester", "mailinglist") + ): + project_slug = project["slug"] + project_semester = str(Semester.objects.get(pk=project["semester"])) + project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address + + sync_data = SyncData(project_email, project_slug, project_semester) + email_ids.append(sync_data) + return email_ids + def create_aws_organization(self): """Create an AWS organization with the current user as the management account.""" client = boto3.client("organizations") @@ -37,3 +219,499 @@ def create_aws_organization(self): self.logger.error("Something went wrong creating an AWS organization.") self.logger.debug(f"{error}") self.logger.debug(f"{error.response}") + + def create_course_iteration_OU(self, iteration_name): + """ + Create an OU for the course iteration. + + :param iteration_name: The name of the course iteration OU + + :return: The ID of the OU + """ + client = boto3.client("organizations") + if self.org_info is None: + self.logger.info("No organization info found. 
Creating an AWS organization.") + self.fail = True + else: + try: + root_id = client.list_roots()["Roots"][0]["Id"] + response = client.create_organizational_unit( + ParentId=root_id, + Name=iteration_name, + ) + self.logger.info(f"Created an OU for course iteration {iteration_name}.") + self.iterationOU_info = response["OrganizationalUnit"] + return response["OrganizationalUnit"]["Id"] + except ClientError as error: + self.fail = True + self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_name}.") + self.logger.debug(f"{error}") + self.logger.debug(f"{error.response}") + + def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]): + """ + Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS. + + This includes their ID and email address, to be able to put users in the correct AWS organization later. + """ + sync_list = [x for x in giphouse_data if x not in aws_data] + return sync_list + + def create_scp_policy(self, policy_name, policy_description, policy_content): + """ + Create an SCP policy. + + :param policy_name: The policy name. + :param policy_description: The policy description. + :param policy_content: The policy configuration as a dictionary. + The policy is automatically converted to JSON format, including escaped quotation marks. + :return: Details of newly created policy as a dict on success and NoneType object otherwise. 
+ """ + client = boto3.client("organizations") + try: + response = client.create_policy( + Content=json.dumps(policy_content), + Description=policy_description, + Name=policy_name, + Type="SERVICE_CONTROL_POLICY", + ) + except ClientError as error: + self.fail = True + self.logger.error("Something went wrong creating an SCP policy.") + self.logger.error(error) + else: + return response["Policy"] + + def attach_scp_policy(self, policy_id, target_id): + """ + Attaches an SCP policy to a target (root, OU, or member account). + + :param policy_id: The ID of the policy to be attached. + :param target_id: The ID of the target root, OU, or member account. + """ + client = boto3.client("organizations") + try: + client.attach_policy(PolicyId=policy_id, TargetId=target_id) + except ClientError as error: + if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException": + self.fail = True + self.logger.error("Something went wrong attaching an SCP policy to a target.") + self.logger.debug(f"{error}") + self.logger.debug(f"{error.response}") + + def check_aws_api_connection(self): + """ + Check whether boto3 can connect to AWS API with current credentials. + + :returns: First tuple element always exists and indicates success. + Second tuple element is contains information about the entity + who made the successful API call and None otherwise. + """ + client_sts = boto3.client("sts") + try: + caller_identity_info = client_sts.get_caller_identity() + except (NoCredentialsError, ClientError) as error: + self.logger.info("Establishing AWS API connection failed.") + self.logger.debug(error) + return False, None + else: + self.logger.info("Establishing AWS API connection succeeded.") + + return True, caller_identity_info + + def check_iam_policy(self, iam_user_arn, desired_actions): + """ + Check for the specified IAM user ARN whether the actions in list \ + desired_actions are allowed according to its IAM policy. + + :param iam_user_arn: ARN of the IAM user being checked. 
+ :param iam_actions: List of AWS API actions to check. + :returns: True iff all actions in desired_actions are allowed. + """ + client_iam = boto3.client("iam") + + try: + response = client_iam.simulate_principal_policy(PolicySourceArn=iam_user_arn, ActionNames=desired_actions) + except ClientError as error: + self.logger.info("AWS API actions check failed.") + self.logger.debug(error) + return False + + success = True + for evaluation_result in response["EvaluationResults"]: + action_name = evaluation_result["EvalActionName"] + if evaluation_result["EvalDecision"] != "allowed": + self.logger.debug(f"The AWS API action {action_name} is denied for IAM user {iam_user_arn}.") + success = False + + if success: + self.logger.info("AWS API actions check succeeded.") + + return success + + def check_organization_existence(self): + """ + Check whether an AWS organization exists for the AWS API caller's account. + + :returns: First tuple element always exists and indicates success. + Second tuple element is describes properties of the organization and None otherwise. + """ + client_organizations = boto3.client("organizations") + + try: + response_org = client_organizations.describe_organization() + except ClientError as error: + self.logger.info("AWS organization existence check failed.") + self.logger.debug(error) + return False, None + else: + self.logger.info("AWS organization existence check succeeded.") + + return True, response_org["Organization"] + + def check_is_management_account(self, api_caller_info, organization_info): + """ + Check whether caller of AWS API has organization's management account ID. + + :returns: True iff the current organization's management account ID equals the AWS API caller's account ID. 
+ """ + management_account_id = organization_info["MasterAccountId"] + api_caller_account_id = api_caller_info["Account"] + is_management_account = management_account_id == api_caller_account_id + + if is_management_account: + self.logger.info("Management account check succeeded.") + else: + self.logger.info("Management account check failed.") + self.logger.debug(f"The organization's management account ID is: '{management_account_id}'.") + self.logger.debug(f"The AWS API caller account ID is: '{api_caller_account_id}'.") + + return is_management_account + + def check_scp_enabled(self, organization_info): + """ + Check whether the SCP policy type is an enabled feature for the AWS organization. + + :returns: True iff the SCP policy type feature is enabled for the organization. + """ + scp_is_enabled = False + for policy in organization_info["AvailablePolicyTypes"]: + if policy["Type"] == "SERVICE_CONTROL_POLICY" and policy["Status"] == "ENABLED": + scp_is_enabled = True + break + + if not scp_is_enabled: + self.logger.info("The SCP policy type is disabled for the organization.") + self.logger.debug(organization_info["AvailablePolicyTypes"]) + else: + self.logger.info("Organization SCP policy status check succeeded.") + + return scp_is_enabled + + def pipeline_preconditions(self): + """ + Check all crucial pipeline preconditions. + + 1. Locatable boto3 credentials and successful AWS API connection + 2. Check allowed AWS API actions based on IAM policy of caller + 3. Existing organization for AWS API caller + 4. AWS API caller acts under same account ID as organization's management account ID + 5. SCP policy type feature enabled for organization + + :return: True iff all pipeline preconditions are met. 
+ """ + check_api_connection, api_caller_info = self.check_aws_api_connection() + if not check_api_connection: + return False + + check_api_actions = self.check_iam_policy(api_caller_info["Arn"], self.required_aws_actions) + if not check_api_actions: + return False + + check_org_existence, organization_info = self.check_organization_existence() + self.org_info = organization_info + if not check_org_existence: + return False + + check_acc_management = self.check_is_management_account(api_caller_info, organization_info) + if not check_acc_management: + return False + + check_scp_enabled = self.check_scp_enabled(organization_info) + if not check_scp_enabled: + return False + + return True + + def pipeline_policy(self, ou_id): + """ + Create an SCP policy and attaches it to the organizational unit of the current semester. + + :param ou_id: ID of the organizational unit for the current semester. + :return: True iff the policy to be attached to the OU already exists and is successfully attached. + """ + client = boto3.client("organizations") + try: + client.describe_policy(PolicyId=self.policy_id) + except ClientError as error: + self.logger.debug(error) + return False + + self.attach_scp_policy(self.policy_id, ou_id) + if self.fail: + return False + return True + + def pipeline_create_account(self, sync_data): + """ + Create a single new AWS member account in the organization of the API caller. + + The status of the member account request is repeatedly checked based on the class' attributes: + self.ACCOUNT_REQUEST_INTERVAL_SECONDS: thread sleeping time before each status check + self.ACCOUNT_REQUEST_MAX_ATTEMPTS: maximum number of times to thread sleep and check + + :param email: The e-mail address of the new member account. + :param username: The username of the new member account. + :returns: (True, account_id) on success and otherwise (False, failure_reason). + """ + client = boto3.client("organizations") + + # Request new member account. 
+ try: + response_create = client.create_account( + Email=sync_data.project_email, + AccountName=sync_data.project_slug, + IamUserAccessToBilling="DENY", + Tags=[ + {"Key": "project_slug", "Value": sync_data.project_slug}, + {"Key": "project_semester", "Value": sync_data.project_semester}, + ], + ) + except ClientError as error: + self.logger.debug(error) + return False, "CLIENTERROR_CREATE_ACCOUNT" + + # Repeatedly check status of new member account request. + request_id = response_create["CreateAccountStatus"]["Id"] + for _ in range(1, self.ACCOUNT_REQUEST_MAX_ATTEMPTS + 1): + time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS) + + try: + response_status = client.describe_create_account_status(CreateAccountRequestId=request_id) + except ClientError as error: + self.logger.debug(error) + return False, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS" + + request_state = response_status["CreateAccountStatus"]["State"] + if request_state == "FAILED": + return False, response_status["CreateAccountStatus"]["FailureReason"] + elif request_state == "SUCCEEDED": + return True, response_status["CreateAccountStatus"]["AccountId"] + + return False, "STILL_IN_PROGRESS" + + def pipeline_create_and_move_accounts(self, new_member_accounts, root_id, destination_ou_id): + """ + Create multiple accounts in the organization of the API caller and move them from the root to a destination OU. + + :param new_member_accounts: List of SyncData objects. + :param root_id: The organization's root ID. + :param destination_ou_id: The organization's destination OU ID. + :returns: True iff **all** new member accounts were created and moved successfully. 
+ """ + client = boto3.client("organizations") + overall_success = True + + for new_member in new_member_accounts: + success, response = self.pipeline_create_account(new_member) + if success: + account_id = response + try: + root_id = client.list_roots()["Roots"][0]["Id"] + client.move_account( + AccountId=account_id, SourceParentId=root_id, DestinationParentId=destination_ou_id + ) + except ClientError as error: + self.logger.debug(error) + overall_success = False + else: + failure_reason = response + self.logger.debug(failure_reason) + overall_success = False + + return overall_success + + def pipeline_update_current_course_iteration_ou(self, aws_tree): + """ + Update the AWS tree with the new course iteration OU's. + + :param aws_tree: The AWS tree to be checked. + :returns: True, iteration_id on success and otherwise False, failure_reason. + """ + is_current_iteration, iteration_ou_id = self.check_current_ou_exists(aws_tree) + + if not is_current_iteration: + iteration_name = str(Semester.objects.get_or_create_current_semester()) + iteration_ou_id = self.create_course_iteration_OU(iteration_name) + + if not self.fail: + return True, iteration_ou_id + else: + return False, "ITERATION_OU_CREATION_FAILED" + + def pipeline(self): + """ + Single pipeline that integrates all buildings blocks for the AWS integration process. + + :return: True iff all pipeline stages successfully executed. + """ + # Check preconditions. + if not self.pipeline_preconditions(): + return False + + # Get synchronization data. 
+ client = boto3.client("organizations") + try: + root_id = client.list_roots()["Roots"][0]["Id"] + except ClientError as error: + self.logger.debug("Failed to retrieve root ID of organization.") + self.logger.debug(error) + return False + + aws_tree = self.extract_aws_setup(root_id) + if self.fail: + self.logger.debug("Extracting AWS setup failed.") + return False + + aws_sync_data = aws_tree.awstree_to_syncdata_list() + giphouse_sync_data = self.get_emails_with_teamids() + merged_sync_data = self.generate_aws_sync_list(giphouse_sync_data, aws_sync_data) + + # Check edge cases. + if self.check_for_double_member_email(aws_sync_data, merged_sync_data): + return False + + success, incorrect_emails = self.check_members_in_correct_iteration(aws_tree) + if not success: + self.logger.debug(f"Got incorrectly placed AWS member accounts: {incorrect_emails}.") + return False + + failure, double_iteration_names = self.check_double_iteration_names(aws_tree) + if failure: + self.logger.debug(f"Found double iteration names: {double_iteration_names}.") + return False + + # Check/create course iteration OU. + current_course_iteration_exists, response = self.pipeline_update_current_course_iteration_ou(aws_tree) + if not current_course_iteration_exists: + failure_reason = response + self.logger.debug(failure_reason) + return False + course_iteration_ou_id = response + + # Create and attach SCP policy to course iteration OU. + if not self.pipeline_policy(course_iteration_ou_id): + return False + + # Create new member accounts and move to course iteration OU. 
+ if not self.pipeline_create_and_move_accounts(merged_sync_data, root_id, course_iteration_ou_id):
+ return False
+
+ return True
+
+ def check_for_double_member_email(self, aws_list: list[SyncData], sync_list: list[SyncData]):
+ """Check if no users are assigned to multiple projects."""
+ sync_emails = [x.project_email for x in sync_list]
+ aws_emails = [x.project_email for x in aws_list]
+
+ duplicates = [email for email in sync_emails if email in aws_emails]
+
+ for duplicate in duplicates:
+ error = f"Email address {duplicate} is already in the list of members in AWS"
+ self.logger.info("An email clash occured while syncing.")
+ self.logger.debug(error)
+
+ if duplicates != []:
+ return True
+ return False
+
+ def check_current_ou_exists(self, AWSdata: AWSTree):
+ """
+ Check if the OU (organizational unit) for the current semester already exists in AWS.
+
+ Get data in tree structure (dictionary) defined in the function that retrieves the AWS data.
+ """
+ current = str(Semester.objects.get_or_create_current_semester())
+
+ for iteration in AWSdata.iterations:
+ if current == iteration.name:
+ return (True, iteration.ou_id)
+
+ return (False, None)
+
+ def check_members_in_correct_iteration(self, AWSdata: AWSTree):
+ """Check if the data from the member tag matches the semester OU it is in."""
+ incorrect_emails = []
+ for iteration in AWSdata.iterations:
+ for member in iteration.members:
+ if member.project_semester != iteration.name:
+ incorrect_emails.append(member.project_email)
+
+ if incorrect_emails != []:
+ return (False, incorrect_emails)
+
+ return (True, None)
+
+ def check_double_iteration_names(self, AWSdata: AWSTree):
+ """Check if there are multiple OUs with the same name in AWS."""
+ names = [iteration.name for iteration in AWSdata.iterations]
+ doubles = []
+
+ for name in names:
+ if names.count(name) != 1 and name not in doubles:
+ doubles.append(name)
+
+ if doubles != []:
+ return (True, doubles)
+ return (False, None)
+
+ def 
extract_aws_setup(self, parent_ou_id): + """ + Give a list of all the children of the parent OU. + + :param parent_ou_id: The ID of the parent OU. + """ + client = boto3.client("organizations") + try: + response = client.list_organizational_units_for_parent(ParentId=parent_ou_id) + aws_tree = AWSTree("root", parent_ou_id, []) + for iteration in response["OrganizationalUnits"]: + ou_id = iteration["Id"] + ou_name = iteration["Name"] + response = client.list_accounts_for_parent(ParentId=ou_id) + children = response["Accounts"] + syncData = [] + for child in children: + account_id = child["Id"] + account_email = child["Email"] + response = client.list_tags_for_resource(ResourceId=account_id) + tags = response["Tags"] + merged_tags = {d["Key"]: d["Value"] for d in tags} + self.logger.debug(merged_tags) + if all(key in merged_tags for key in ["project_slug", "project_semester"]): + syncData.append( + SyncData(account_email, merged_tags["project_slug"], merged_tags["project_semester"]) + ) + else: + self.logger.error( + "Could not find project_slug or project_semester tag for account with ID: " + account_id + ) + self.fail = True + + aws_tree.iterations.append(Iteration(ou_name, ou_id, syncData)) + return aws_tree + except ClientError as error: + self.fail = True + self.logger.error("Something went wrong extracting the AWS setup.") + self.logger.debug(f"{error}") + self.logger.debug(f"{error.response}") diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index 6822fc14..d2e7cec7 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -1,26 +1,939 @@ -from unittest.mock import patch +"""Tests for awssync.py.""" + +import json +from unittest.mock import MagicMock, patch import boto3 +import botocore from botocore.exceptions import ClientError from django.test import TestCase -from moto import mock_organizations +from moto import mock_organizations, mock_sts + +from courses.models import 
Semester + +from mailing_lists.models import MailingList from projects import awssync +from projects.models import Project + + +class SyncDataTest(TestCase): + """Test SyncData class (struct).""" + + def setUp(self): + """setup test environment.""" + self.sync = awssync.SyncData + + def test_throw_type_error_SyncData_class(self): + """Test Type Error when equals is called on wrong type.""" + with self.assertRaises(TypeError) as context: + self.sync("", "", "") == [] + self.assertTrue("Must compare to object of type SyncData" in str(context.exception)) class AWSSyncTest(TestCase): """Test AWSSync class.""" def setUp(self): + """Set up testing environment.""" self.sync = awssync.AWSSync() + self.semester = Semester.objects.create(year=2023, season=Semester.SPRING) + self.mailing_list = MailingList.objects.create(address="test1") + self.project = Project.objects.create(id=1, name="test1", semester=self.semester, slug="test1") + self.mailing_list.projects.add(self.project) + self.mock_org = mock_organizations() + self.mock_org.start() + + def tearDown(self): + self.mock_org.stop() + + def simulateFailure(self): + self.sync.fail = True def test_button_pressed(self): + """Test button_pressed function.""" return_value = self.sync.button_pressed() self.assertTrue(return_value) + def test_create_aws_organization(self): + moto_client = boto3.client("organizations") + org = self.sync + org.create_aws_organization() + describe_org = moto_client.describe_organization()["Organization"] + self.assertEqual(describe_org, org.org_info) + + def test_create_aws_organization__exception(self): + org = self.sync + with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): + org.create_aws_organization() + self.assertTrue(org.fail) + self.assertIsNone(org.org_info) + + def test_create_course_iteration_OU(self): + moto_client = boto3.client("organizations") + org = self.sync + org.create_aws_organization() + org.create_course_iteration_OU("1") + describe_unit = 
moto_client.describe_organizational_unit(OrganizationalUnitId=org.iterationOU_info["Id"])[ + "OrganizationalUnit" + ] + self.assertEqual(describe_unit, org.iterationOU_info) + + def test_create_course_iteration_OU_without_organization(self): + org = self.sync + org.create_course_iteration_OU("1") + self.assertTrue(org.fail) + + def test_create_course_iteration_OU__exception(self): + org = self.sync + org.create_aws_organization() + with patch("boto3.client") as mocker: + mocker().list_roots.side_effect = ClientError({}, "list_roots") + org.create_course_iteration_OU("1") + self.assertTrue(org.fail) + + def test_get_all_mailing_lists(self): + """Test get_all_mailing_lists function.""" + mailing_lists = self.sync.get_all_mailing_lists() + self.assertIsInstance(mailing_lists, list) + + def test_get_emails_with_teamids_normal(self): + """Test get_emails_with_teamids function.""" + email_id = self.sync.get_emails_with_teamids() + + self.assertIsInstance(email_id, list) + self.assertIsInstance(email_id[0], awssync.SyncData) + expected_result = [awssync.SyncData("test1@giphouse.nl", "test1", "Spring 2023")] + self.assertEqual(email_id, expected_result) + + def test_get_emails_with_teamids_no_project(self): + """Test get_emails_with_teamids function.""" + MailingList.objects.all().delete() + self.mailing_list = MailingList.objects.create(address="test2") + email_id = self.sync.get_emails_with_teamids() + self.assertIsInstance(email_id, list) + self.assertEqual(email_id, []) + + def test_get_emails_with_teamids_no_mailing_list(self): + """Test get_emails_with_teamids function.""" + MailingList.objects.all().delete() + Project.objects.all().delete() + email_id = self.sync.get_emails_with_teamids() + self.assertIsInstance(email_id, list) + self.assertEqual(email_id, []) + + def test_get_emails_with_teamids_different_semester(self): + """Test get_emails_with_teamids function.""" + MailingList.objects.all().delete() + new_semester = Semester.objects.create(year=2022, 
season=Semester.FALL) + self.mailing_list = MailingList.objects.create(address="test2") + self.project = Project.objects.create(id=2, name="test2", semester=new_semester, slug="test2") + self.mailing_list.projects.add(self.project) + email_id = self.sync.get_emails_with_teamids() + self.assertIsInstance(email_id, list) + self.assertEqual(email_id, []) + + def test_create_scp_policy(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + + self.assertFalse(self.sync.fail) + self.assertEqual(policy["PolicySummary"]["Name"], policy_name) + self.assertEqual(policy["PolicySummary"]["Description"], policy_description) + self.assertEqual(policy["Content"], json.dumps(policy_content)) + + def test_create_scp_policy__exception(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = { + "Version": "2012-10-17", + "Statement": [{"Effect": "NonExistentEffect", "Action": "*", "Resource": "*"}], + } + with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + + self.assertTrue(self.sync.fail) + self.assertIsNone(policy) + + def test_attach_scp_policy(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." 
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + + policy_id = policy["PolicySummary"]["Id"] + root_id = moto_client.list_roots()["Roots"][0]["Id"] + self.sync.attach_scp_policy(policy_id, root_id) + + current_scp_policies = moto_client.list_policies_for_target(TargetId=root_id, Filter="SERVICE_CONTROL_POLICY") + current_scp_policy_ids = [scp_policy["Id"] for scp_policy in current_scp_policies["Policies"]] + + self.assertIn(policy_id, current_scp_policy_ids) + self.assertFalse(self.sync.fail) + + def test_attach_scp_policy__exception(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + + policy_id = policy["PolicySummary"]["Id"] + root_id = self.sync.org_info["Id"] # Retrieves organization ID, not root ID, resulting in ClientError. + self.sync.attach_scp_policy(policy_id, root_id) + + self.assertTrue(self.sync.fail) + + @mock_sts + def test_check_aws_api_connection(self): + success, caller_identity_info = self.sync.check_aws_api_connection() + + self.assertTrue(success) + self.assertIsNotNone(caller_identity_info) + + @mock_sts + def test_check_aws_api_connection__exception(self): + with patch("boto3.client") as mocker: + mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity") + mocker.return_value = mocker + success, caller_identity_info = self.sync.check_aws_api_connection() + + self.assertFalse(success) + self.assertIsNone(caller_identity_info) + + # IAM simulate_principal_policy is not covered by moto. 
+ def test_check_iam_policy(self): + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + # success == True + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + success = self.sync.check_iam_policy(iam_user_arn, desired_actions) + self.assertTrue(success) + + # success == False + mock_evaluation_results["EvaluationResults"][0]["EvalDecision"] = "implicitDeny" + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + success = self.sync.check_iam_policy(iam_user_arn, desired_actions) + self.assertFalse(success) + + def test_check_iam_policy__exception(self): + iam_user_arn = "daddy" + desired_actions = [] + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.side_effect = ClientError({}, "simulate_principal_policy") + success = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + self.assertFalse(success) + + def test_check_organization_existence(self): + moto_client = boto3.client("organizations") + organization_create_info = moto_client.create_organization(FeatureSet="ALL")["Organization"] + success, organization_describe_info = self.sync.check_organization_existence() + + self.assertTrue(success) + self.assertEqual(organization_create_info, organization_describe_info) + + def test_check_organization_existence__exception(self): + with patch("boto3.client") as mocker: + mocker.describe_organization.side_effect = ClientError({}, "describe_organization") + mocker.return_value = mocker + success, organization_info = self.sync.check_organization_existence() + + self.assertFalse(success) + self.assertIsNone(organization_info) + + @mock_sts + def test_check_is_management_account(self): + moto_client = 
boto3.client("organizations") + + moto_client.create_organization(FeatureSet="ALL")["Organization"] + _, caller_identity_info = self.sync.check_aws_api_connection() + _, organization_info = self.sync.check_organization_existence() + + # is_management_account == True + success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info) + self.assertTrue(success_acc) + + # is_management_account == False + caller_identity_info["Account"] = "daddy" + success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info) + self.assertFalse(success_acc) + + def test_check_scp_enabled(self): + moto_client = boto3.client("organizations") + + # SCP enabled. + organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"] + scp_is_enabled = self.sync.check_scp_enabled(organization_info) + self.assertTrue(scp_is_enabled) + + # SCP semi-disabled (pending). + organization_info["AvailablePolicyTypes"][0]["Status"] = "PENDING_DISABLE" + scp_is_enabled = self.sync.check_scp_enabled(organization_info) + self.assertFalse(scp_is_enabled) + + # SCP disabled (empty list). + organization_info["AvailablePolicyTypes"] = [] + scp_is_enabled = self.sync.check_scp_enabled(organization_info) + self.assertFalse(scp_is_enabled) + + @mock_sts + def test_pipeline_preconditions__all_success(self): + # Create organization. + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + # Mock return value of simulate_principal_policy. 
+ iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + mocker.return_value = check_iam_policy + success = self.sync.pipeline_preconditions() + + self.assertTrue(success) + + @mock_sts + def test_pipeline_preconditions__no_connection(self): + with patch("boto3.client") as mocker: + mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity") + mocker.return_value = mocker + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + def test_pipeline_preconditions__no_iam(self): + # Mock return value of simulate_principal_policy. + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "implicitDeny", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_api_actions = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + mocker.return_value = check_api_actions + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + @mock_sts + def test_pipeline_preconditions__no_organization(self): + # Mock return value of simulate_principal_policy. 
+ iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + mocker.return_value = check_iam_policy + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + @mock_sts + def test_pipeline_preconditions__no_management(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL") + + # Mock return value of simulate_principal_policy. + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. 
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam: + mocker_iam.return_value = check_iam_policy + with patch("projects.awssync.AWSSync.check_aws_api_connection") as mocker_api: + mocker_api.return_value = True, {"Account": "daddy", "Arn": "01234567890123456789"} + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + @mock_sts + def test_pipeline_preconditions__no_scp(self): + moto_client = boto3.client("organizations") + + organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"] + + # Mock return value of simulate_principal_policy. + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + # Mock return value of check_iam_policy. + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam: + mocker_iam.return_value = check_iam_policy + + # Mock return value of check_organization_existence with no SCP policy enabled. + organization_info["AvailablePolicyTypes"] = [] + with patch("projects.awssync.AWSSync.check_organization_existence") as mocker: + mocker.return_value = True, organization_info + success = self.sync.pipeline_preconditions() + + self.assertFalse(success) + + """ + def test_pipeline_create_scp_policy(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." 
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + + policy = self.sync.pipeline_create_scp_policy() + + self.assertFalse(self.sync.fail) + self.assertEqual(policy["PolicySummary"]["Name"], policy_name) + self.assertEqual(policy["PolicySummary"]["Description"], policy_description) + self.assertEqual(policy["Content"], json.dumps(policy_content)) + + def test_pipeline_create_scp_policy__exception(self): + self.sync.create_aws_organization() + + with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): + policy = self.sync.pipeline_create_scp_policy() + + self.assertTrue(self.sync.fail) + self.assertIsNone(policy) + """ + + def test_pipeline_policy(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + self.sync.policy_id = policy["PolicySummary"]["Id"] + + ou_id = self.sync.create_course_iteration_OU("Test") + + success = self.sync.pipeline_policy(ou_id) + self.assertTrue(success) + + def test_pipeline_policy__exception(self): + self.sync.create_aws_organization() + + ou_id = self.sync.create_course_iteration_OU("Test") + + success = self.sync.pipeline_policy(ou_id) + self.assertFalse(success) + + def test_pipeline_policy__failure_attach(self): + self.sync.create_aws_organization() + + policy_name = "DenyAll" + policy_description = "Deny all access." 
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + self.sync.policy_id = policy["PolicySummary"]["Id"] + + ou_id = self.sync.create_course_iteration_OU("Test") + + self.sync.attach_scp_policy = MagicMock(side_effect=self.simulateFailure()) + + success = self.sync.pipeline_policy(ou_id) + self.assertFalse(success) + + @mock_sts + def test_pipeline(self): + moto_client = boto3.client("organizations") + + # pipeline_preconditions() == False + success = self.sync.pipeline() + self.assertFalse(success) + + # pipeline_preconditions() == True + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) + self.sync.policy_id = policy["PolicySummary"]["Id"] + + iam_user_arn = "daddy" + desired_actions = [] + mock_evaluation_results = { + "EvaluationResults": [ + { + "EvalActionName": "organizations:CreateOrganizationalUnit", + "EvalDecision": "allowed", + "EvalResourceName": "*", + "MissingContextValues": [], + } + ] + } + + with patch("boto3.client") as mocker: + mocker().simulate_principal_policy.return_value = mock_evaluation_results + check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + + with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + mocker.return_value = check_iam_policy + success = self.sync.pipeline() + + self.assertTrue(success) + + def test_pipeline__exception_list_roots(self): + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + with patch("boto3.client") as mocker: + mocker().list_roots.side_effect = ClientError({}, "list_roots") + success = self.sync.pipeline() + + 
self.assertFalse(success) + + def test_pipeline__edge_case_double_emails(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + aws_tree = awssync.AWSTree( + "Root", + "123", + [ + awssync.Iteration( + "Spring 2023", + "456", + [ + awssync.SyncData("email1@example.com", "project1", "Spring 2023"), + ], + ) + ], + ) + + gip_teams = [ + awssync.SyncData("email1@example.com", "project1", "Spring 2023"), + awssync.SyncData("email1@example.com", "project2", "Spring 2023"), + ] + + self.sync.pipeline_preconditions = MagicMock(return_value=True) + self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) + self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__edge_case_incorrectly_placed(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + aws_tree = awssync.AWSTree( + "Root", + "123", + [ + awssync.Iteration( + "Fall 2023", + "456", + [ + awssync.SyncData("email1@example.com", "project1", "Spring 2023"), + ], + ) + ], + ) + + gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")] + + self.sync.pipeline_preconditions = MagicMock(return_value=True) + self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) + self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__edge_case_double_iteration_names(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + aws_tree = awssync.AWSTree( + "Root", + "123", + [ + awssync.Iteration( + "Spring 
2023", "456", [awssync.SyncData("email1@example.com", "project1", "Spring 2023")] + ), + awssync.Iteration("Spring 2023", "789", []), + ], + ) + + gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")] + + self.sync.pipeline_preconditions = MagicMock(return_value=True) + self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) + self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__failed_creating_iteration_ou(self): + moto_client = boto3.client("organizations") + moto_client.create_organization(FeatureSet="ALL")["Organization"] + + self.sync.pipeline_preconditions = MagicMock(return_value=True) + with patch("boto3.client") as mocker: + mocker().create_organizational_unit.side_effect = ClientError({}, "create_organizational_unit") + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__exception_attaching_policy(self): + self.sync.create_aws_organization() + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + with patch("boto3.client") as mocker: + mocker().attach_policy.side_effect = ClientError( + {"Error": {"Code": "PolicyTypeNotEnabledException"}}, "attach_policy" + ) + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__already_attached_policy(self): + self.sync.create_aws_organization() + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + with patch("boto3.client") as mocker: + mocker().attach_policy.side_effect = ClientError( + {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy" + ) + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__failed_create_and_move_account(self): + self.sync.create_aws_organization() + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + 
with patch("boto3.client") as mocker: + mocker().move_account.side_effect = ClientError({}, "move_account") + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline__exception_extract_aws_setup(self): + self.sync.pipeline_preconditions = MagicMock(return_value=True) + + with patch("boto3.client") as mocker: + mocker().list_organizational_units_for_parent.side_effect = ClientError( + {}, "list_organizational_units_for_parent" + ) + success = self.sync.pipeline() + + self.assertFalse(success) + + def test_pipeline_update_current_course_iteration_ou___failure_check_current_ou(self): + + self.sync.check_current_ou_exists = MagicMock(return_value=(False, None)) + + self.sync.create_aws_organization() + success, id = self.sync.pipeline_update_current_course_iteration_ou(None) + self.assertTrue(success) + self.assertFalse(id is None) + + def test_pipeline_update_current_course_iteration_ou___success(self): + + self.sync.check_current_ou_exists = MagicMock(return_value=(True, "1234")) + + self.sync.create_aws_organization() + success, id = self.sync.pipeline_update_current_course_iteration_ou(None) + self.assertTrue(success) + self.assertEquals(id, "1234") + + def test_pipeline_update_current_course_iteration_ou___failure_create_ou(self): + + self.sync.check_current_ou_exists = MagicMock(return_value=(False, None)) + self.sync.create_course_iteration_OU = MagicMock(side_effect=self.simulateFailure()) + + self.sync.create_aws_organization() + success, failure_reason = self.sync.pipeline_update_current_course_iteration_ou(None) + + self.assertFalse(success) + self.assertEquals(failure_reason, "ITERATION_OU_CREATION_FAILED") + self.assertTrue(self.sync.fail) + + def test_pipeline_create_account(self): + self.sync.create_aws_organization() + + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertTrue(success) + self.assertIsNotNone(response) + + def 
test_pipeline_create_account__exception_create_account(self): + self.sync.create_aws_organization() + + with patch("boto3.client") as mocker: + mocker().create_account.side_effect = ClientError({}, "create_account") + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertFalse(success) + self.assertEquals(response, "CLIENTERROR_CREATE_ACCOUNT") + + def test_pipeline_create_account__exception_describe_account_status(self): + self.sync.create_aws_organization() + + with patch("boto3.client") as mocker: + mocker().describe_create_account_status.side_effect = ClientError({}, "describe_create_account_status") + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertFalse(success) + self.assertEquals(response, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS") + + def test_pipeline_create_account__state_failed(self): + self.sync.create_aws_organization() + + with patch("boto3.client") as mocker: + response = {"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}} + mocker().describe_create_account_status.return_value = response + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertFalse(success) + self.assertEquals(response, "EMAIL_ALREADY_EXISTS") + + def test_pipeline_create_account__state_in_progress(self): + self.sync.create_aws_organization() + + with patch("boto3.client") as mocker: + response = { + "CreateAccountStatus": { + "State": "IN_PROGRESS", + } + } + mocker().describe_create_account_status.return_value = response + success, response = self.sync.pipeline_create_account( + awssync.SyncData("alice@example.com", "alice", "Spring 2023") + ) + + self.assertFalse(success) + self.assertEquals(response, "STILL_IN_PROGRESS") + + def test_pipeline_create_and_move_accounts(self): + moto_client = 
boto3.client("organizations") + self.sync.create_aws_organization() + + new_member_accounts = [ + awssync.SyncData("alice@example.com", "alice", "Spring 2023"), + awssync.SyncData("bob@example.com", "bob", "Spring 2023"), + ] + root_id = moto_client.list_roots()["Roots"][0]["Id"] + course_iteration_id = self.sync.create_course_iteration_OU("Spring 2023") + + success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) + self.assertTrue(success) + + def test_pipeline_create_and_move_accounts__email_exists(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + + new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")] + root_id = moto_client.list_roots()["Roots"][0]["Id"] + course_iteration_id = self.sync.create_course_iteration_OU("2023Fall") + + with patch("projects.awssync.AWSSync.pipeline_create_account") as mocker: + mocker.return_value = False, "EMAIL_ALREADY_EXISTS" + success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) + + self.assertFalse(success) + + def test_pipeline_create_and_move_accounts__exception_move_account(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + + new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")] + root_id = moto_client.list_roots()["Roots"][0]["Id"] + course_iteration_id = self.sync.create_course_iteration_OU("2023Fall") + + self.sync.pipeline_create_account = MagicMock(return_value=(True, 1234)) + with patch("boto3.client") as mocker: + mocker().move_account.side_effect = ClientError({}, "move_account") + success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) + + self.assertFalse(success) + + @mock_organizations + def test_get_aws_data(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + root_id = 
moto_client.list_roots()["Roots"][0]["Id"] + + response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1") + OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] + response_account_1 = moto_client.create_account( + Email="account_1@gmail.com", + AccountName="account_1", + Tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}], + ) + account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] + moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id) + + aws_tree = self.sync.extract_aws_setup(root_id) + iteration_test = awssync.Iteration("OU_1", OU_1_id, [awssync.SyncData("account_1@gmail.com", "test1", "2021")]) + aws_tree_test = awssync.AWSTree("root", root_id, [iteration_test]) + self.assertEquals(aws_tree, aws_tree_test) + + @mock_organizations + def test_get_aws_data_no_root(self): + boto3.client("organizations") + self.sync.create_aws_organization() + self.sync.extract_aws_setup("NonExistentRootID") + self.assertTrue(self.sync.fail) + + @mock_organizations + def test_get_aws_data_no_slugs(self): + moto_client = boto3.client("organizations") + self.sync.create_aws_organization() + root_id = moto_client.list_roots()["Roots"][0]["Id"] + + response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1") + OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] + response_account_1 = moto_client.create_account( + Email="account_1@gmail.com", + AccountName="account_1", + Tags=[], + ) + account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] + moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id) + self.sync.extract_aws_setup(root_id) + self.assertTrue(self.sync.fail) + + +class AWSSyncListTest(TestCase): + """Test AWSSyncList class.""" + + def setUp(self): + self.sync = awssync.AWSSync() + self.syncData = awssync.SyncData + + self.test1 = 
self.syncData("test1@test1.test1", "test1", "test1") + self.test2 = self.syncData("test2@test2.test2", "test2", "test2") + self.test3 = self.syncData("test3@test3.test3", "test3", "test3") + + def test_AWS_sync_list_both_empty(self): + gip_list = [] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_empty_AWS(self): + gip_list = [self.test1, self.test2] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list) + + def test_AWS_sync_list_empty_GiP(self): + gip_list = [] + aws_list = [self.test1, self.test2] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_both_full(self): + gip_list = [self.test1, self.test2] + aws_list = [self.test2, self.test3] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1]) + + +class AWSAPITalkerTest(TestCase): def mock_api(self, operation_name, kwarg): if operation_name == "CreateOrganization": raise ClientError( @@ -45,18 +958,214 @@ def mock_api(self, operation_name, kwarg): }, "create_organization", ) + if operation_name == "CreateOrganizationalUnit": + raise ClientError( + { + "Error": { + "Message": "The OU already exists.", + "Code": "ParentNotFoundException", + }, + "ResponseMetadata": { + "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "HTTPStatusCode": 400, + "HTTPHeaders": { + "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "content-type": "application/x-amz-json-1.1", + "content-length": "111", + "date": "Sun, 01 Jan 2023 00:00:00 GMT", + "connection": "close", + }, + "RetryAttempts": 0, + }, + "Message": "The OU already exists.", + }, + "create_organizational_unit", + ) + if operation_name == "CreatePolicy": + raise ClientError( + { + "Error": { + "Message": """The provided policy document does not meet the + requirements of the specified policy type.""", + "Code": "MalformedPolicyDocumentException", + }, + 
"ResponseMetadata": { + "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "HTTPStatusCode": 400, + "HTTPHeaders": { + "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", + "content-type": "application/x-amz-json-1.1", + "content-length": "147", + "date": "Sun, 01 Jan 2023 00:00:00 GMT", + "connection": "close", + }, + "RetryAttempts": 0, + }, + "Message": """The provided policy document does not meet the + requirements of the specified policy type.""", + }, + "create_policy", + ) + return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg) - @mock_organizations - def test_create_aws_organization(self): - moto_client = boto3.client("organizations") - org = self.sync - org.create_aws_organization() - describe_org = moto_client.describe_organization()["Organization"] - self.assertEqual(describe_org, org.org_info) - @patch("botocore.client.BaseClient._make_api_call", mock_api) - def test_create_aws_organization__exception(self): - org = self.sync - org.create_aws_organization() - self.assertTrue(org.fail) - self.assertIsNone(org.org_info) +class AWSTreeChecksTest(TestCase): + """Test checks done on AWSTree data struncture.""" + + def setUp(self): + self.sync = awssync.AWSSync() + self.awstree = awssync.AWSTree("Name", "1234", []) + self.iteration = awssync.Iteration("Name", "1234", []) + self.sync_data = awssync.SyncData("email@example.com", "Project X", "Spring 2020") + + self.sync_list = [ + awssync.SyncData("email1@example.com", "Spring 2022", "Project A"), + awssync.SyncData("email2@example.com", "Fall 2022", "Project B"), + awssync.SyncData("email3@example.com", "Spring 2022", "Project C"), + ] + self.aws_list = [ + awssync.SyncData("email4@example.com", "Fall 2021", "Project D"), + awssync.SyncData("email5@example.com", "Spring 2022", "Project E"), + awssync.SyncData("email6@example.com", "Fall 2022", "Project F"), + ] + + self.treelist = [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + 
awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + awssync.SyncData("email3@example.com", "project3", "Spring 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ] + + self.aws_tree1 = awssync.AWSTree( + "AWS Tree", + "12345", + [ + awssync.Iteration( + "Fall 2020", + "54321", + [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + awssync.Iteration( + "Spring 2021", + "98765", + [ + awssync.SyncData("email3@example.com", "project3", "Spring 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree2 = awssync.AWSTree( + "AWS Tree", + "12345", + [ + awssync.Iteration( + "Fall 2020", + "54321", + [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + awssync.Iteration( + "Spring 2021", + "98765", + [ + awssync.SyncData("email3@example.com", "project3", "Fall 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree3 = awssync.AWSTree( + "AWS Tree", + "12345", + [ + awssync.Iteration( + "Fall 2020", + "54321", + [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + awssync.Iteration( + "Fall 2020", + "98765", + [ + awssync.SyncData("email3@example.com", "project3", "Fall 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + def test_repr_AWSTree(self): + self.assertEquals(str(self.awstree), "AWSTree('Name', '1234', [])") + + def test_repr_Iteration(self): + self.assertEquals(str(self.iteration), "Iteration('Name', '1234', [])") + + def test_repr_SyncData(self): + self.assertEquals(str(self.sync_data), "SyncData('email@example.com', 'Project X', 'Spring 2020')") + + def 
test_awstree_to_syncdata_list(self): + self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist) + + def test_check_for_double_member_email(self): + # Test when there are no duplicate emails + self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) + + # Test when there is a duplicate email + self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G")) + self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) + + def test_check_current_ou_exists(self): + # Test when current semester OU does not exist + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"): + self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022") + val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) + self.assertEqual((val1, val2), (False, None)) + + # Test when current semester OU exists + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"): + self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021") + val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) + self.assertEqual((val1, val2), (True, "98765")) + + def test_check_members_in_correct_iteration(self): + # Test when correct + val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree1) + self.assertEqual((val1, val2), (True, None)) + + # Test when incorrect + val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2) + self.assertEqual((val1, val2), (False, ["email3@example.com"])) + + def test_check_double_iteration_names(self): + # Test when correct + val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1) + self.assertEqual((val1, val2), (False, None)) + + # Test when double + val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3) + self.assertEqual((val1, val2), (True, ["Fall 2020"])) + + def test_AWSTree_equals(self): 
+ self.assertEqual(self.aws_tree1, self.aws_tree1) + self.assertNotEqual(self.aws_tree1, self.aws_tree2) + with self.assertRaises(TypeError): + awssync.AWSTree("", "", []) == [] + self.assertRaises(TypeError) + + def test_Iteration_equals(self): + self.assertEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[0]) + self.assertNotEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[1]) + with self.assertRaises(TypeError): + awssync.Iteration("", "", []) == [] + self.assertRaises(TypeError) diff --git a/website/room_reservation/views.py b/website/room_reservation/views.py index eeef5b36..f617d2ee 100644 --- a/website/room_reservation/views.py +++ b/website/room_reservation/views.py @@ -134,7 +134,7 @@ def get_context_data(self, **kwargs): } for reservation in Reservation.objects.filter( start_time__date__gte=timezone.now() - self.time_window_past, - start_time__date__lte=timezone.now() + self.time_window_future, + start_time__date__lt=timezone.now() + self.time_window_future, ) ] ) From 9f6738fb73e9a996f4f134fe66da8890659a710d Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Tue, 25 Apr 2023 10:23:18 +0000 Subject: [PATCH 18/32] Dedicated module for AWS helper data structures (#47) * Separate AWS helper data structures into a dedicated module * Fix asserts for exceptions in tests * Add missing type hinting * Replace str calls with repr --- website/projects/awssync.py | 76 +------ website/projects/awssync_structs.py | 69 ++++++ website/projects/tests/test_awssync.py | 208 ----------------- .../projects/tests/test_awssync_structs.py | 211 ++++++++++++++++++ 4 files changed, 281 insertions(+), 283 deletions(-) create mode 100644 website/projects/awssync_structs.py create mode 100644 website/projects/tests/test_awssync_structs.py diff --git a/website/projects/awssync.py b/website/projects/awssync.py index 96e71327..95562452 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -1,5 +1,4 @@ 
"""Framework for synchronisation with Amazon Web Services (AWS).""" -from __future__ import annotations import json import logging @@ -14,83 +13,10 @@ from mailing_lists.models import MailingList +from projects.awssync_structs import AWSTree, Iteration, SyncData from projects.models import Project -class SyncData: - """Structure for AWS giphouse sync data.""" - - def __init__(self, project_email, project_slug, project_semester): - """Create SyncData instance.""" - self.project_email = project_email - self.project_slug = project_slug - self.project_semester = project_semester - - def __eq__(self, other): - """Overload equals for SyncData type.""" - if not isinstance(other, SyncData): - raise TypeError("Must compare to object of type SyncData") - return ( - self.project_email == other.project_email - and self.project_slug == other.project_slug - and self.project_semester == other.project_semester - ) - - def __repr__(self): - """Overload to string function for SyncData type.""" - return f"SyncData('{self.project_email}', '{self.project_slug}', '{self.project_semester}')" - - -class Iteration: - """Datatype for AWS data in the Course iteration OU.""" - - def __init__(self, name, ou_id, members: list[SyncData]): - """Initialize Iteration object.""" - self.name = name - self.ou_id = ou_id - self.members = members - - def __repr__(self): - """Overload to string function for Iteration datatype.""" - return f"Iteration('{self.name}', '{self.ou_id}', {self.members})" - - def __eq__(self, other: Iteration) -> bool: - """Overload equals operator for Iteration objects.""" - if not isinstance(other, Iteration): - raise TypeError("Must compare to object of type Iteration") - return self.name == other.name and self.ou_id == other.ou_id and self.members == other.members - - -class AWSTree: - """Tree structure for AWS data.""" - - def __init__(self, name, ou_id, iterations: list[Iteration]): - """Initialize AWSTree object.""" - self.name = name - self.ou_id = ou_id - 
self.iterations = iterations - - def __repr__(self): - """Overload to string function for AWSTree object.""" - return f"AWSTree('{self.name}', '{self.ou_id}', {self.iterations})" - - def __eq__(self, other: AWSTree) -> bool: - """Overload equals operator for AWSTree objects.""" - if not isinstance(other, AWSTree): - raise TypeError("Must compare to object of type AWSTree") - return self.name == other.name and self.ou_id == other.ou_id and self.iterations == other.iterations - - def awstree_to_syncdata_list(self): - """Convert AWSTree to list of SyncData elements.""" - awslist = [] - - for iteration in self.iterations: - for member in iteration.members: - awslist.append(member) - - return awslist - - class AWSSync: """Synchronise with Amazon Web Services.""" diff --git a/website/projects/awssync_structs.py b/website/projects/awssync_structs.py new file mode 100644 index 00000000..e0d8b934 --- /dev/null +++ b/website/projects/awssync_structs.py @@ -0,0 +1,69 @@ +from __future__ import annotations + + +class SyncData: + """Structure for AWS giphouse sync data.""" + + def __init__(self, project_email: str, project_slug: str, project_semester: str) -> None: + """Create SyncData instance.""" + self.project_email = project_email + self.project_slug = project_slug + self.project_semester = project_semester + + def __eq__(self, other: SyncData) -> bool: + """Overload equals for SyncData type.""" + if not isinstance(other, SyncData): + raise TypeError("Must compare to object of type SyncData") + return ( + self.project_email == other.project_email + and self.project_slug == other.project_slug + and self.project_semester == other.project_semester + ) + + def __repr__(self) -> str: + """Overload to repr function for SyncData type.""" + return f"SyncData('{self.project_email}', '{self.project_slug}', '{self.project_semester}')" + + +class Iteration: + """Datatype for AWS data in the Course iteration OU.""" + + def __init__(self, name: str, ou_id: str, members: list[SyncData]) 
-> None: + """Initialize Iteration object.""" + self.name = name + self.ou_id = ou_id + self.members = members + + def __repr__(self) -> str: + """Overload to repr function for Iteration datatype.""" + return f"Iteration('{self.name}', '{self.ou_id}', {self.members})" + + def __eq__(self, other: Iteration) -> bool: + """Overload equals operator for Iteration objects.""" + if not isinstance(other, Iteration): + raise TypeError("Must compare to object of type Iteration") + return self.name == other.name and self.ou_id == other.ou_id and self.members == other.members + + +class AWSTree: + """Tree structure for AWS data.""" + + def __init__(self, name: str, ou_id: str, iterations: list[Iteration]) -> None: + """Initialize AWSTree object.""" + self.name = name + self.ou_id = ou_id + self.iterations = iterations + + def __repr__(self) -> str: + """Overload to repr function for AWSTree object.""" + return f"AWSTree('{self.name}', '{self.ou_id}', {self.iterations})" + + def __eq__(self, other: AWSTree) -> bool: + """Overload equals operator for AWSTree objects.""" + if not isinstance(other, AWSTree): + raise TypeError("Must compare to object of type AWSTree") + return self.name == other.name and self.ou_id == other.ou_id and self.iterations == other.iterations + + def awstree_to_syncdata_list(self) -> list[SyncData]: + """Convert AWSTree to list of SyncData elements.""" + return [member for iteration in self.iterations for member in iteration.members] diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index d2e7cec7..6c49494f 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -20,20 +20,6 @@ from projects.models import Project -class SyncDataTest(TestCase): - """Test SyncData class (struct).""" - - def setUp(self): - """setup test environment.""" - self.sync = awssync.SyncData - - def test_throw_type_error_SyncData_class(self): - """Test Type Error when equals is called on wrong 
type.""" - with self.assertRaises(TypeError) as context: - self.sync("", "", "") == [] - self.assertTrue("Must compare to object of type SyncData" in str(context.exception)) - - class AWSSyncTest(TestCase): """Test AWSSync class.""" @@ -901,38 +887,6 @@ def test_get_aws_data_no_slugs(self): self.assertTrue(self.sync.fail) -class AWSSyncListTest(TestCase): - """Test AWSSyncList class.""" - - def setUp(self): - self.sync = awssync.AWSSync() - self.syncData = awssync.SyncData - - self.test1 = self.syncData("test1@test1.test1", "test1", "test1") - self.test2 = self.syncData("test2@test2.test2", "test2", "test2") - self.test3 = self.syncData("test3@test3.test3", "test3", "test3") - - def test_AWS_sync_list_both_empty(self): - gip_list = [] - aws_list = [] - self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) - - def test_AWS_sync_list_empty_AWS(self): - gip_list = [self.test1, self.test2] - aws_list = [] - self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list) - - def test_AWS_sync_list_empty_GiP(self): - gip_list = [] - aws_list = [self.test1, self.test2] - self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) - - def test_AWS_sync_list_both_full(self): - gip_list = [self.test1, self.test2] - aws_list = [self.test2, self.test3] - self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1]) - - class AWSAPITalkerTest(TestCase): def mock_api(self, operation_name, kwarg): if operation_name == "CreateOrganization": @@ -1007,165 +961,3 @@ def mock_api(self, operation_name, kwarg): "create_policy", ) return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg) - - -class AWSTreeChecksTest(TestCase): - """Test checks done on AWSTree data struncture.""" - - def setUp(self): - self.sync = awssync.AWSSync() - self.awstree = awssync.AWSTree("Name", "1234", []) - self.iteration = awssync.Iteration("Name", "1234", []) - self.sync_data = 
awssync.SyncData("email@example.com", "Project X", "Spring 2020") - - self.sync_list = [ - awssync.SyncData("email1@example.com", "Spring 2022", "Project A"), - awssync.SyncData("email2@example.com", "Fall 2022", "Project B"), - awssync.SyncData("email3@example.com", "Spring 2022", "Project C"), - ] - self.aws_list = [ - awssync.SyncData("email4@example.com", "Fall 2021", "Project D"), - awssync.SyncData("email5@example.com", "Spring 2022", "Project E"), - awssync.SyncData("email6@example.com", "Fall 2022", "Project F"), - ] - - self.treelist = [ - awssync.SyncData("email1@example.com", "project1", "Fall 2020"), - awssync.SyncData("email2@example.com", "project2", "Fall 2020"), - awssync.SyncData("email3@example.com", "project3", "Spring 2021"), - awssync.SyncData("email4@example.com", "project4", "Spring 2021"), - ] - - self.aws_tree1 = awssync.AWSTree( - "AWS Tree", - "12345", - [ - awssync.Iteration( - "Fall 2020", - "54321", - [ - awssync.SyncData("email1@example.com", "project1", "Fall 2020"), - awssync.SyncData("email2@example.com", "project2", "Fall 2020"), - ], - ), - awssync.Iteration( - "Spring 2021", - "98765", - [ - awssync.SyncData("email3@example.com", "project3", "Spring 2021"), - awssync.SyncData("email4@example.com", "project4", "Spring 2021"), - ], - ), - ], - ) - - self.aws_tree2 = awssync.AWSTree( - "AWS Tree", - "12345", - [ - awssync.Iteration( - "Fall 2020", - "54321", - [ - awssync.SyncData("email1@example.com", "project1", "Fall 2020"), - awssync.SyncData("email2@example.com", "project2", "Fall 2020"), - ], - ), - awssync.Iteration( - "Spring 2021", - "98765", - [ - awssync.SyncData("email3@example.com", "project3", "Fall 2021"), - awssync.SyncData("email4@example.com", "project4", "Spring 2021"), - ], - ), - ], - ) - - self.aws_tree3 = awssync.AWSTree( - "AWS Tree", - "12345", - [ - awssync.Iteration( - "Fall 2020", - "54321", - [ - awssync.SyncData("email1@example.com", "project1", "Fall 2020"), - awssync.SyncData("email2@example.com", 
"project2", "Fall 2020"), - ], - ), - awssync.Iteration( - "Fall 2020", - "98765", - [ - awssync.SyncData("email3@example.com", "project3", "Fall 2021"), - awssync.SyncData("email4@example.com", "project4", "Spring 2021"), - ], - ), - ], - ) - - def test_repr_AWSTree(self): - self.assertEquals(str(self.awstree), "AWSTree('Name', '1234', [])") - - def test_repr_Iteration(self): - self.assertEquals(str(self.iteration), "Iteration('Name', '1234', [])") - - def test_repr_SyncData(self): - self.assertEquals(str(self.sync_data), "SyncData('email@example.com', 'Project X', 'Spring 2020')") - - def test_awstree_to_syncdata_list(self): - self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist) - - def test_check_for_double_member_email(self): - # Test when there are no duplicate emails - self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) - - # Test when there is a duplicate email - self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G")) - self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) - - def test_check_current_ou_exists(self): - # Test when current semester OU does not exist - with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"): - self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022") - val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) - self.assertEqual((val1, val2), (False, None)) - - # Test when current semester OU exists - with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"): - self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021") - val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) - self.assertEqual((val1, val2), (True, "98765")) - - def test_check_members_in_correct_iteration(self): - # Test when correct - val1, val2 = 
self.sync.check_members_in_correct_iteration(self.aws_tree1) - self.assertEqual((val1, val2), (True, None)) - - # Test when incorrect - val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2) - self.assertEqual((val1, val2), (False, ["email3@example.com"])) - - def test_check_double_iteration_names(self): - # Test when correct - val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1) - self.assertEqual((val1, val2), (False, None)) - - # Test when double - val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3) - self.assertEqual((val1, val2), (True, ["Fall 2020"])) - - def test_AWSTree_equals(self): - self.assertEqual(self.aws_tree1, self.aws_tree1) - self.assertNotEqual(self.aws_tree1, self.aws_tree2) - with self.assertRaises(TypeError): - awssync.AWSTree("", "", []) == [] - self.assertRaises(TypeError) - - def test_Iteration_equals(self): - self.assertEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[0]) - self.assertNotEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[1]) - with self.assertRaises(TypeError): - awssync.Iteration("", "", []) == [] - self.assertRaises(TypeError) diff --git a/website/projects/tests/test_awssync_structs.py b/website/projects/tests/test_awssync_structs.py new file mode 100644 index 00000000..6f6b4fff --- /dev/null +++ b/website/projects/tests/test_awssync_structs.py @@ -0,0 +1,211 @@ +"""Tests for awssync_structs.py.""" + +from unittest.mock import patch + +from django.test import TestCase + +from courses.models import Semester + +from projects import awssync + + +class SyncDataTest(TestCase): + """Test SyncData class (struct).""" + + def setUp(self): + """setup test environment.""" + self.sync = awssync.SyncData + + def test_throw_type_error_SyncData_class(self): + """Test Type Error when equals is called on wrong type.""" + self.assertRaises(TypeError, self.sync("a", "b", "c").__eq__, 123) + + +class AWSSyncListTest(TestCase): + """Test AWSSyncList class.""" + + def 
setUp(self): + self.sync = awssync.AWSSync() + self.syncData = awssync.SyncData + + self.test1 = self.syncData("test1@test1.test1", "test1", "test1") + self.test2 = self.syncData("test2@test2.test2", "test2", "test2") + self.test3 = self.syncData("test3@test3.test3", "test3", "test3") + + def test_AWS_sync_list_both_empty(self): + gip_list = [] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_empty_AWS(self): + gip_list = [self.test1, self.test2] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list) + + def test_AWS_sync_list_empty_GiP(self): + gip_list = [] + aws_list = [self.test1, self.test2] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_both_full(self): + gip_list = [self.test1, self.test2] + aws_list = [self.test2, self.test3] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1]) + + +class AWSTreeChecksTest(TestCase): + """Test checks done on AWSTree data struncture.""" + + def setUp(self): + self.sync = awssync.AWSSync() + self.awstree = awssync.AWSTree("Name", "1234", []) + self.iteration = awssync.Iteration("Name", "1234", []) + self.sync_data = awssync.SyncData("email@example.com", "Project X", "Spring 2020") + + self.sync_list = [ + awssync.SyncData("email1@example.com", "Spring 2022", "Project A"), + awssync.SyncData("email2@example.com", "Fall 2022", "Project B"), + awssync.SyncData("email3@example.com", "Spring 2022", "Project C"), + ] + self.aws_list = [ + awssync.SyncData("email4@example.com", "Fall 2021", "Project D"), + awssync.SyncData("email5@example.com", "Spring 2022", "Project E"), + awssync.SyncData("email6@example.com", "Fall 2022", "Project F"), + ] + + self.treelist = [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + 
awssync.SyncData("email3@example.com", "project3", "Spring 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ] + + self.aws_tree1 = awssync.AWSTree( + "AWS Tree", + "12345", + [ + awssync.Iteration( + "Fall 2020", + "54321", + [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + awssync.Iteration( + "Spring 2021", + "98765", + [ + awssync.SyncData("email3@example.com", "project3", "Spring 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree2 = awssync.AWSTree( + "AWS Tree", + "12345", + [ + awssync.Iteration( + "Fall 2020", + "54321", + [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + awssync.Iteration( + "Spring 2021", + "98765", + [ + awssync.SyncData("email3@example.com", "project3", "Fall 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree3 = awssync.AWSTree( + "AWS Tree", + "12345", + [ + awssync.Iteration( + "Fall 2020", + "54321", + [ + awssync.SyncData("email1@example.com", "project1", "Fall 2020"), + awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + awssync.Iteration( + "Fall 2020", + "98765", + [ + awssync.SyncData("email3@example.com", "project3", "Fall 2021"), + awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + def test_repr_AWSTree(self): + self.assertEquals(repr(self.awstree), "AWSTree('Name', '1234', [])") + + def test_repr_Iteration(self): + self.assertEquals(repr(self.iteration), "Iteration('Name', '1234', [])") + + def test_repr_SyncData(self): + self.assertEquals(repr(self.sync_data), "SyncData('email@example.com', 'Project X', 'Spring 2020')") + + def test_awstree_to_syncdata_list(self): + 
self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist) + + def test_check_for_double_member_email(self): + # Test when there are no duplicate emails + self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) + + # Test when there is a duplicate email + self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G")) + self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) + + def test_check_current_ou_exists(self): + # Test when current semester OU does not exist + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"): + self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022") + val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) + self.assertEqual((val1, val2), (False, None)) + + # Test when current semester OU exists + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"): + self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021") + val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) + self.assertEqual((val1, val2), (True, "98765")) + + def test_check_members_in_correct_iteration(self): + # Test when correct + val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree1) + self.assertEqual((val1, val2), (True, None)) + + # Test when incorrect + val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2) + self.assertEqual((val1, val2), (False, ["email3@example.com"])) + + def test_check_double_iteration_names(self): + # Test when correct + val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1) + self.assertEqual((val1, val2), (False, None)) + + # Test when double + val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3) + self.assertEqual((val1, val2), (True, ["Fall 2020"])) + + def test_AWSTree_equals(self): + self.assertEqual(self.aws_tree1, 
self.aws_tree1) + self.assertNotEqual(self.aws_tree1, self.aws_tree2) + self.assertRaises(TypeError, awssync.AWSTree("", "", []).__eq__, []) + + def test_Iteration_equals(self): + self.assertEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[0]) + self.assertNotEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[1]) + self.assertRaises(TypeError, awssync.Iteration("", "", []).__eq__, []) From 0acac89f62ad7022dec8b648d6326d10e666c648 Mon Sep 17 00:00:00 2001 From: Henk Berendsen <61596108+hb140502@users.noreply.github.com> Date: Tue, 9 May 2023 10:17:35 +0200 Subject: [PATCH 19/32] 44 class for handling all aws api calls (#50) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Updated deliverable sprint 1 (#22) * AWS synchronisation button (#8) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. * Removed unnecessary docstrings. 
* Add boto3 and moto dependencies (#11) * Add logger and replace prints with logs * Add function to create AWS organization * Add unit tests for creating AWS organization * bugfix (#619) Co-authored-by: nvoers * Added logger setlevel (#20) --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com> Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com> Co-authored-by: nvoers Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com> * Sprint 2 deliverable (#43) * AWS synchronisation button (#8) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. * Removed unnecessary docstrings. * Add boto3 and moto dependencies (#11) * Add logger and replace prints with logs * Add function to create AWS organization * Add unit tests for creating AWS organization * bugfix (#619) Co-authored-by: nvoers * Added logger setlevel (#20) * Db sync (#16) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. 
* Added get mailinglist to the awssync file * Added first version of get_all_managers * Added test case for mailing lists * Removed some prints * reformatted using black * flake8 intentation added * flake8 intentation correction * Removed get manager * Linting added * unused import removed * Added get_teamid_from_email * Changed function email with teamid * Updated get_emails_with_teamids, working now * Added test for get_emails_with_ids * Added linting * linting * Added more test * Linting in awssync and its test file * Moved the imports around * moved the imports around * Black linting * switched imports around * Switched imports around part 2 * Switched imports around part 3 * Switched imports around part 4 * Fixed when no project exist for mailing list * Added some more tests * Removed exeption try expect * Black linting * Changed get_email_with_teamid to new format * Changed get_emails_with_teamids to go over project * Added tests for get_emails_with_teamids * changed info for get_emails_with_teamids --------- Co-authored-by: Henk * Db sync (#25) * Added button to /admin/projects/project page on website * Framework for AWS synchronisation button functionality * Linked synchronisation button to framework * Fixed import order to comply with flake8 * Added docstring to synchronise_to_AWS * Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed. * Added tests and changed awssync.py to allow for testing the button_pressed function. 
* Added get mailinglist to the awssync file * Added first version of get_all_managers * Added test case for mailing lists * Removed some prints * reformatted using black * flake8 intentation added * flake8 intentation correction * Removed get manager * Linting added * unused import removed * Added get_teamid_from_email * Changed function email with teamid * Updated get_emails_with_teamids, working now * Added test for get_emails_with_ids * Added linting * linting * Added more test * Linting in awssync and its test file * Moved the imports around * moved the imports around * Black linting * switched imports around * Switched imports around part 2 * Switched imports around part 3 * Switched imports around part 4 * Fixed when no project exist for mailing list * Added some more tests * Removed exeption try expect * Black linting * Changed get_email_with_teamid to new format * Changed get_emails_with_teamids to go over project * Added tests for get_emails_with_teamids * changed info for get_emails_with_teamids * Changed email data dict to struct * added test for TypeError exception for eq operator * resolved linting errors * changed comment to correct datatype * dramatically improved test class name --------- Co-authored-by: Henk Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com> Co-authored-by: Jer111 * Added function to generate which users have to be invited after the sync button is pressed (#23) * Added 'generate_aws_sync_list' function and tests * solved black errors * changed generate_aws_sync_list to use SyncData structure * Create and attach SCP policies (#29) * Add functions for creating and attaching SCP policies * Improve test cases * 12 moto helper (#36) * merged with development and added create_c_i_OU * Added some tests for create_c_i_OU * Added some tests for create_c_i_ou * Linting * Changed the mock_api call back to orginal * Added create_team_ou with tests * Fix problems with moto testing * Worked on tests and added apitalkerclass * Make 
test asserts more meaningful * black * Added tests for create_ou's without parts * Added one test that gets all children under OU * Fix linting * Changed return of response create team ou did not save the name of the team OU * Fix test create team OU * Resolved linting issues * Fix flake8 * remove create_team_ou --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> Co-authored-by: Fouad Lamsettef * Add checks for edge cases between AWS and Giphouse databases (#37) * added double user check (partly) * added some checks and made two new fancy classes for the storage of AWS tree dictionaries * added tests * added equals for AWSTree and Iteration objects * test stupid error * does it work now? * resolved merge conflicts with rebasing on development * cleaned up code based on pull request comments * Extraction of AWS data * Added function for extracting data * Added mock format * finished function (testing needed) * Linting fix * fix no return of tree * Fix AttributeError * Unwrap tuple instead of accessing by element to increase code readability * Fixed for new struct * Implementation bug fixes * added tests (not done) * Removed classes for merge * Added function for extracting data * Added mock format * finished function (testing needed) * Linting fix * fix no return of tree * Fix AttributeError * Unwrap tuple instead of accessing by element to increase code readability * Fixed for new struct * Implementation bug fixes * added tests (not done) * Linting fix * git fixes * Black fix * pydocstyle fix * Black fix again * removed flake8 'fix' * Final flake8 fix * Final final flake8 fix * spelling error fix --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> * AWS synchronization pipeline (and integration bug fixes) (#42) * Create and attach SCP policies (#29) * Add functions for creating and attaching SCP policies * Improve test cases * Add function template * Fix 'a/an' grammar mistake * Add pipeline preconditions * Add 
pipeline preconditions tests * Add checks for required API actions * Add test cases for checking required API actions * Added implementation of creating and attaching policy in the pipeline * Remove double API actions * Added implementation of creating and moving accounts in the pipeline. This should still be divided into smaller functions. * Increase code readability * Stop checking account request status after known failure * Fixed small typos and added account details to a debug message about account creation failure * Added tests for pipeline policy and fixed typos in debug messages. * Split creating and moving accounts into multiple functions, and handle exceptions * added update_course_itation_ou with tests * updated pipeline_update_current_course_iteration * Add test cases for creating and moving member accounts * Bug fixes for pipeline dependencies that arose from integration sprint 2 tasks * Revised pipeline policy function and corresponding tests so that it should not fail after first pipeline run * Change duplicate policy attachment to soft-fail; replace organization ID with root ID --------- Co-authored-by: Henk Co-authored-by: Jer111 --------- Co-authored-by: Henk Berendsen <61596108+hb140502@users.noreply.github.com> Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com> Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com> Co-authored-by: nvoers Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com> Co-authored-by: Henk Co-authored-by: mitchellboes <49476235+mitchellboes@users.noreply.github.com> Co-authored-by: Jer111 Co-authored-by: Fouad Lamsettef * Added class for API calls and preliminary version of the tests * Added final test for API talker class * Changed folder structure to organize AWS-related files * Fixed tests which failed due to not being mocked correctly * Removed aws files which have been moved to different folders * Fixed 
bug which caused mocking to not work * Added version of the api talker test which attempts to fix the error in test_simulate_principal_policy * Fix mocking issue * Added three remaining API calls and a test for one of them (list_tags_for_resource). Also added a function in the tests which creates an organization and returns its ID to clean up the other tests a bit. * Added function and test for list_roots API call * Fixed review comments * Fixed formatting issue in test_awssync.py and resolved review comment in test_awsapitalker.py --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com> Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com> Co-authored-by: nvoers Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com> Co-authored-by: Filip Łysak <92109241+FilipLysak001@users.noreply.github.com> Co-authored-by: mitchellboes <49476235+mitchellboes@users.noreply.github.com> Co-authored-by: Jer111 Co-authored-by: Fouad Lamsettef --- website/projects/aws/__init__.py | 0 website/projects/aws/awsapitalker.py | 162 ++++++++++++++ website/projects/tests/tests_aws/__init__.py | 0 .../tests/tests_aws/test_awsapitalker.py | 197 ++++++++++++++++++ 4 files changed, 359 insertions(+) create mode 100644 website/projects/aws/__init__.py create mode 100644 website/projects/aws/awsapitalker.py create mode 100644 website/projects/tests/tests_aws/__init__.py create mode 100644 website/projects/tests/tests_aws/test_awsapitalker.py diff --git a/website/projects/aws/__init__.py b/website/projects/aws/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/website/projects/aws/awsapitalker.py b/website/projects/aws/awsapitalker.py new file mode 100644 index 00000000..308a07da --- /dev/null +++ b/website/projects/aws/awsapitalker.py @@ -0,0 +1,162 @@ +import boto3 + +import botocore + + +class AWSAPITalker: + """Communicate with AWS API using boto3.""" + + 
def __init__(self): + """ + Initialize in order to communicate with the AWS API. + + First, initializes the boto3 clients which communicate with AWS. + Second, sets the maximum amount of elements to fit on one page of an AWS response. + """ + self.iam_client = boto3.client("iam") + self.org_client = boto3.client("organizations") + self.sts_client = boto3.client("sts") + + self.max_results = 20 + + def create_organization(self, feature_set: str) -> dict: + """ + Create an AWS organization. + + :param feature_set: enabled features in the organization (either 'ALL' or 'CONSOLIDATED BILLING'). + :return: dictionary containing information about the organization. + """ + return self.org_client.create_organization(FeatureSet=feature_set) + + def create_organizational_unit(self, parent_id: str, ou_name: str, tags: list[dict] = []) -> dict: + """ + Create an organizational unit. + + :param parent_id: the root/OU below which where the new OU will be created. + :param ou_name: the name of the new OU. + :param tags: tags (list of dictionaries containing the keys 'Key' and 'Value') to be attached to the account. + :return: dictionary containing information about the organizational unit. + """ + return self.org_client.create_organizational_unit(ParentId=parent_id, Name=ou_name, Tags=tags) + + def attach_policy(self, target_id: str, policy_id: str): + """ + Attach the specified policy to the specified target. + + :param target_id: ID of the target to which the policy should be attached. + :param policy_id: ID of the policy to attach. + """ + self.org_client.attach_policy(TargetId=target_id, PolicyId=policy_id) + + def get_caller_identity(self) -> dict: + """Get the identity of the caller of the API actions.""" + return self.sts_client.get_caller_identity() + + def simulate_principal_policy(self, policy_source_arn: str, action_names: list[str]) -> dict: + """ + Determine the effective permissions of the policies of an IAM entity by simulating API actions. 
+ + :param policy_source: ARN of the IAM entity. + :param action_names: list of AWS API actions to simulate. + :return: dictionary containing information about the simulation's outcome. + """ + return self.iam_client.simulate_principal_policy(PolicySourceArn=policy_source_arn, ActionNames=action_names) + + def describe_organization(self) -> dict: + """Describe the AWS organization.""" + return self.org_client.describe_organization() + + def describe_policy(self, policy_id: str) -> dict: + """Describe the policy with the specified ID.""" + return self.org_client.describe_policy(PolicyId=policy_id) + + def create_account(self, email: str, account_name: str, tags: list[dict] = []) -> dict: + """ + Move an AWS account in the organization. + + :param email: email address of the account. + :param account_name: name of the account. + :param tags: tags (list of dictionaries containing the keys 'Key' and 'Value') to be attached to the account. + :return: dictionary containing information about the account creation status. + """ + return self.org_client.create_account( + Email=email, AccountName=account_name, IamUserAccessToBilling="DENY", Tags=tags + ) + + def move_account(self, account_id: str, source_parent_id: str, dest_parent_id: str): + """ + Move an AWS account in the organization. + + :param account_id: ID of the account. + :param source_parent_id: ID of the root/OU containing the account. + :param dest_parent_id: ID of the root/OU which the account should be moved to. + """ + self.org_client.move_account( + AccountId=account_id, SourceParentId=source_parent_id, DestinationParentId=dest_parent_id + ) + + def combine_pages(self, page_iterator: botocore.paginate.PageIterator, key: str) -> list[dict]: + """ + Combine the information on each page of an AWS API response into a list. + + This function is only used for AWS API operations which can return multiple pages as a response. + + :param page_iterator: boto3 feature which iterates over all pages. 
+ :param key: the key corresponding to the list of values to be retrieved from each page. + :return: a list that combines the values from all pages. + """ + list = [] + + for page in page_iterator: + list = list + page[key] + + return list + + def list_organizational_units_for_parent(self, parent_id: str) -> list[dict]: + """ + List all organizational units below the specified parent. + + :param parent_id: ID of the parent. + :return: list of dictionaries containing organizational unit information. + """ + paginator = self.org_client.get_paginator("list_organizational_units_for_parent") + page_iterator = paginator.paginate(ParentId=parent_id, MaxResults=self.max_results) + + return self.combine_pages(page_iterator, "OrganizationalUnits") + + def list_accounts_for_parent(self, parent_id: str) -> list[dict]: + """ + List all accounts below the specified parent. + + :param parent_id: ID of the parent. + :return: list of dictionaries containing account information + """ + paginator = self.org_client.get_paginator("list_accounts_for_parent") + page_iterator = paginator.paginate(ParentId=parent_id, MaxResults=self.max_results) + + return self.combine_pages(page_iterator, "Accounts") + + def list_tags_for_resource(self, resource_id: str) -> list[dict]: + """ + List all tags belonging to the specified resource. + + :param resource_id: ID of the resource. + :return: list of dictionaries containing tag information + """ + paginator = self.org_client.get_paginator("list_tags_for_resource") + page_iterator = paginator.paginate( + ResourceId=resource_id, + ) + + return self.combine_pages(page_iterator, "Tags") + + def list_roots(self) -> list[dict]: + """ + List all roots in the organization. + + :return: list of dictionaries containing root information. 
+ """ + paginator = self.org_client.get_paginator("list_roots") + page_iterator = paginator.paginate() + + return self.combine_pages(page_iterator, "Roots") diff --git a/website/projects/tests/tests_aws/__init__.py b/website/projects/tests/tests_aws/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/website/projects/tests/tests_aws/test_awsapitalker.py b/website/projects/tests/tests_aws/test_awsapitalker.py new file mode 100644 index 00000000..5d6a1c9e --- /dev/null +++ b/website/projects/tests/tests_aws/test_awsapitalker.py @@ -0,0 +1,197 @@ +import json +from unittest.mock import MagicMock, patch + +import boto3 + +from django.test import TestCase + +from moto import mock_organizations, mock_sts + +from projects.aws import awsapitalker + + +class AWSAPITalkerTest(TestCase): + """Test AWSAPITalker class.""" + + def setUp(self): + """Set up testing environment.""" + self.mock_org = mock_organizations() + self.mock_sts = mock_sts() + self.mock_org.start() + self.mock_sts.start() + self.api_talker = awsapitalker.AWSAPITalker() + + def tearDown(self): + self.mock_org.stop() + self.mock_sts.stop() + + def create_organization(self): + """Returns the ID of the organization created for testing""" + org_info = self.api_talker.create_organization("ALL") + return org_info["Organization"]["Id"] + + def create_dummy_policy_content(self): + """Returns a string containing the content of a policy used for testing.""" + return json.dumps({"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}) + + def create_dummy_policy(self): + """ + Creates a policy used for testing. + + :return: ID of the created policy. 
+ """ + moto_client = boto3.client("organizations") + + policy_content = self.create_dummy_policy_content() + + return moto_client.create_policy( + Name="Test policy", + Content=policy_content, + Type="SERVICE_CONTROL_POLICY", + Description="Policy for testing purposes", + )["Policy"]["PolicySummary"]["Id"] + + def test_create_organization(self): + response = self.api_talker.create_organization("ALL") + + self.assertEquals(response["Organization"]["FeatureSet"], "ALL") + + def test_create_organizational_unit(self): + org_id = self.create_organization() + + response = self.api_talker.create_organizational_unit(org_id, "Test OU") + + self.assertEqual(response["OrganizationalUnit"]["Name"], "Test OU") + + def test_attach_policy(self): + moto_client = boto3.client("organizations") + + org_id = self.create_organization() + + policy_id = self.create_dummy_policy() + + ou_info = self.api_talker.create_organizational_unit(org_id, "Test OU") + ou_id = ou_info["OrganizationalUnit"]["Id"] + + self.api_talker.attach_policy(ou_id, policy_id) + + response = moto_client.list_policies_for_target(TargetId=ou_id, Filter="SERVICE_CONTROL_POLICY") + self.assertIn(policy_id, [p["Id"] for p in response["Policies"]]) + + def test_get_caller_identity(self): + response = self.api_talker.get_caller_identity() + self.assertIsNotNone(response) + + def test_simulate_principal_policy(self): + arn = self.api_talker.get_caller_identity()["Arn"] + + with patch.object( + self.api_talker.iam_client, + "simulate_principal_policy", + MagicMock(return_value={"EvaluationResults": [{"EvalDecision": "allowed"}]}), + ): + eval_results = self.api_talker.simulate_principal_policy(arn, ["sts:SimulatePrincipalPolicy"])[ + "EvaluationResults" + ] + + self.assertEquals(eval_results[0]["EvalDecision"], "allowed") + + def test_describe_organization(self): + self.create_organization() + + response = self.api_talker.describe_organization() + + self.assertIn("Organization", response) + 
self.assertIn("MasterAccountId", response["Organization"]) + self.assertIn("MasterAccountEmail", response["Organization"]) + + def test_describe_policy(self): + self.create_organization() + + policy_id = self.create_dummy_policy() + + policy = self.api_talker.describe_policy(policy_id)["Policy"] + policy_summary = policy["PolicySummary"] + policy_content = self.create_dummy_policy_content() + + self.assertEquals(policy_summary["Name"], "Test policy") + self.assertEquals(policy_summary["Description"], "Policy for testing purposes") + self.assertEquals(policy_content, policy["Content"]) + + def test_create_account(self): + moto_client = boto3.client("organizations") + + self.create_organization() + + response = self.api_talker.create_account("test@example.com", "Test") + + accounts = moto_client.list_accounts()["Accounts"] + + self.assertEquals(response["CreateAccountStatus"]["AccountName"], "Test") + self.assertIn(("Test", "test@example.com"), [(account["Name"], account["Email"]) for account in accounts]) + + def test_move_account(self): + moto_client = boto3.client("organizations") + + org_id = self.create_organization() + + account_status = self.api_talker.create_account("test@example.com", "Test") + account_id = account_status["CreateAccountStatus"]["AccountId"] + + source_ou_info = self.api_talker.create_organizational_unit(org_id, "Source OU") + source_ou_id = source_ou_info["OrganizationalUnit"]["Id"] + dest_ou_info = self.api_talker.create_organizational_unit(org_id, "Destination OU") + dest_ou_id = dest_ou_info["OrganizationalUnit"]["Id"] + + self.api_talker.move_account(account_id, source_ou_id, dest_ou_id) + + accounts_under_source = moto_client.list_children(ParentId=source_ou_id, ChildType="ACCOUNT")["Children"] + accounts_under_dest = moto_client.list_children(ParentId=dest_ou_id, ChildType="ACCOUNT")["Children"] + self.assertNotIn(account_id, [account["Id"] for account in accounts_under_source]) + self.assertIn(account_id, [account["Id"] for account in 
accounts_under_dest]) + + def test_list_organizational_units_for_parent(self): + self.create_organization() + + root_id = self.api_talker.list_roots()[0]["Id"] + + ou_1 = self.api_talker.create_organizational_unit(root_id, "Test OU 1")["OrganizationalUnit"] + ou_2 = self.api_talker.create_organizational_unit(root_id, "Test OU 2")["OrganizationalUnit"] + + received_ou_list = self.api_talker.list_organizational_units_for_parent(root_id) + + self.assertCountEqual([ou_1, ou_2], received_ou_list) + + def test_list_accounts_for_parent(self): + self.create_organization() + + self.api_talker.create_account("test1@example.com", "Test Account 1") + self.api_talker.create_account("test2@example.com", "Test Account 2") + + root_id = self.api_talker.list_roots()[0]["Id"] + + received_accounts = self.api_talker.list_accounts_for_parent(root_id) + received_emails = [account["Email"] for account in received_accounts] + + expected_emails = ["master@example.com", "test1@example.com", "test2@example.com"] + + self.assertEqual(expected_emails, received_emails) + + def test_list_tags_for_resource(self): + org_id = self.create_organization() + + specified_tags = [{"Key": "key1", "Value": "val1"}, {"Key": "key2", "Value": "val2"}] + + response = self.api_talker.create_organizational_unit(org_id, "Test OU", specified_tags) + ou_id = response["OrganizationalUnit"]["Id"] + + received_tags = self.api_talker.list_tags_for_resource(ou_id) + + self.assertEqual(specified_tags, received_tags) + + def test_list_roots(self): + self.create_organization() + + roots = self.api_talker.list_roots() + + self.assertTrue(len(roots) == 1) From d87c28855530f2026fa369f4291861cfafd9b3b9 Mon Sep 17 00:00:00 2001 From: mitchellboes <49476235+mitchellboes@users.noreply.github.com> Date: Tue, 9 May 2023 11:39:53 +0200 Subject: [PATCH 20/32] Merge #41 (check double iteration names, members in correct iteration) into development * refactored only the necessary checks to other files, as well as their tests. 
* made changes based on pull request --- website/projects/awssync.py | 40 +------- website/projects/awssync_checks.py | 32 +++++++ website/projects/tests/test_awssync.py | 8 +- website/projects/tests/test_awssync_checks.py | 93 +++++++++++++++++++ .../projects/tests/test_awssync_structs.py | 18 ---- 5 files changed, 132 insertions(+), 59 deletions(-) create mode 100644 website/projects/awssync_checks.py create mode 100644 website/projects/tests/test_awssync_checks.py diff --git a/website/projects/awssync.py b/website/projects/awssync.py index 95562452..7221cf02 100644 --- a/website/projects/awssync.py +++ b/website/projects/awssync.py @@ -1,4 +1,5 @@ """Framework for synchronisation with Amazon Web Services (AWS).""" +from __future__ import annotations import json import logging @@ -13,6 +14,7 @@ from mailing_lists.models import MailingList +from projects.awssync_checks import Checks from projects.awssync_structs import AWSTree, Iteration, SyncData from projects.models import Project @@ -517,15 +519,9 @@ def pipeline(self): if self.check_for_double_member_email(aws_sync_data, merged_sync_data): return False - success, incorrect_emails = self.check_members_in_correct_iteration(aws_tree) - if not success: - self.logger.debug(f"Got incorrectly placed AWS member accounts: {incorrect_emails}.") - return False - - failure, double_iteration_names = self.check_double_iteration_names(aws_tree) - if failure: - self.logger.debug(f"Found double iteration names: {double_iteration_names}.") - return False + checker = Checks() + checker.check_members_in_correct_iteration(aws_tree) + checker.check_double_iteration_names(aws_tree) # Check/create course iteration OU. 
current_course_iteration_exists, response = self.pipeline_update_current_course_iteration_ou(aws_tree) @@ -575,32 +571,6 @@ def check_current_ou_exists(self, AWSdata: AWSTree): return (False, None) - def check_members_in_correct_iteration(self, AWSdata: AWSTree): - """Check if the data from the member tag matches the semester OU it is in.""" - incorrect_emails = [] - for iteration in AWSdata.iterations: - for member in iteration.members: - if member.project_semester != iteration.name: - incorrect_emails.append(member.project_email) - - if incorrect_emails != []: - return (False, incorrect_emails) - - return (True, None) - - def check_double_iteration_names(self, AWSdata: AWSTree): - """Check if there are multiple OU's with the same name in AWS.""" - names = [iteration.name for iteration in AWSdata.iterations] - doubles = [] - - for name in names: - if names.count(name) != 1 and name not in doubles: - doubles.append(name) - - if doubles != []: - return (True, doubles) - return (False, None) - def extract_aws_setup(self, parent_ou_id): """ Give a list of all the children of the parent OU. 
diff --git a/website/projects/awssync_checks.py b/website/projects/awssync_checks.py new file mode 100644 index 00000000..7c28f02d --- /dev/null +++ b/website/projects/awssync_checks.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from projects.awssync_structs import AWSTree + + +class Checks: + """Class for pipeline checks.""" + + def check_members_in_correct_iteration(self, AWSdata: AWSTree) -> None: + """Check if the data from the member tag matches the semester OU it is in.""" + emails_inconsistent_accounts = [ + member.project_email + for iteration in AWSdata.iterations + for member in iteration.members + if member.project_semester != iteration.name + ] + + if emails_inconsistent_accounts: + raise Exception( + f"There are members in a course iteration OU with an inconsistent course iteration tag.\ + Inconsistent names are {emails_inconsistent_accounts}" + ) + + def check_double_iteration_names(self, AWSdata: AWSTree) -> None: + """Check if there are multiple OU's with the same name in AWS.""" + names = [iteration.name for iteration in AWSdata.iterations] + duplicates = [iteration_name for iteration_name in set(names) if names.count(iteration_name) > 1] + + if duplicates: + raise Exception( + f"There are multiple course iteration OUs with the same name. 
Duplicates are: {duplicates}" + ) diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py index 6c49494f..0f493b77 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/test_awssync.py @@ -618,9 +618,7 @@ def test_pipeline__edge_case_incorrectly_placed(self): self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): - success = self.sync.pipeline() - - self.assertFalse(success) + self.assertRaises(Exception, self.sync.pipeline) def test_pipeline__edge_case_double_iteration_names(self): moto_client = boto3.client("organizations") @@ -643,9 +641,7 @@ def test_pipeline__edge_case_double_iteration_names(self): self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): - success = self.sync.pipeline() - - self.assertFalse(success) + self.assertRaises(Exception, self.sync.pipeline) def test_pipeline__failed_creating_iteration_ou(self): moto_client = boto3.client("organizations") diff --git a/website/projects/tests/test_awssync_checks.py b/website/projects/tests/test_awssync_checks.py new file mode 100644 index 00000000..6e40fc70 --- /dev/null +++ b/website/projects/tests/test_awssync_checks.py @@ -0,0 +1,93 @@ +"""Tests for awssync/checks.py.""" + +from django.test import TestCase + +from projects.awssync_checks import Checks +from projects.awssync_structs import AWSTree, Iteration, SyncData + + +class ChecksTest(TestCase): + def setUp(self): + self.checks = Checks() + self.aws_tree1 = AWSTree( + "AWS Tree", + "12345", + [ + Iteration( + "Fall 2020", + "54321", + [ + SyncData("email1@example.com", "project1", "Fall 2020"), + SyncData("email2@example.com", "project2", 
"Fall 2020"), + ], + ), + Iteration( + "Spring 2021", + "98765", + [ + SyncData("email3@example.com", "project3", "Spring 2021"), + SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree2 = AWSTree( + "AWS Tree", + "12345", + [ + Iteration( + "Fall 2020", + "54321", + [ + SyncData("email1@example.com", "project1", "Fall 2020"), + SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + Iteration( + "Spring 2021", + "98765", + [ + SyncData("email3@example.com", "project3", "Fall 2021"), + SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree3 = AWSTree( + "AWS Tree", + "12345", + [ + Iteration( + "Fall 2020", + "54321", + [ + SyncData("email1@example.com", "project1", "Fall 2020"), + SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + Iteration( + "Fall 2020", + "98765", + [ + SyncData("email3@example.com", "project3", "Fall 2021"), + SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + def test_check_members_in_correct_iteration(self): + # Test when correct + self.assertIsNone(self.checks.check_members_in_correct_iteration(self.aws_tree1)) + + # Test when incorrect + self.assertRaises(Exception, self.checks.check_members_in_correct_iteration, self.aws_tree2) + + def test_check_double_iteration_names(self): + # Test when correct + self.assertIsNone(self.checks.check_double_iteration_names(self.aws_tree1)) + + # Test when double + self.assertRaises(Exception, self.checks.check_double_iteration_names, self.aws_tree3) diff --git a/website/projects/tests/test_awssync_structs.py b/website/projects/tests/test_awssync_structs.py index 6f6b4fff..8b27840a 100644 --- a/website/projects/tests/test_awssync_structs.py +++ b/website/projects/tests/test_awssync_structs.py @@ -182,24 +182,6 @@ def test_check_current_ou_exists(self): val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) self.assertEqual((val1, val2), (True, "98765")) - 
def test_check_members_in_correct_iteration(self): - # Test when correct - val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree1) - self.assertEqual((val1, val2), (True, None)) - - # Test when incorrect - val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2) - self.assertEqual((val1, val2), (False, ["email3@example.com"])) - - def test_check_double_iteration_names(self): - # Test when correct - val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1) - self.assertEqual((val1, val2), (False, None)) - - # Test when double - val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3) - self.assertEqual((val1, val2), (True, ["Fall 2020"])) - def test_AWSTree_equals(self): self.assertEqual(self.aws_tree1, self.aws_tree1) self.assertNotEqual(self.aws_tree1, self.aws_tree2) From a4dfc69fa6400bf12c6d6bc037a5bfdba055274e Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Wed, 10 May 2023 12:18:51 +0000 Subject: [PATCH 21/32] Refactor pipeline preconditions (#54) * Move all AWS files to dedicated AWS directory * Add refactored pipeline preconditions * Add coverage unit tests --- website/projects/admin.py | 2 +- website/projects/{ => aws}/awssync.py | 4 +- website/projects/aws/awssync_checks.py | 110 +++++++++++ .../aws/awssync_checks_permissions.py | 57 ++++++ website/projects/{ => aws}/awssync_structs.py | 0 website/projects/awssync_checks.py | 32 --- website/projects/tests/test_awssync_checks.py | 93 --------- .../tests/{ => tests_aws}/test_awssync.py | 20 +- .../tests/tests_aws/test_awssync_checks.py | 183 ++++++++++++++++++ .../{ => tests_aws}/test_awssync_structs.py | 2 +- 10 files changed, 364 insertions(+), 139 deletions(-) rename website/projects/{ => aws}/awssync.py (99%) create mode 100644 website/projects/aws/awssync_checks.py create mode 100644 website/projects/aws/awssync_checks_permissions.py rename website/projects/{ => aws}/awssync_structs.py (100%) delete mode 
100644 website/projects/awssync_checks.py delete mode 100644 website/projects/tests/test_awssync_checks.py rename website/projects/tests/{ => tests_aws}/test_awssync.py (98%) create mode 100644 website/projects/tests/tests_aws/test_awssync_checks.py rename website/projects/tests/{ => tests_aws}/test_awssync_structs.py (99%) diff --git a/website/projects/admin.py b/website/projects/admin.py index 7fae8a6d..1cc2e032 100644 --- a/website/projects/admin.py +++ b/website/projects/admin.py @@ -12,7 +12,7 @@ from mailing_lists.models import MailingList -from projects.awssync import AWSSync +from projects.aws.awssync import AWSSync from projects.forms import ProjectAdminForm, RepositoryInlineForm from projects.githubsync import GitHubSync from projects.models import Client, Project, Repository diff --git a/website/projects/awssync.py b/website/projects/aws/awssync.py similarity index 99% rename from website/projects/awssync.py rename to website/projects/aws/awssync.py index 7221cf02..45a2a301 100644 --- a/website/projects/awssync.py +++ b/website/projects/aws/awssync.py @@ -14,8 +14,8 @@ from mailing_lists.models import MailingList -from projects.awssync_checks import Checks -from projects.awssync_structs import AWSTree, Iteration, SyncData +from projects.aws.awssync_checks import Checks +from projects.aws.awssync_structs import AWSTree, Iteration, SyncData from projects.models import Project diff --git a/website/projects/aws/awssync_checks.py b/website/projects/aws/awssync_checks.py new file mode 100644 index 00000000..3c7c0e4b --- /dev/null +++ b/website/projects/aws/awssync_checks.py @@ -0,0 +1,110 @@ +from __future__ import annotations + +import logging + +from projects.aws.awsapitalker import AWSAPITalker +from projects.aws.awssync_structs import AWSTree + + +class Checks: + """Class for pipeline checks.""" + + def __init__(self): + """Initialize an instance with an AWSAPITalker and a logger.""" + self.api_talker = AWSAPITalker() + self.logger = 
logging.getLogger("django.aws") + + def check_members_in_correct_iteration(self, AWSdata: AWSTree) -> None: + """Check if the data from the member tag matches the semester OU it is in.""" + emails_inconsistent_accounts = [ + member.project_email + for iteration in AWSdata.iterations + for member in iteration.members + if member.project_semester != iteration.name + ] + + if emails_inconsistent_accounts: + raise Exception( + f"There are members in a course iteration OU with an inconsistent course iteration tag.\ + Inconsistent names are {emails_inconsistent_accounts}" + ) + + def check_double_iteration_names(self, AWSdata: AWSTree) -> None: + """Check if there are multiple OU's with the same name in AWS.""" + names = [iteration.name for iteration in AWSdata.iterations] + duplicates = [iteration_name for iteration_name in set(names) if names.count(iteration_name) > 1] + + if duplicates: + raise Exception( + f"There are multiple course iteration OUs with the same name. Duplicates are: {duplicates}" + ) + + def check_aws_api_connection(self) -> None: + """Check AWS API connection establishment with current boto3 credentials.""" + self.api_talker.get_caller_identity() + + def check_iam_policy(self, desired_actions: list[str]) -> None: + """Check permissions for list of AWS API actions.""" + iam_user_arn = self.api_talker.get_caller_identity()["Arn"] + policy_evaluations = self.api_talker.simulate_principal_policy(iam_user_arn, desired_actions) + + denied_api_actions = [ + evaluation_result["EvalActionName"] + for evaluation_result in policy_evaluations["EvaluationResults"] + if evaluation_result["EvalDecision"] != "allowed" + ] + + if denied_api_actions: + raise Exception(f"Some AWS API actions have been denied: {denied_api_actions}.") + + def check_organization_existence(self) -> None: + """Check existence AWS organization.""" + self.api_talker.describe_organization() + + def check_is_management_account(self) -> None: + """Check if AWS API caller has same effective 
account ID as the organization's management account.""" + organization_info = self.api_talker.describe_organization() + iam_user_info = self.api_talker.get_caller_identity() + + management_account_id = organization_info["Organization"]["MasterAccountId"] + api_caller_account_id = iam_user_info["Account"] + is_management_account = management_account_id == api_caller_account_id + + if not is_management_account: + raise Exception("AWS API caller and organization's management account have different account IDs.") + + def check_scp_enabled(self) -> None: + """Check if SCP policy type feature is enabled for the AWS organization.""" + organization_info = self.api_talker.describe_organization() + available_policy_types = organization_info["Organization"]["AvailablePolicyTypes"] + + scp_is_enabled = any( + policy["Type"] == "SERVICE_CONTROL_POLICY" and policy["Status"] == "ENABLED" + for policy in available_policy_types + ) + + if not scp_is_enabled: + raise Exception("The SCP policy type is disabled for the organization.") + + def pipeline_preconditions(self, api_permissions: list[str]) -> None: + """ + Check all crucial pipeline preconditions. Raises exception prematurely on failure. + + Preconditions: + 1. Locatable boto3 credentials and successful AWS API connection + 2. Check allowed AWS API actions based on IAM policy of caller + 3. Existing organization for AWS API caller + 4. AWS API caller acts under same account ID as organization's management account ID + 5. 
SCP policy type feature enabled for organization + """ + preconditions = [ + (self.check_aws_api_connection, (), "AWS API connection established"), + (self.check_iam_policy, (api_permissions,), "AWS API actions permissions"), + (self.check_organization_existence, (), "AWS organization existence"), + (self.check_is_management_account, (), "AWS API caller is management account"), + (self.check_scp_enabled, (), "SCP enabled"), + ] + + for precondition, args, description in preconditions: + precondition(*args) + self.logger.info(f"Pipeline precondition success: {description}.") diff --git a/website/projects/aws/awssync_checks_permissions.py b/website/projects/aws/awssync_checks_permissions.py new file mode 100644 index 00000000..6028a04a --- /dev/null +++ b/website/projects/aws/awssync_checks_permissions.py @@ -0,0 +1,57 @@ +api_permissions = [ + # "organizations:AcceptHandshake", + "organizations:AttachPolicy", + # "organizations:CancelHandshake", + # "organizations:CloseAccount", + "organizations:CreateAccount", + # "organizations:CreateGovCloudAccount", + "organizations:CreateOrganization", + "organizations:CreateOrganizationalUnit", + "organizations:CreatePolicy", + # "organizations:DeclineHandshake", + # "organizations:DeleteOrganization", + "organizations:DeleteOrganizationalUnit", + "organizations:DeletePolicy", + "organizations:DeleteResourcePolicy", + # "organizations:DeregisterDelegatedAdministrator", + "organizations:DescribeAccount", + "organizations:DescribeCreateAccountStatus", + "organizations:DescribeEffectivePolicy", + # "organizations:DescribeHandshake", + "organizations:DescribeOrganization", + "organizations:DescribeOrganizationalUnit", + "organizations:DescribePolicy", + "organizations:DescribeResourcePolicy", + "organizations:DetachPolicy", + # "organizations:DisableAWSServiceAccess", + "organizations:DisablePolicyType", + # "organizations:EnableAWSServiceAccess", + # "organizations:EnableAllFeatures", + "organizations:EnablePolicyType", + # 
"organizations:InviteAccountToOrganization", + # "organizations:LeaveOrganization", + # "organizations:ListAWSServiceAccessForOrganization", + "organizations:ListAccounts", + "organizations:ListAccountsForParent", + "organizations:ListChildren", + "organizations:ListCreateAccountStatus", + # "organizations:ListDelegatedAdministrators", + # "organizations:ListDelegatedServicesForAccount", + # "organizations:ListHandshakesForAccount", + # "organizations:ListHandshakesForOrganization", + "organizations:ListOrganizationalUnitsForParent", + "organizations:ListParents", + "organizations:ListPolicies", + "organizations:ListPoliciesForTarget", + "organizations:ListRoots", + "organizations:ListTagsForResource", + "organizations:ListTargetsForPolicy", + "organizations:MoveAccount", + "organizations:PutResourcePolicy", + # "organizations:RegisterDelegatedAdministrator", + # "organizations:RemoveAccountFromOrganization", + "organizations:TagResource", + "organizations:UntagResource", + "organizations:UpdateOrganizationalUnit", + "organizations:UpdatePolicy", +] diff --git a/website/projects/awssync_structs.py b/website/projects/aws/awssync_structs.py similarity index 100% rename from website/projects/awssync_structs.py rename to website/projects/aws/awssync_structs.py diff --git a/website/projects/awssync_checks.py b/website/projects/awssync_checks.py deleted file mode 100644 index 7c28f02d..00000000 --- a/website/projects/awssync_checks.py +++ /dev/null @@ -1,32 +0,0 @@ -from __future__ import annotations - -from projects.awssync_structs import AWSTree - - -class Checks: - """Class for pipeline checks.""" - - def check_members_in_correct_iteration(self, AWSdata: AWSTree) -> None: - """Check if the data from the member tag matches the semester OU it is in.""" - emails_inconsistent_accounts = [ - member.project_email - for iteration in AWSdata.iterations - for member in iteration.members - if member.project_semester != iteration.name - ] - - if emails_inconsistent_accounts: - 
raise Exception( - f"There are members in a course iteration OU with an inconsistent course iteration tag.\ - Inconsistent names are {emails_inconsistent_accounts}" - ) - - def check_double_iteration_names(self, AWSdata: AWSTree) -> None: - """Check if there are multiple OU's with the same name in AWS.""" - names = [iteration.name for iteration in AWSdata.iterations] - duplicates = [iteration_name for iteration_name in set(names) if names.count(iteration_name) > 1] - - if duplicates: - raise Exception( - f"There are multiple course iteration OUs with the same name. Duplicates are: {duplicates}" - ) diff --git a/website/projects/tests/test_awssync_checks.py b/website/projects/tests/test_awssync_checks.py deleted file mode 100644 index 6e40fc70..00000000 --- a/website/projects/tests/test_awssync_checks.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Tests for awssync/checks.py.""" - -from django.test import TestCase - -from projects.awssync_checks import Checks -from projects.awssync_structs import AWSTree, Iteration, SyncData - - -class ChecksTest(TestCase): - def setUp(self): - self.checks = Checks() - self.aws_tree1 = AWSTree( - "AWS Tree", - "12345", - [ - Iteration( - "Fall 2020", - "54321", - [ - SyncData("email1@example.com", "project1", "Fall 2020"), - SyncData("email2@example.com", "project2", "Fall 2020"), - ], - ), - Iteration( - "Spring 2021", - "98765", - [ - SyncData("email3@example.com", "project3", "Spring 2021"), - SyncData("email4@example.com", "project4", "Spring 2021"), - ], - ), - ], - ) - - self.aws_tree2 = AWSTree( - "AWS Tree", - "12345", - [ - Iteration( - "Fall 2020", - "54321", - [ - SyncData("email1@example.com", "project1", "Fall 2020"), - SyncData("email2@example.com", "project2", "Fall 2020"), - ], - ), - Iteration( - "Spring 2021", - "98765", - [ - SyncData("email3@example.com", "project3", "Fall 2021"), - SyncData("email4@example.com", "project4", "Spring 2021"), - ], - ), - ], - ) - - self.aws_tree3 = AWSTree( - "AWS Tree", - "12345", - [ - 
Iteration( - "Fall 2020", - "54321", - [ - SyncData("email1@example.com", "project1", "Fall 2020"), - SyncData("email2@example.com", "project2", "Fall 2020"), - ], - ), - Iteration( - "Fall 2020", - "98765", - [ - SyncData("email3@example.com", "project3", "Fall 2021"), - SyncData("email4@example.com", "project4", "Spring 2021"), - ], - ), - ], - ) - - def test_check_members_in_correct_iteration(self): - # Test when correct - self.assertIsNone(self.checks.check_members_in_correct_iteration(self.aws_tree1)) - - # Test when incorrect - self.assertRaises(Exception, self.checks.check_members_in_correct_iteration, self.aws_tree2) - - def test_check_double_iteration_names(self): - # Test when correct - self.assertIsNone(self.checks.check_double_iteration_names(self.aws_tree1)) - - # Test when double - self.assertRaises(Exception, self.checks.check_double_iteration_names, self.aws_tree3) diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/tests_aws/test_awssync.py similarity index 98% rename from website/projects/tests/test_awssync.py rename to website/projects/tests/tests_aws/test_awssync.py index 0f493b77..64f6938f 100644 --- a/website/projects/tests/test_awssync.py +++ b/website/projects/tests/tests_aws/test_awssync.py @@ -16,7 +16,7 @@ from mailing_lists.models import MailingList -from projects import awssync +from projects.aws import awssync from projects.models import Project @@ -315,7 +315,7 @@ def test_pipeline_preconditions__all_success(self): check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) # Mock return value of check_iam_policy. 
- with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker: mocker.return_value = check_iam_policy success = self.sync.pipeline_preconditions() @@ -350,7 +350,7 @@ def test_pipeline_preconditions__no_iam(self): check_api_actions = self.sync.check_iam_policy(iam_user_arn, desired_actions) # Mock return value of check_iam_policy. - with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker: mocker.return_value = check_api_actions success = self.sync.pipeline_preconditions() @@ -377,7 +377,7 @@ def test_pipeline_preconditions__no_organization(self): check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) # Mock return value of check_iam_policy. - with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker: mocker.return_value = check_iam_policy success = self.sync.pipeline_preconditions() @@ -407,9 +407,9 @@ def test_pipeline_preconditions__no_management(self): check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) # Mock return value of check_iam_policy. - with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam: + with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker_iam: mocker_iam.return_value = check_iam_policy - with patch("projects.awssync.AWSSync.check_aws_api_connection") as mocker_api: + with patch("projects.aws.awssync.AWSSync.check_aws_api_connection") as mocker_api: mocker_api.return_value = True, {"Account": "daddy", "Arn": "01234567890123456789"} success = self.sync.pipeline_preconditions() @@ -440,12 +440,12 @@ def test_pipeline_preconditions__no_scp(self): check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) # Mock return value of check_iam_policy. 
- with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam: + with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker_iam: mocker_iam.return_value = check_iam_policy # Mock return value of check_organization_existence with no SCP policy enabled. organization_info["AvailablePolicyTypes"] = [] - with patch("projects.awssync.AWSSync.check_organization_existence") as mocker: + with patch("projects.aws.awssync.AWSSync.check_organization_existence") as mocker: mocker.return_value = True, organization_info success = self.sync.pipeline_preconditions() @@ -548,7 +548,7 @@ def test_pipeline(self): mocker().simulate_principal_policy.return_value = mock_evaluation_results check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) - with patch("projects.awssync.AWSSync.check_iam_policy") as mocker: + with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker: mocker.return_value = check_iam_policy success = self.sync.pipeline() @@ -815,7 +815,7 @@ def test_pipeline_create_and_move_accounts__email_exists(self): root_id = moto_client.list_roots()["Roots"][0]["Id"] course_iteration_id = self.sync.create_course_iteration_OU("2023Fall") - with patch("projects.awssync.AWSSync.pipeline_create_account") as mocker: + with patch("projects.aws.awssync.AWSSync.pipeline_create_account") as mocker: mocker.return_value = False, "EMAIL_ALREADY_EXISTS" success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) diff --git a/website/projects/tests/tests_aws/test_awssync_checks.py b/website/projects/tests/tests_aws/test_awssync_checks.py new file mode 100644 index 00000000..6a141759 --- /dev/null +++ b/website/projects/tests/tests_aws/test_awssync_checks.py @@ -0,0 +1,183 @@ +"""Tests for awssync/checks.py.""" +from unittest.mock import MagicMock + +from botocore.exceptions import ClientError + +from django.test import TestCase + +from moto import mock_iam, mock_organizations, mock_sts + +from 
projects.aws.awssync_checks import Checks +from projects.aws.awssync_checks_permissions import api_permissions +from projects.aws.awssync_structs import AWSTree, Iteration, SyncData + + +@mock_sts +@mock_organizations +@mock_iam +class ChecksTest(TestCase): + def setUp(self): + self.checks = Checks() + self.aws_tree1 = AWSTree( + "AWS Tree", + "12345", + [ + Iteration( + "Fall 2020", + "54321", + [ + SyncData("email1@example.com", "project1", "Fall 2020"), + SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + Iteration( + "Spring 2021", + "98765", + [ + SyncData("email3@example.com", "project3", "Spring 2021"), + SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree2 = AWSTree( + "AWS Tree", + "12345", + [ + Iteration( + "Fall 2020", + "54321", + [ + SyncData("email1@example.com", "project1", "Fall 2020"), + SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + Iteration( + "Spring 2021", + "98765", + [ + SyncData("email3@example.com", "project3", "Fall 2021"), + SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + self.aws_tree3 = AWSTree( + "AWS Tree", + "12345", + [ + Iteration( + "Fall 2020", + "54321", + [ + SyncData("email1@example.com", "project1", "Fall 2020"), + SyncData("email2@example.com", "project2", "Fall 2020"), + ], + ), + Iteration( + "Fall 2020", + "98765", + [ + SyncData("email3@example.com", "project3", "Fall 2021"), + SyncData("email4@example.com", "project4", "Spring 2021"), + ], + ), + ], + ) + + def test_check_members_in_correct_iteration(self): + # Test when correct + self.assertIsNone(self.checks.check_members_in_correct_iteration(self.aws_tree1)) + + # Test when incorrect + self.assertRaises(Exception, self.checks.check_members_in_correct_iteration, self.aws_tree2) + + def test_check_double_iteration_names(self): + # Test when correct + self.assertIsNone(self.checks.check_double_iteration_names(self.aws_tree1)) + + # Test when double + 
self.assertRaises(Exception, self.checks.check_double_iteration_names, self.aws_tree3) + + def mock_simulate_principal_policy(self, allow: bool, api_operations: list[str]): + return MagicMock( + return_value={ + "EvaluationResults": [ + {"EvalActionName": api_operation_name, "EvalDecision": "allowed" if allow else "implicitDeny"} + for api_operation_name in api_operations + ] + } + ) + + def test_check_aws_api_connection(self): + self.checks.check_aws_api_connection() + + def test_check_iam_policy(self): + self.checks.api_talker.iam_client.simulate_principal_policy = self.mock_simulate_principal_policy( + True, api_permissions + ) + self.checks.check_iam_policy(api_permissions) + + def test_check_iam_policy__exception(self): + self.checks.api_talker.iam_client.simulate_principal_policy = self.mock_simulate_principal_policy( + False, api_permissions + ) + self.assertRaises(Exception, self.checks.check_iam_policy, api_permissions) + + def test_check_organization_existence(self): + self.checks.api_talker.create_organization("ALL") + self.checks.check_organization_existence() + + def test_check_organization_existence__exception(self): + self.assertRaises(ClientError, self.checks.check_organization_existence) + + def test_check_is_management_account(self): + self.checks.api_talker.create_organization("ALL") + self.checks.check_is_management_account() + + def test_check_is_management_account__exception(self): + self.checks.api_talker.create_organization("ALL") + + mock_identity = self.checks.api_talker.sts_client.get_caller_identity() + mock_identity["Account"] = "alice123" + self.checks.api_talker.sts_client.get_caller_identity = MagicMock(return_value=mock_identity) + + self.assertRaises(Exception, self.checks.check_is_management_account) + + def test_check_scp_enabled(self): + self.checks.api_talker.create_organization("ALL") + + self.checks.api_talker.org_client.enable_policy_type( + RootId=self.checks.api_talker.list_roots()[0]["Id"], + 
PolicyType="SERVICE_CONTROL_POLICY", + ) + + self.checks.check_scp_enabled() + + def test_check_scp_enabled__exception(self): + self.checks.api_talker.create_organization("ALL") + + args = { + "RootId": self.checks.api_talker.list_roots()[0]["Id"], + "PolicyType": "SERVICE_CONTROL_POLICY", + } + + self.checks.api_talker.org_client.enable_policy_type(**args) + response = self.checks.api_talker.org_client.disable_policy_type(**args) + + mock_describe_organization = self.checks.api_talker.describe_organization() + mock_describe_organization["Organization"]["AvailablePolicyTypes"] = response["Root"]["PolicyTypes"] + self.checks.api_talker.org_client.describe_organization = MagicMock(return_value=mock_describe_organization) + + self.assertRaises(Exception, self.checks.check_scp_enabled) + + def test_pipeline_preconditions(self): + self.checks.api_talker.create_organization("ALL") + + self.checks.api_talker.iam_client.simulate_principal_policy = self.mock_simulate_principal_policy( + True, api_permissions + ) + + self.checks.pipeline_preconditions(api_permissions) diff --git a/website/projects/tests/test_awssync_structs.py b/website/projects/tests/tests_aws/test_awssync_structs.py similarity index 99% rename from website/projects/tests/test_awssync_structs.py rename to website/projects/tests/tests_aws/test_awssync_structs.py index 8b27840a..3ecb722c 100644 --- a/website/projects/tests/test_awssync_structs.py +++ b/website/projects/tests/tests_aws/test_awssync_structs.py @@ -6,7 +6,7 @@ from courses.models import Semester -from projects import awssync +from projects.aws import awssync class SyncDataTest(TestCase): From f9187c7567fda2ec117aadebd2540085cf8651ea Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Thu, 11 May 2023 10:40:51 +0000 Subject: [PATCH 22/32] Refactor creating course OU and attaching policy (#57) * Add new refactored AWSSync class * Refactored creating course OU and attaching policy * Add coverage unit tests * Add 
additional OU name check to unit test * Replace deprecated unittest method alias * Improve naming coverage unit tests * Fix test to check name and ID for single OU instead of over possibly multiple OUs * Remove not used logger * Rename function to be more accurate in what it does --- website/projects/aws/awssync_refactored.py | 36 ++++++++ .../tests_aws/test_awssync_refactored.py | 84 +++++++++++++++++++ 2 files changed, 120 insertions(+) create mode 100644 website/projects/aws/awssync_refactored.py create mode 100644 website/projects/tests/tests_aws/test_awssync_refactored.py diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py new file mode 100644 index 00000000..2f843eff --- /dev/null +++ b/website/projects/aws/awssync_refactored.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from botocore.exceptions import ClientError + +from courses.models import Semester + +from projects.aws.awsapitalker import AWSAPITalker +from projects.aws.awssync_structs import AWSTree + + +class AWSSyncRefactored: + """Synchronise with Amazon Web Services.""" + + def __init__(self): + """Create an AWSSync instance.""" + self.api_talker = AWSAPITalker() + + def get_or_create_course_ou(self, tree: AWSTree) -> str: + """Create organizational unit under root with name of current semester.""" + root_id = tree.ou_id + course_ou_name = str(Semester.objects.get_or_create_current_semester()) + course_ou_id = next((ou.ou_id for ou in tree.iterations if ou.name == course_ou_name), None) + + if not course_ou_id: + course_ou = self.api_talker.create_organizational_unit(root_id, course_ou_name) + course_ou_id = course_ou["OrganizationalUnit"]["Id"] + + return course_ou_id + + def attach_policy(self, target_id: str, policy_id: str) -> None: + """Attach policy to target resource.""" + try: + self.api_talker.attach_policy(target_id, policy_id) + except ClientError as error: + if error.response["Error"]["Code"] != 
"DuplicatePolicyAttachmentException": + raise diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py new file mode 100644 index 00000000..abf026b0 --- /dev/null +++ b/website/projects/tests/tests_aws/test_awssync_refactored.py @@ -0,0 +1,84 @@ +"""Tests for awssync_refactored.py.""" +import json +from unittest.mock import patch + +from botocore.exceptions import ClientError + +from django.test import TestCase + +from moto import mock_organizations + +from courses.models import Semester + +from projects.aws.awssync_refactored import AWSSyncRefactored +from projects.aws.awssync_structs import AWSTree, Iteration, SyncData + + +@mock_organizations +class AWSSyncRefactoredTest(TestCase): + def setUp(self): + self.sync = AWSSyncRefactored() + + def test_get_or_create_course_ou__new(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] + tree = AWSTree("root", root_id, []) + current_semester_name = "Spring 2023" + + with patch.object(Semester.objects, "get_or_create_current_semester", return_value=current_semester_name): + course_ou_id = self.sync.get_or_create_course_ou(tree) + + course_ou_exists = any( + ou["Id"] == course_ou_id and ou["Name"] == current_semester_name + for ou in self.sync.api_talker.list_organizational_units_for_parent(root_id) + ) + + self.assertTrue(course_ou_exists) + + def test_get_or_create_course_ou__already_exists(self): + tree = AWSTree( + "root", + "r-123", + [ + Iteration("Spring 2023", "ou-456", [SyncData("alice@giphouse.nl", "alices-project", "Spring 2023")]), + Iteration("Fall 2023", "ou-789", [SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023")]), + ], + ) + + with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): + course_ou_id = self.sync.get_or_create_course_ou(tree) + self.assertEqual("ou-456", course_ou_id) + + def 
test_attach_policy__not_attached(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] + + new_policy_content = json.dumps( + {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + ) + new_policy_id = self.sync.api_talker.org_client.create_policy( + Content=new_policy_content, Description="Deny all access.", Name="DenyAll", Type="SERVICE_CONTROL_POLICY" + )["Policy"]["PolicySummary"]["Id"] + + self.sync.attach_policy(root_id, new_policy_id) + attached_policies = self.sync.api_talker.org_client.list_policies_for_target( + TargetId=root_id, Filter="SERVICE_CONTROL_POLICY" + )["Policies"] + attached_policy_ids = [policy["Id"] for policy in attached_policies] + + self.assertIn(new_policy_id, attached_policy_ids) + + def test_attach_policy__caught_exception(self): + # Error code "DuplicatePolicyAttachmentException" can not be simulated by moto, so it is mocked. + attach_policy_hard_side_effect = ClientError( + {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy" + ) + with patch.object( + self.sync.api_talker.org_client, "attach_policy", side_effect=attach_policy_hard_side_effect + ): + return_value = self.sync.attach_policy("r-123", "p-123") + + self.assertIsNone(return_value) + + def test_attach_policy__reraised_exception(self): + self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123") From 32d823012a09023c8864f846e0d611e9f293ac91 Mon Sep 17 00:00:00 2001 From: Henk Berendsen <61596108+hb140502@users.noreply.github.com> Date: Fri, 12 May 2023 13:25:46 +0200 Subject: [PATCH 23/32] Remove unnecessary moto_client variables in test_awsapitalker.py (#56) * Changed moto_client into self.api_talker.org_client * Use class decorators for mocking instead of start and stop functions --- .../tests/tests_aws/test_awsapitalker.py | 34 ++++++------------- 1 file changed, 11 insertions(+), 23 deletions(-) diff --git 
a/website/projects/tests/tests_aws/test_awsapitalker.py b/website/projects/tests/tests_aws/test_awsapitalker.py index 5d6a1c9e..9cf3301e 100644 --- a/website/projects/tests/tests_aws/test_awsapitalker.py +++ b/website/projects/tests/tests_aws/test_awsapitalker.py @@ -1,8 +1,6 @@ import json from unittest.mock import MagicMock, patch -import boto3 - from django.test import TestCase from moto import mock_organizations, mock_sts @@ -10,21 +8,15 @@ from projects.aws import awsapitalker +@mock_organizations +@mock_sts class AWSAPITalkerTest(TestCase): """Test AWSAPITalker class.""" def setUp(self): """Set up testing environment.""" - self.mock_org = mock_organizations() - self.mock_sts = mock_sts() - self.mock_org.start() - self.mock_sts.start() self.api_talker = awsapitalker.AWSAPITalker() - def tearDown(self): - self.mock_org.stop() - self.mock_sts.stop() - def create_organization(self): """Returns the ID of the organization created for testing""" org_info = self.api_talker.create_organization("ALL") @@ -40,11 +32,9 @@ def create_dummy_policy(self): :return: ID of the created policy. 
""" - moto_client = boto3.client("organizations") - policy_content = self.create_dummy_policy_content() - return moto_client.create_policy( + return self.api_talker.org_client.create_policy( Name="Test policy", Content=policy_content, Type="SERVICE_CONTROL_POLICY", @@ -64,8 +54,6 @@ def test_create_organizational_unit(self): self.assertEqual(response["OrganizationalUnit"]["Name"], "Test OU") def test_attach_policy(self): - moto_client = boto3.client("organizations") - org_id = self.create_organization() policy_id = self.create_dummy_policy() @@ -75,7 +63,7 @@ def test_attach_policy(self): self.api_talker.attach_policy(ou_id, policy_id) - response = moto_client.list_policies_for_target(TargetId=ou_id, Filter="SERVICE_CONTROL_POLICY") + response = self.api_talker.org_client.list_policies_for_target(TargetId=ou_id, Filter="SERVICE_CONTROL_POLICY") self.assertIn(policy_id, [p["Id"] for p in response["Policies"]]) def test_get_caller_identity(self): @@ -119,20 +107,16 @@ def test_describe_policy(self): self.assertEquals(policy_content, policy["Content"]) def test_create_account(self): - moto_client = boto3.client("organizations") - self.create_organization() response = self.api_talker.create_account("test@example.com", "Test") - accounts = moto_client.list_accounts()["Accounts"] + accounts = self.api_talker.org_client.list_accounts()["Accounts"] self.assertEquals(response["CreateAccountStatus"]["AccountName"], "Test") self.assertIn(("Test", "test@example.com"), [(account["Name"], account["Email"]) for account in accounts]) def test_move_account(self): - moto_client = boto3.client("organizations") - org_id = self.create_organization() account_status = self.api_talker.create_account("test@example.com", "Test") @@ -145,8 +129,12 @@ def test_move_account(self): self.api_talker.move_account(account_id, source_ou_id, dest_ou_id) - accounts_under_source = moto_client.list_children(ParentId=source_ou_id, ChildType="ACCOUNT")["Children"] - accounts_under_dest = 
moto_client.list_children(ParentId=dest_ou_id, ChildType="ACCOUNT")["Children"] + accounts_under_source = self.api_talker.org_client.list_children(ParentId=source_ou_id, ChildType="ACCOUNT")[ + "Children" + ] + accounts_under_dest = self.api_talker.org_client.list_children(ParentId=dest_ou_id, ChildType="ACCOUNT")[ + "Children" + ] self.assertNotIn(account_id, [account["Id"] for account in accounts_under_source]) self.assertIn(account_id, [account["Id"] for account in accounts_under_dest]) From 54e11dab072ae31c29ccaa147478b980aa135dd5 Mon Sep 17 00:00:00 2001 From: Jer111 <82157107+Jer111@users.noreply.github.com> Date: Tue, 16 May 2023 10:01:55 +0200 Subject: [PATCH 24/32] Refactor generate synchronization list and extract AWS tree * refactored files * removed create_aws_org from refactored * flake8 * Correct refactored functions * Changed extract_aws_setup to be more elegant * rm list_all_mailing_lists and update extract_aws * Changed extract_aws * black * Changed extract_aws to get_values * pydocstring * pydocstring * Black --- website/projects/aws/awssync_refactored.py | 84 +++++++++++- .../tests_aws/test_awssync_refactored.py | 129 ++++++++++++++++++ 2 files changed, 212 insertions(+), 1 deletion(-) diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py index 2f843eff..91358645 100644 --- a/website/projects/aws/awssync_refactored.py +++ b/website/projects/aws/awssync_refactored.py @@ -1,11 +1,16 @@ from __future__ import annotations +import logging + from botocore.exceptions import ClientError from courses.models import Semester +from mailing_lists.models import MailingList + from projects.aws.awsapitalker import AWSAPITalker -from projects.aws.awssync_structs import AWSTree +from projects.aws.awssync_structs import AWSTree, Iteration, SyncData +from projects.models import Project class AWSSyncRefactored: @@ -14,6 +19,83 @@ class AWSSyncRefactored: def __init__(self): """Create an AWSSync instance.""" 
self.api_talker = AWSAPITalker() + self.logger = logging.getLogger("django.aws") + self.logger.setLevel(logging.DEBUG) + self.fail = False + + def get_syncdata_from_giphouse(self) -> list[SyncData]: + """ + Create a list of SyncData struct containing email, slug and semester. + + Slug and semester combined form a uniqueness constraint. + + :return: list of SyncData structs with email, slug and semester + """ + sync_data_list = [] + current_semester = Semester.objects.get_or_create_current_semester() + + for project in Project.objects.filter(mailinglist__isnull=False, semester=current_semester).values( + "slug", "semester", "mailinglist" + ): + project_slug = project["slug"] + project_semester = str(Semester.objects.get(pk=project["semester"])) + project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address + + sync_data = SyncData(project_email, project_slug, project_semester) + sync_data_list.append(sync_data) + return sync_data_list + + def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]) -> list[SyncData]: + """ + Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS. + + This includes their ID and email address, to be able to put users in the correct AWS organization later. + """ + return [project for project in giphouse_data if project not in aws_data] + + def get_tag_value(self, tags: list[dict[str, str]], key: str) -> str: + """Return the value of the tag with the given key, or None if no such tag exists.""" + for tag in tags: + if tag["Key"] == key: + return tag["Value"] + return None + + def extract_aws_setup(self, parent_ou_id: str) -> AWSTree: + """ + Give a list of all the children of the parent OU. + + :param parent_ou_id: The ID of the parent OU. + :return: An AWSTree object containing all the children of the parent OU.
+ """ + aws_tree = AWSTree( + "root", + parent_ou_id, + [ + Iteration( + ou["Name"], + ou["Id"], + member_accounts := [ + SyncData( + account["Email"], + self.get_tag_value(tags, "project_slug"), + self.get_tag_value(tags, "project_semester"), + ) + for account in self.api_talker.list_accounts_for_parent(parent_id=ou["Id"]) + for tags in [self.api_talker.list_tags_for_resource(resource_id=account["Id"])] + ], + ) + for ou in self.api_talker.list_organizational_units_for_parent(parent_id=parent_ou_id) + ], + ) + + incomplete_accounts = [ + account for account in member_accounts if not (account.project_slug and account.project_semester) + ] + + if incomplete_accounts: + raise Exception(f"Found incomplete accounts in AWS: {incomplete_accounts}.") + + return aws_tree def get_or_create_course_ou(self, tree: AWSTree) -> str: """Create organizational unit under root with name of current semester.""" diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py index abf026b0..0346dfb6 100644 --- a/website/projects/tests/tests_aws/test_awssync_refactored.py +++ b/website/projects/tests/tests_aws/test_awssync_refactored.py @@ -2,6 +2,7 @@ import json from unittest.mock import patch + from botocore.exceptions import ClientError from django.test import TestCase @@ -10,14 +11,142 @@ from courses.models import Semester +from mailing_lists.models import MailingList + from projects.aws.awssync_refactored import AWSSyncRefactored from projects.aws.awssync_structs import AWSTree, Iteration, SyncData +from projects.models import Project @mock_organizations class AWSSyncRefactoredTest(TestCase): def setUp(self): + """Set up testing environment.""" self.sync = AWSSyncRefactored() + self.api_talker = self.sync.api_talker + + def test_get_syncdata_from_giphouse_normal(self): + """Test get_emails_with_teamids function in optimal conditions.""" + self.semester = Semester.objects.create(year=2023, 
season=Semester.SPRING) + for i in range(3): + self.mailing_list = MailingList.objects.create(address="test" + str(i)) + self.project = Project.objects.create( + id=i, name="test" + str(i), semester=self.semester, slug="test" + str(i) + ) + self.mailing_list.projects.add(self.project) + + email_id = self.sync.get_syncdata_from_giphouse() + + self.assertIsInstance(email_id, list) + self.assertIsInstance(email_id[0], SyncData) + expected_result = [ + SyncData("test0@giphouse.nl", "test0", "Spring 2023"), + SyncData("test1@giphouse.nl", "test1", "Spring 2023"), + SyncData("test2@giphouse.nl", "test2", "Spring 2023"), + ] + self.assertEqual(email_id, expected_result) + + def test_get_syncdata_from_giphouse_no_project(self): + """Test get_syncdata_from_giphouse function where the mailinglist is not assigned to a project.""" + MailingList.objects.all().delete() + self.mailing_list = MailingList.objects.create(address="test2") + email_id = self.sync.get_syncdata_from_giphouse() + self.assertIsInstance(email_id, list) + self.assertEqual(email_id, []) + + def test_get_syncdata_from_giphouse_no_mailing_list(self): + """Test get_syncdata_from_giphouse function where no mailinglists exist.""" + MailingList.objects.all().delete() + Project.objects.all().delete() + email_id = self.sync.get_syncdata_from_giphouse() + self.assertIsInstance(email_id, list) + self.assertEqual(email_id, []) + + def test_get_syncdata_from_giphouse_different_semester(self): + """Test get_syncdata_from_giphouse function where the semester is not equal to the current semester.""" + MailingList.objects.all().delete() + new_semester = Semester.objects.create(year=2022, season=Semester.FALL) + self.mailing_list = MailingList.objects.create(address="test4") + self.project = Project.objects.create(id=4, name="test4", semester=new_semester, slug="test4") + self.mailing_list.projects.add(self.project) + email_id = self.sync.get_syncdata_from_giphouse() + self.assertIsInstance(email_id, list) + self.assertEqual(email_id,
[]) + + def test_AWS_sync_list_both_empty(self): + gip_list = [] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_empty_AWS(self): + test1 = SyncData("test1@test1.test1", "test1", "test1") + test2 = SyncData("test2@test2.test2", "test2", "test2") + gip_list = [test1, test2] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list) + + def test_AWS_sync_list_empty_GiP(self): + test1 = SyncData("test1@test1.test1", "test1", "test1") + test2 = SyncData("test2@test2.test2", "test2", "test2") + gip_list = [] + aws_list = [test1, test2] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_both_full(self): + test1 = SyncData("test1@test1.test1", "test1", "test1") + test2 = SyncData("test2@test2.test2", "test2", "test2") + test3 = SyncData("test3@test3.test3", "test3", "test3") + gip_list = [test1, test2] + aws_list = [test2, test3] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [test1]) + + def test_get_tag_value(self): + tags = [{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}] + self.assertEquals(self.sync.get_tag_value(tags, "project_semester"), "2021") + self.assertEquals(self.sync.get_tag_value(tags, "project_slug"), "test1") + self.assertEquals(self.sync.get_tag_value(tags, "project_name"), None) + + def test_extract_aws_setup(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.api_talker.list_roots()[0]["Id"] + + ou_response = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1") + ou_id = ou_response["OrganizationalUnit"]["Id"] + + account_response = self.api_talker.create_account( + email="account_1@gmail.com", + account_name="account_1", + tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}], + ) + account_id = 
account_response["CreateAccountStatus"]["AccountId"] + self.api_talker.move_account(account_id=account_id, source_parent_id=root_id, dest_parent_id=ou_id) + + aws_tree = self.sync.extract_aws_setup(root_id) + + expected_sync_data = [SyncData("account_1@gmail.com", "test1", "2021")] + expected_iteration = Iteration("OU_1", ou_id, expected_sync_data) + expected_tree = AWSTree("root", root_id, [expected_iteration]) + + self.assertEqual(aws_tree, expected_tree) + + def test_extract_aws_setup_no_slugs(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.api_talker.list_roots()[0]["Id"] + + response_OU_1 = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1") + OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] + response_account_1 = self.api_talker.create_account( + email="account_1@gmail.com", + account_name="account_1", + tags=[], + ) + account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] + + self.api_talker.move_account(account_id=account_id_1, source_parent_id=root_id, dest_parent_id=OU_1_id) + + with self.assertRaises(Exception) as context: + self.sync.extract_aws_setup(root_id) + self.assertIn("Found incomplete accounts in AWS", str(context.exception)) def test_get_or_create_course_ou__new(self): self.sync.api_talker.create_organization(feature_set="ALL") From 497abc990b9e01ac1cff61f5f5a333f966a861cc Mon Sep 17 00:00:00 2001 From: flam123 Date: Tue, 23 May 2023 13:10:24 +0200 Subject: [PATCH 25/32] Refactor create and move accounts function * Changed comments * Refactored create move * Refactored create_account * Changed test * Added my functions * Add api talker function for describe_create_account_status * Deleted create_account function * Added test * Fix describe_create_account_status docstring * Changed describe function * Finished basic test * Finished refactor create move * Black formatting * Duplicate code * Finished refactor --------- Co-authored-by: Henk ---
website/projects/aws/awsapitalker.py | 9 ++ website/projects/aws/awssync_refactored.py | 66 ++++++++++++++ .../tests/tests_aws/test_awsapitalker.py | 11 +++ .../tests_aws/test_awssync_refactored.py | 90 +++++++++++++++++++ 4 files changed, 176 insertions(+) diff --git a/website/projects/aws/awsapitalker.py b/website/projects/aws/awsapitalker.py index 308a07da..edff72ed 100644 --- a/website/projects/aws/awsapitalker.py +++ b/website/projects/aws/awsapitalker.py @@ -160,3 +160,12 @@ def list_roots(self) -> list[dict]: page_iterator = paginator.paginate() return self.combine_pages(page_iterator, "Roots") + + def describe_create_account_status(self, create_account_request_id: str) -> dict: + """ + Describe the status of the given account creation request. + + :param create_account_request_id: ID of the account creation request to be described. + :return: dictionary containing account creation status information. + """ + return self.org_client.describe_create_account_status(CreateAccountRequestId=create_account_request_id) diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py index 91358645..fdbf7588 100644 --- a/website/projects/aws/awssync_refactored.py +++ b/website/projects/aws/awssync_refactored.py @@ -1,6 +1,7 @@ from __future__ import annotations import logging +import time from botocore.exceptions import ClientError @@ -23,6 +24,12 @@ def __init__(self): self.logger.setLevel(logging.DEBUG) self.fail = False + self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 2 + self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3 + + self.accounts_created = 0 + self.accounts_moved = 0 + def get_syncdata_from_giphouse(self) -> list[SyncData]: """ Create a list of SyncData struct containing email, slug and semester. 
@@ -116,3 +123,62 @@ def attach_policy(self, target_id: str, policy_id: str) -> None: except ClientError as error: if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException": raise + + def create_and_move_accounts( + self, new_member_accounts: list[SyncData], root_id: str, destination_ou_id: str + ) -> bool: + """ + Create multiple accounts in the organization of the API caller and move them from the root to a destination OU. + + :param new_member_accounts: List of SyncData objects. + :param root_id: The organization's root ID. + :param destination_ou_id: The organization's destination OU ID. + :returns: True iff **all** new member accounts were created and moved successfully. + """ + for new_member in new_member_accounts: + # Create member account + response = self.api_talker.create_account( + new_member.project_email, + new_member.project_slug, + [ + {"Key": "project_slug", "Value": new_member.project_slug}, + {"Key": "project_semester", "Value": new_member.project_semester}, + ], + ) + # Repeatedly check status of new member account request. 
+ request_id = response["CreateAccountStatus"]["Id"] + + for _ in range(self.ACCOUNT_REQUEST_MAX_ATTEMPTS): + time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS) + + try: + response_status = self.api_talker.describe_create_account_status(request_id) + except ClientError as error: + self.logger.debug(error) + self.logger.debug(f"Failed to get status of account with e-mail: '{new_member.project_email}'.") + break + + request_state = response_status["CreateAccountStatus"]["State"] + if request_state == "SUCCEEDED": + account_id = response_status["CreateAccountStatus"]["AccountId"] + + self.accounts_created += 1 + try: + self.api_talker.move_account(account_id, root_id, destination_ou_id) + self.accounts_moved += 1 + except ClientError as error: + self.logger.debug(error) + self.logger.debug(f"Failed to move account with e-mail: {new_member.project_email}.") + break + + elif request_state == "FAILED": + failure_reason = response_status["CreateAccountStatus"]["FailureReason"] + self.logger.debug( + f"Failed to create account with e-mail: {new_member.project_email}. 
" + f"Failure reason: {failure_reason}" + ) + break + + accounts_to_create = len(new_member_accounts) + success = accounts_to_create == self.accounts_created == self.accounts_moved + return success diff --git a/website/projects/tests/tests_aws/test_awsapitalker.py b/website/projects/tests/tests_aws/test_awsapitalker.py index 9cf3301e..dd7747ea 100644 --- a/website/projects/tests/tests_aws/test_awsapitalker.py +++ b/website/projects/tests/tests_aws/test_awsapitalker.py @@ -183,3 +183,14 @@ def test_list_roots(self): roots = self.api_talker.list_roots() self.assertTrue(len(roots) == 1) + + def test_describe_create_account_status(self): + self.create_organization() + + account = self.api_talker.create_account("test@example.com", "Test") + account_id = account["CreateAccountStatus"]["Id"] + + request = self.api_talker.describe_create_account_status(account_id) + request_state = request["CreateAccountStatus"]["State"] + + self.assertEqual(request_state, "SUCCEEDED") diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py index 0346dfb6..75157013 100644 --- a/website/projects/tests/tests_aws/test_awssync_refactored.py +++ b/website/projects/tests/tests_aws/test_awssync_refactored.py @@ -211,3 +211,93 @@ def test_attach_policy__caught_exception(self): def test_attach_policy__reraised_exception(self): self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123") + + def test_create_move_account(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] + + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + ] + + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + 
self.assertTrue(success) + + def test_create_move_account__exception_failure(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] + + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + ] + + with patch.object(self.sync.api_talker, "move_account", side_effect=ClientError({}, "move_account")): + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + + self.assertFalse(success) + + def test_create_move_account__no_move(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] + + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + ] + + with patch.object( + self.sync.api_talker, + "describe_create_account_status", + side_effect=ClientError({}, "describe_create_account_status"), + ): + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + + self.assertFalse(success) + + def test_create_move_account__failed(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] + + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("alice@giphouse.nl", "bobs-project", "Fall 2023"), + ] + + with patch.object( + self.sync.api_talker.org_client, + "describe_create_account_status", + return_value={"CreateAccountStatus": 
{"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}}, + ): + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + + self.assertFalse(success) + + def test_create_move_account__in_progress(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] + + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + ] + + with patch.object( + self.sync.api_talker.org_client, + "describe_create_account_status", + return_value={"CreateAccountStatus": {"State": "IN_PROGRESS"}}, + ): + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + + self.assertFalse(success) From 301e711535c4c81626088e293130458019d56af0 Mon Sep 17 00:00:00 2001 From: Jer111 <82157107+Jer111@users.noreply.github.com> Date: Tue, 30 May 2023 10:35:07 +0200 Subject: [PATCH 26/32] 62 policy id and tag fields on frontend panel (#65) * Added aws policy field on admin page * Added view paremeters for aws policies * Added a '.' 
to satisfy linting * Updated the AWS policy model * Added get_current_policy_id function * Satisfy docstring * Added test for save function in AWSPolicy model * Added test case for the save AWSPolicy model * Added tags values in aws policies * Updated test_get_policy_id * Changed GiPHouse name and get_policy name --- website/projects/admin.py | 10 ++++++- website/projects/aws/awssync_refactored.py | 9 +++++- website/projects/migrations/0007_awspolicy.py | 25 ++++++++++++++++ .../0008_awspolicy_is_current_policy.py | 18 ++++++++++++ ...ename_name_awspolicy_policy_id_and_more.py | 27 +++++++++++++++++ ...er_awspolicy_is_current_policy_and_more.py | 23 +++++++++++++++ .../0011_alter_awspolicy_is_current_policy.py | 21 ++++++++++++++ .../0012_alter_awspolicy_is_current_policy.py | 21 ++++++++++++++ ...spolicy_no_permissions_at_root_and_more.py | 27 +++++++++++++++++ ...icy_tags_key_alter_awspolicy_tags_value.py | 23 +++++++++++++++ .../0015_alter_awspolicy_tags_key.py | 18 ++++++++++++ website/projects/models.py | 29 +++++++++++++++++++ website/projects/tests/test_models.py | 22 +++++++++++++- .../tests_aws/test_awssync_refactored.py | 19 +++++++++++- 14 files changed, 288 insertions(+), 4 deletions(-) create mode 100644 website/projects/migrations/0007_awspolicy.py create mode 100644 website/projects/migrations/0008_awspolicy_is_current_policy.py create mode 100644 website/projects/migrations/0009_rename_name_awspolicy_policy_id_and_more.py create mode 100644 website/projects/migrations/0010_alter_awspolicy_is_current_policy_and_more.py create mode 100644 website/projects/migrations/0011_alter_awspolicy_is_current_policy.py create mode 100644 website/projects/migrations/0012_alter_awspolicy_is_current_policy.py create mode 100644 website/projects/migrations/0013_remove_awspolicy_no_permissions_at_root_and_more.py create mode 100644 website/projects/migrations/0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value.py create mode 100644 
website/projects/migrations/0015_alter_awspolicy_tags_key.py diff --git a/website/projects/admin.py b/website/projects/admin.py index 1cc2e032..207d9df1 100644 --- a/website/projects/admin.py +++ b/website/projects/admin.py @@ -15,7 +15,7 @@ from projects.aws.awssync import AWSSync from projects.forms import ProjectAdminForm, RepositoryInlineForm from projects.githubsync import GitHubSync -from projects.models import Client, Project, Repository +from projects.models import AWSPolicy, Client, Project, Repository from registrations.models import Employee @@ -197,3 +197,11 @@ class ClientAdmin(admin.ModelAdmin): """Custom admin for clients.""" search_fields = ("name",) + + +@admin.register(AWSPolicy) +class AWSPolicyAdmin(admin.ModelAdmin): + """Custom admin for AWS Policies.""" + + list_display = ["policy_id", "tags_key", "tags_value", "is_current_policy"] + search_fields = ("policy_id",) diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py index fdbf7588..0bbcc1d4 100644 --- a/website/projects/aws/awssync_refactored.py +++ b/website/projects/aws/awssync_refactored.py @@ -11,7 +11,7 @@ from projects.aws.awsapitalker import AWSAPITalker from projects.aws.awssync_structs import AWSTree, Iteration, SyncData -from projects.models import Project +from projects.models import AWSPolicy, Project class AWSSyncRefactored: @@ -124,6 +124,13 @@ def attach_policy(self, target_id: str, policy_id: str) -> None: if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException": raise + def get_current_policy_id(self) -> str: + """Get the current policy stored on the GiPHouse website.""" + for policy in AWSPolicy.objects.all(): + if policy.is_current_policy: + return policy.policy_id + raise Exception("No current policy found") + def create_and_move_accounts( self, new_member_accounts: list[SyncData], root_id: str, destination_ou_id: str ) -> bool: diff --git a/website/projects/migrations/0007_awspolicy.py
b/website/projects/migrations/0007_awspolicy.py new file mode 100644 index 00000000..61ed77b2 --- /dev/null +++ b/website/projects/migrations/0007_awspolicy.py @@ -0,0 +1,25 @@ +# Generated by Django 4.1.3 on 2023-05-25 14:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0006_alter_project_unique_together_project_slug_and_more"), + ] + + operations = [ + migrations.CreateModel( + name="AWSPolicy", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=50)), + ("tags", models.TextField()), + ], + options={ + "verbose_name": "AWS Policy", + "verbose_name_plural": "AWS Policies", + }, + ), + ] diff --git a/website/projects/migrations/0008_awspolicy_is_current_policy.py b/website/projects/migrations/0008_awspolicy_is_current_policy.py new file mode 100644 index 00000000..8a9f67e9 --- /dev/null +++ b/website/projects/migrations/0008_awspolicy_is_current_policy.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.3 on 2023-05-25 14:42 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0007_awspolicy"), + ] + + operations = [ + migrations.AddField( + model_name="awspolicy", + name="is_current_policy", + field=models.BooleanField(default=False, unique=True), + ), + ] diff --git a/website/projects/migrations/0009_rename_name_awspolicy_policy_id_and_more.py b/website/projects/migrations/0009_rename_name_awspolicy_policy_id_and_more.py new file mode 100644 index 00000000..4bd848a9 --- /dev/null +++ b/website/projects/migrations/0009_rename_name_awspolicy_policy_id_and_more.py @@ -0,0 +1,27 @@ +# Generated by Django 4.1.3 on 2023-05-26 09:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0008_awspolicy_is_current_policy"), + ] + + operations = [ + 
migrations.RenameField( + model_name="awspolicy", + old_name="name", + new_name="policy_id", + ), + migrations.RemoveField( + model_name="awspolicy", + name="tags", + ), + migrations.AddField( + model_name="awspolicy", + name="no_permissions_at_root", + field=models.CharField(default="", max_length=50), + ), + ] diff --git a/website/projects/migrations/0010_alter_awspolicy_is_current_policy_and_more.py b/website/projects/migrations/0010_alter_awspolicy_is_current_policy_and_more.py new file mode 100644 index 00000000..1f4a57a7 --- /dev/null +++ b/website/projects/migrations/0010_alter_awspolicy_is_current_policy_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 4.1.3 on 2023-05-26 09:29 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0009_rename_name_awspolicy_policy_id_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="awspolicy", + name="is_current_policy", + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name="awspolicy", + name="no_permissions_at_root", + field=models.CharField(max_length=50), + ), + ] diff --git a/website/projects/migrations/0011_alter_awspolicy_is_current_policy.py b/website/projects/migrations/0011_alter_awspolicy_is_current_policy.py new file mode 100644 index 00000000..6ce5667f --- /dev/null +++ b/website/projects/migrations/0011_alter_awspolicy_is_current_policy.py @@ -0,0 +1,21 @@ +# Generated by Django 4.1.3 on 2023-05-26 09:31 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0010_alter_awspolicy_is_current_policy_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="awspolicy", + name="is_current_policy", + field=models.BooleanField( + default=False, + help_text="Attention: When saving this policy, all other policies will be set to 'not current'!", + ), + ), + ] diff --git 
a/website/projects/migrations/0012_alter_awspolicy_is_current_policy.py b/website/projects/migrations/0012_alter_awspolicy_is_current_policy.py new file mode 100644 index 00000000..539fb0f9 --- /dev/null +++ b/website/projects/migrations/0012_alter_awspolicy_is_current_policy.py @@ -0,0 +1,21 @@ +# Generated by Django 4.1.3 on 2023-05-26 09:32 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0011_alter_awspolicy_is_current_policy"), + ] + + operations = [ + migrations.AlterField( + model_name="awspolicy", + name="is_current_policy", + field=models.BooleanField( + default=False, + help_text="Attention: When saving this policy with 'is current policy' checked, all other policies will be set to 'not current'!", + ), + ), + ] diff --git a/website/projects/migrations/0013_remove_awspolicy_no_permissions_at_root_and_more.py b/website/projects/migrations/0013_remove_awspolicy_no_permissions_at_root_and_more.py new file mode 100644 index 00000000..b4fdcdb5 --- /dev/null +++ b/website/projects/migrations/0013_remove_awspolicy_no_permissions_at_root_and_more.py @@ -0,0 +1,27 @@ +# Generated by Django 4.1.3 on 2023-05-26 11:39 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0012_alter_awspolicy_is_current_policy"), + ] + + operations = [ + migrations.RemoveField( + model_name="awspolicy", + name="no_permissions_at_root", + ), + migrations.AddField( + model_name="awspolicy", + name="tags_key", + field=models.CharField(default="", max_length=50), + ), + migrations.AddField( + model_name="awspolicy", + name="tags_value", + field=models.CharField(default="", max_length=50), + ), + ] diff --git a/website/projects/migrations/0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value.py b/website/projects/migrations/0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value.py new file mode 100644 index 00000000..6bb5be98 --- /dev/null 
+++ b/website/projects/migrations/0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value.py @@ -0,0 +1,23 @@ +# Generated by Django 4.1.3 on 2023-05-26 11:42 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0013_remove_awspolicy_no_permissions_at_root_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="awspolicy", + name="tags_key", + field=models.CharField(blank=True, default="", max_length=50), + ), + migrations.AlterField( + model_name="awspolicy", + name="tags_value", + field=models.CharField(blank=True, default="", max_length=50), + ), + ] diff --git a/website/projects/migrations/0015_alter_awspolicy_tags_key.py b/website/projects/migrations/0015_alter_awspolicy_tags_key.py new file mode 100644 index 00000000..d029a7a2 --- /dev/null +++ b/website/projects/migrations/0015_alter_awspolicy_tags_key.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.3 on 2023-05-26 11:51 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value"), + ] + + operations = [ + migrations.AlterField( + model_name="awspolicy", + name="tags_key", + field=models.CharField(default="", max_length=50), + ), + ] diff --git a/website/projects/models.py b/website/projects/models.py index c5195529..62645b57 100644 --- a/website/projects/models.py +++ b/website/projects/models.py @@ -8,6 +8,35 @@ from registrations.models import Employee +class AWSPolicy(models.Model): + """AWS global policy id and tags submission fields.""" + + class Meta: + """Meta class for AWSPolicy model.""" + + verbose_name = "AWS Policy" + verbose_name_plural = "AWS Policies" + + policy_id = models.CharField(max_length=50, unique=False, null=False, blank=False) + tags_key = models.CharField(max_length=50, unique=False, default="", null=False, blank=False) + tags_value = models.CharField(max_length=50, 
unique=False, default="", null=False, blank=True) + is_current_policy = models.BooleanField( + default=False, + help_text="Attention: When saving this policy with 'is current policy' checked" + + ", all other policies will be set to 'not current'!", + ) + + def save(self, *args, **kwargs): + """Save method for AWSPolicy model.""" + if self.is_current_policy: + AWSPolicy.objects.all().update(**{"is_current_policy": False}) + super(AWSPolicy, self).save(*args, **kwargs) + + def __str__(self): + """Return policy id.""" + return f"{self.policy_id}" + + class Client(models.Model): """Project client with logo.""" diff --git a/website/projects/tests/test_models.py b/website/projects/tests/test_models.py index 550878b9..80675726 100644 --- a/website/projects/tests/test_models.py +++ b/website/projects/tests/test_models.py @@ -5,7 +5,7 @@ from courses.models import Course, Semester from projects import githubsync -from projects.models import Project, ProjectToBeDeleted, Repository, RepositoryToBeDeleted +from projects.models import AWSPolicy, Project, ProjectToBeDeleted, Repository, RepositoryToBeDeleted from registrations.models import Employee, Registration @@ -112,3 +112,23 @@ def test_number_of_repos(self): Repository.objects.create(name="testrepository1", project=project) Repository.objects.create(name="testrepository2", project=project) self.assertEqual(project.number_of_repos, 2) + + +class AWSPolicySaveTest(TestCase): + def test_save_method_with_existing_current_policy(self): + existing_policy = AWSPolicy.objects.create(is_current_policy=True) + new_policy = AWSPolicy(is_current_policy=True) + new_policy.save() + existing_policy.refresh_from_db() + self.assertFalse(existing_policy.is_current_policy) + self.assertTrue(new_policy.is_current_policy) + + def test_save_method_without_existing_current_policy_false(self): + policy = AWSPolicy(is_current_policy=False) + policy.save() + self.assertFalse(policy.is_current_policy) + + def 
test_save_method_without_existing_current_policy_true(self): + policy = AWSPolicy(is_current_policy=True) + policy.save() + self.assertTrue(policy.is_current_policy) diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py index 75157013..52619820 100644 --- a/website/projects/tests/tests_aws/test_awssync_refactored.py +++ b/website/projects/tests/tests_aws/test_awssync_refactored.py @@ -15,7 +15,7 @@ from projects.aws.awssync_refactored import AWSSyncRefactored from projects.aws.awssync_structs import AWSTree, Iteration, SyncData -from projects.models import Project +from projects.models import AWSPolicy, Project @mock_organizations @@ -212,6 +212,23 @@ def test_attach_policy__caught_exception(self): def test_attach_policy__reraised_exception(self): self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123") + def test_get_current_policy_id(self): + self.policy_id1 = AWSPolicy.objects.create( + policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False + ) + self.policy_id2 = AWSPolicy.objects.create( + policy_id="Test-Policy2", tags_key="Test-Policy-Id2", is_current_policy=True + ) + current_policy_id = self.sync.get_current_policy_id() + self.assertIsInstance(current_policy_id, str) + self.assertEqual(current_policy_id, self.policy_id2.policy_id) + + def test_get_current_policy__no_current_policy_id(self): + self.policy_id1 = AWSPolicy.objects.create( + policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False + ) + self.assertRaises(Exception, self.sync.get_current_policy_id) + def test_create_move_account(self): self.sync.api_talker.create_organization(feature_set="ALL") root_id = self.sync.api_talker.list_roots()[0]["Id"] From 4ddf785d35f171d2ba53a7e19e9c6b9768da1c90 Mon Sep 17 00:00:00 2001 From: Jer111 <82157107+Jer111@users.noreply.github.com> Date: Fri, 2 Jun 2023 14:13:38 +0200 Subject: [PATCH 27/32] Refactor AWSSync pipeline 
(#67) * Changed comments * Refactored create move * Refractored create_account * Changed test * Added my functions * Add api talker function for describe_create_account_status * Deleted create_account function * Added test * Fix describe_create_account_status docstring * Changed describe function * Finished basic test * Create structure of refactored pipeline * Finished refactor create move * Black formattting * Duplicate code * Reorganize files and complete merge * Refactor according to finished create_and_move_account, aws data extraction * first attempt at printing an error box when something fails in the pipeline (functional but not too pretty). * Hotfixes pipeline for working sprint 3 demo * Add pipeline test cases * Fix 100% code coverage * Fix tests for error box in pipeline * Added the current policy to the pipeline * Add API talker function and test for untag_resource AWS API call * Remove most debug messages when testing * Rename awssync_refactored to awssync * Increase sleep time between requesting account info * Implement pull request feedback --------- Co-authored-by: Fouad Lamsettef Co-authored-by: Henk Co-authored-by: Mitchell Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> --- website/projects/admin.py | 2 +- website/projects/aws/awsapitalker.py | 9 + website/projects/aws/awssync.py | 675 +++------- website/projects/aws/awssync_refactored.py | 191 --- .../tests/tests_aws/test_awsapitalker.py | 17 + .../projects/tests/tests_aws/test_awssync.py | 1178 +++++------------ .../tests/tests_aws/test_awssync_checks.py | 3 + .../tests_aws/test_awssync_refactored.py | 320 ----- .../tests/tests_aws/test_awssync_structs.py | 25 - 9 files changed, 513 insertions(+), 1907 deletions(-) delete mode 100644 website/projects/aws/awssync_refactored.py delete mode 100644 website/projects/tests/tests_aws/test_awssync_refactored.py diff --git a/website/projects/admin.py b/website/projects/admin.py index 207d9df1..74ea5802 100644 --- 
a/website/projects/admin.py +++ b/website/projects/admin.py @@ -175,7 +175,7 @@ def synchronise_current_projects_to_GitHub(self, request): def synchronise_to_AWS(self, request): """Synchronise to Amazon Web Services.""" sync = AWSSync() - sync.button_pressed() + sync.synchronise(request) return redirect("admin:projects_project_changelist") def get_urls(self): diff --git a/website/projects/aws/awsapitalker.py b/website/projects/aws/awsapitalker.py index edff72ed..32d6ad21 100644 --- a/website/projects/aws/awsapitalker.py +++ b/website/projects/aws/awsapitalker.py @@ -169,3 +169,12 @@ def describe_create_account_status(self, create_account_request_id: str) -> dict :return: dictionary containing account creation status information. """ return self.org_client.describe_create_account_status(CreateAccountRequestId=create_account_request_id) + + def untag_resource(self, resource_id: str, tag_keys: list[str]): + """ + Remove tags with specified keys from the resource with the specified ID. + + :param resource_id: the resource from which tags should be removed. + :param tag_keys: the keys of the tags to be removed. 
+ """ + return self.org_client.untag_resource(ResourceId=resource_id, TagKeys=tag_keys) diff --git a/website/projects/aws/awssync.py b/website/projects/aws/awssync.py index 45a2a301..6939751c 100644 --- a/website/projects/aws/awssync.py +++ b/website/projects/aws/awssync.py @@ -1,22 +1,21 @@ -"""Framework for synchronisation with Amazon Web Services (AWS).""" from __future__ import annotations -import json import logging import time -import boto3 - from botocore.exceptions import ClientError -from botocore.exceptions import NoCredentialsError + +from django.contrib import messages from courses.models import Semester from mailing_lists.models import MailingList +from projects.aws.awsapitalker import AWSAPITalker from projects.aws.awssync_checks import Checks +from projects.aws.awssync_checks_permissions import api_permissions from projects.aws.awssync_structs import AWSTree, Iteration, SyncData -from projects.models import Project +from projects.models import AWSPolicy, Project class AWSSync: @@ -24,95 +23,26 @@ class AWSSync: def __init__(self): """Create an AWSSync instance.""" - self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 5 - self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3 - + self.api_talker = AWSAPITalker() + self.checker = Checks() self.logger = logging.getLogger("django.aws") self.logger.setLevel(logging.DEBUG) - self.org_info = None - self.iterationOU_info = None - self.policy_id = "p-examplepolicyid111" - self.fail = False - self.required_aws_actions = [ - # "organizations:AcceptHandshake", - "organizations:AttachPolicy", - # "organizations:CancelHandshake", - # "organizations:CloseAccount", - "organizations:CreateAccount", - # "organizations:CreateGovCloudAccount", - "organizations:CreateOrganization", - "organizations:CreateOrganizationalUnit", - "organizations:CreatePolicy", - # "organizations:DeclineHandshake", - # "organizations:DeleteOrganization", - "organizations:DeleteOrganizationalUnit", - "organizations:DeletePolicy", - "organizations:DeleteResourcePolicy", - 
# "organizations:DeregisterDelegatedAdministrator", - "organizations:DescribeAccount", - "organizations:DescribeCreateAccountStatus", - "organizations:DescribeEffectivePolicy", - # "organizations:DescribeHandshake", - "organizations:DescribeOrganization", - "organizations:DescribeOrganizationalUnit", - "organizations:DescribePolicy", - "organizations:DescribeResourcePolicy", - "organizations:DetachPolicy", - # "organizations:DisableAWSServiceAccess", - "organizations:DisablePolicyType", - # "organizations:EnableAWSServiceAccess", - # "organizations:EnableAllFeatures", - "organizations:EnablePolicyType", - # "organizations:InviteAccountToOrganization", - # "organizations:LeaveOrganization", - # "organizations:ListAWSServiceAccessForOrganization", - "organizations:ListAccounts", - "organizations:ListAccountsForParent", - "organizations:ListChildren", - "organizations:ListCreateAccountStatus", - # "organizations:ListDelegatedAdministrators", - # "organizations:ListDelegatedServicesForAccount", - # "organizations:ListHandshakesForAccount", - # "organizations:ListHandshakesForOrganization", - "organizations:ListOrganizationalUnitsForParent", - "organizations:ListParents", - "organizations:ListPolicies", - "organizations:ListPoliciesForTarget", - "organizations:ListRoots", - "organizations:ListTagsForResource", - "organizations:ListTargetsForPolicy", - "organizations:MoveAccount", - "organizations:PutResourcePolicy", - # "organizations:RegisterDelegatedAdministrator", - # "organizations:RemoveAccountFromOrganization", - "organizations:TagResource", - "organizations:UntagResource", - "organizations:UpdateOrganizationalUnit", - "organizations:UpdatePolicy", - ] - self.logger.info("Created AWSSync instance.") - - def button_pressed(self): - """ - Print debug message to show that the button has been pressed. 
- :return: True if function executes successfully - """ - self.logger.info("Pressed button") - self.logger.debug(f"Pipeline result: {self.pipeline()}") - return True + self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 5 + self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3 - def get_all_mailing_lists(self): - """ - Get all mailing lists from the database. + self.accounts_created = 0 + self.accounts_moved = 0 + self.accounts_to_create = 0 - :return: List of mailing lists - """ - mailing_lists = MailingList.objects.all() - mailing_list_names = [ml.email_address for ml in mailing_lists] - return mailing_list_names + self.SUCCESS_MSG = "Successfully synchronized all projects to AWS." + self.FAIL_MSG = "Not all accounts were created and moved successfully. Check the console for more information." + self.API_ERROR_MSG = "An error occurred while calling the AWS API. Check the console for more information." + self.SYNC_ERROR_MSG = ( + "An error occurred during synchronization with AWS. Check the console for more information" + ) - def get_emails_with_teamids(self): + def get_syncdata_from_giphouse(self) -> list[SyncData]: """ Create a list of SyncData struct containing email, slug and semester. 
@@ -120,325 +50,103 @@ def get_emails_with_teamids(self): :return: list of SyncData structs with email, slug and semester """ - email_ids = [] + sync_data_list = [] + current_semester = Semester.objects.get_or_create_current_semester() - for project in ( - Project.objects.filter(mailinglist__isnull=False) - .filter(semester=Semester.objects.get_or_create_current_semester()) - .values("slug", "semester", "mailinglist") + for project in Project.objects.filter(mailinglist__isnull=False, semester=current_semester).values( + "slug", "semester", "mailinglist" ): project_slug = project["slug"] project_semester = str(Semester.objects.get(pk=project["semester"])) project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address sync_data = SyncData(project_email, project_slug, project_semester) - email_ids.append(sync_data) - return email_ids + sync_data_list.append(sync_data) + return sync_data_list - def create_aws_organization(self): - """Create an AWS organization with the current user as the management account.""" - client = boto3.client("organizations") - try: - response = client.create_organization(FeatureSet="ALL") - self.org_info = response["Organization"] - self.logger.info("Created an AWS organization and saved organization info.") - except ClientError as error: - self.fail = True - self.logger.error("Something went wrong creating an AWS organization.") - self.logger.debug(f"{error}") - self.logger.debug(f"{error.response}") - - def create_course_iteration_OU(self, iteration_name): - """ - Create an OU for the course iteration. - - :param iteration_name: The name of the course iteration OU - - :return: The ID of the OU - """ - client = boto3.client("organizations") - if self.org_info is None: - self.logger.info("No organization info found. 
Creating an AWS organization.") - self.fail = True - else: - try: - root_id = client.list_roots()["Roots"][0]["Id"] - response = client.create_organizational_unit( - ParentId=root_id, - Name=iteration_name, - ) - self.logger.info(f"Created an OU for course iteration {iteration_name}.") - self.iterationOU_info = response["OrganizationalUnit"] - return response["OrganizationalUnit"]["Id"] - except ClientError as error: - self.fail = True - self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_name}.") - self.logger.debug(f"{error}") - self.logger.debug(f"{error.response}") - - def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]): + def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]) -> list[SyncData]: """ Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS. This includes their ID and email address, to be able to put users in the correct AWS organization later. """ - sync_list = [x for x in giphouse_data if x not in aws_data] - return sync_list - - def create_scp_policy(self, policy_name, policy_description, policy_content): - """ - Create an SCP policy. - - :param policy_name: The policy name. - :param policy_description: The policy description. - :param policy_content: The policy configuration as a dictionary. - The policy is automatically converted to JSON format, including escaped quotation marks. - :return: Details of newly created policy as a dict on success and NoneType object otherwise. 
- """ - client = boto3.client("organizations") - try: - response = client.create_policy( - Content=json.dumps(policy_content), - Description=policy_description, - Name=policy_name, - Type="SERVICE_CONTROL_POLICY", - ) - except ClientError as error: - self.fail = True - self.logger.error("Something went wrong creating an SCP policy.") - self.logger.error(error) - else: - return response["Policy"] - - def attach_scp_policy(self, policy_id, target_id): - """ - Attaches an SCP policy to a target (root, OU, or member account). - - :param policy_id: The ID of the policy to be attached. - :param target_id: The ID of the target root, OU, or member account. - """ - client = boto3.client("organizations") - try: - client.attach_policy(PolicyId=policy_id, TargetId=target_id) - except ClientError as error: - if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException": - self.fail = True - self.logger.error("Something went wrong attaching an SCP policy to a target.") - self.logger.debug(f"{error}") - self.logger.debug(f"{error.response}") - - def check_aws_api_connection(self): - """ - Check whether boto3 can connect to AWS API with current credentials. + return [project for project in giphouse_data if project not in aws_data] - :returns: First tuple element always exists and indicates success. - Second tuple element is contains information about the entity - who made the successful API call and None otherwise. 
- """ - client_sts = boto3.client("sts") - try: - caller_identity_info = client_sts.get_caller_identity() - except (NoCredentialsError, ClientError) as error: - self.logger.info("Establishing AWS API connection failed.") - self.logger.debug(error) - return False, None - else: - self.logger.info("Establishing AWS API connection succeeded.") - - return True, caller_identity_info - - def check_iam_policy(self, iam_user_arn, desired_actions): - """ - Check for the specified IAM user ARN whether the actions in list \ - desired_actions are allowed according to its IAM policy. + def get_tag_value(self, tags: list[dict[str, str]], key: str) -> str: + """Return the value of the tag with the given key, or None if no such tag exists.""" + for tag in tags: + if tag["Key"] == key: + return tag["Value"] + return None - :param iam_user_arn: ARN of the IAM user being checked. - :param iam_actions: List of AWS API actions to check. - :returns: True iff all actions in desired_actions are allowed. + def extract_aws_setup(self, parent_ou_id: str) -> AWSTree: """ - client_iam = boto3.client("iam") - - try: - response = client_iam.simulate_principal_policy(PolicySourceArn=iam_user_arn, ActionNames=desired_actions) - except ClientError as error: - self.logger.info("AWS API actions check failed.") - self.logger.debug(error) - return False - - success = True - for evaluation_result in response["EvaluationResults"]: - action_name = evaluation_result["EvalActionName"] - if evaluation_result["EvalDecision"] != "allowed": - self.logger.debug(f"The AWS API action {action_name} is denied for IAM user {iam_user_arn}.") - success = False - - if success: - self.logger.info("AWS API actions check succeeded.") - - return success - - def check_organization_existence(self): - """ - Check whether an AWS organization exists for the AWS API caller's account. - - :returns: First tuple element always exists and indicates success. 
- Second tuple element is describes properties of the organization and None otherwise. - """ - client_organizations = boto3.client("organizations") - - try: - response_org = client_organizations.describe_organization() - except ClientError as error: - self.logger.info("AWS organization existence check failed.") - self.logger.debug(error) - return False, None - else: - self.logger.info("AWS organization existence check succeeded.") - - return True, response_org["Organization"] - - def check_is_management_account(self, api_caller_info, organization_info): - """ - Check whether caller of AWS API has organization's management account ID. - - :returns: True iff the current organization's management account ID equals the AWS API caller's account ID. - """ - management_account_id = organization_info["MasterAccountId"] - api_caller_account_id = api_caller_info["Account"] - is_management_account = management_account_id == api_caller_account_id - - if is_management_account: - self.logger.info("Management account check succeeded.") - else: - self.logger.info("Management account check failed.") - self.logger.debug(f"The organization's management account ID is: '{management_account_id}'.") - self.logger.debug(f"The AWS API caller account ID is: '{api_caller_account_id}'.") - - return is_management_account - - def check_scp_enabled(self, organization_info): - """ - Check whether the SCP policy type is an enabled feature for the AWS organization. - - :returns: True iff the SCP policy type feature is enabled for the organization. 
- """ - scp_is_enabled = False - for policy in organization_info["AvailablePolicyTypes"]: - if policy["Type"] == "SERVICE_CONTROL_POLICY" and policy["Status"] == "ENABLED": - scp_is_enabled = True - break - - if not scp_is_enabled: - self.logger.info("The SCP policy type is disabled for the organization.") - self.logger.debug(organization_info["AvailablePolicyTypes"]) - else: - self.logger.info("Organization SCP policy status check succeeded.") - - return scp_is_enabled - - def pipeline_preconditions(self): - """ - Check all crucial pipeline preconditions. - - 1. Locatable boto3 credentials and successful AWS API connection - 2. Check allowed AWS API actions based on IAM policy of caller - 3. Existing organization for AWS API caller - 4. AWS API caller acts under same account ID as organization's management account ID - 5. SCP policy type feature enabled for organization - - :return: True iff all pipeline preconditions are met. - """ - check_api_connection, api_caller_info = self.check_aws_api_connection() - if not check_api_connection: - return False - - check_api_actions = self.check_iam_policy(api_caller_info["Arn"], self.required_aws_actions) - if not check_api_actions: - return False - - check_org_existence, organization_info = self.check_organization_existence() - self.org_info = organization_info - if not check_org_existence: - return False - - check_acc_management = self.check_is_management_account(api_caller_info, organization_info) - if not check_acc_management: - return False - - check_scp_enabled = self.check_scp_enabled(organization_info) - if not check_scp_enabled: - return False + Give a list of all the children of the parent OU. - return True + :param parent_ou_id: The ID of the parent OU. + :return: A AWSTree object containing all the children of the parent OU. 
+ """ + member_accounts = [] + aws_tree = AWSTree( + "root", + parent_ou_id, + [ + Iteration( + ou["Name"], + ou["Id"], + member_accounts := [ + SyncData( + account["Email"], + self.get_tag_value(tags, "project_slug"), + self.get_tag_value(tags, "project_semester"), + ) + for account in self.api_talker.list_accounts_for_parent(parent_id=ou["Id"]) + for tags in [self.api_talker.list_tags_for_resource(resource_id=account["Id"])] + ], + ) + for ou in self.api_talker.list_organizational_units_for_parent(parent_id=parent_ou_id) + ], + ) - def pipeline_policy(self, ou_id): - """ - Create an SCP policy and attaches it to the organizational unit of the current semester. + incomplete_accounts = [ + account for account in member_accounts if not (account.project_slug and account.project_semester) + ] - :param ou_id: ID of the organizational unit for the current semester. - :return: True iff the policy to be attached to the OU already exists and is successfully attached. - """ - client = boto3.client("organizations") - try: - client.describe_policy(PolicyId=self.policy_id) - except ClientError as error: - self.logger.debug(error) - return False + if incomplete_accounts: + raise Exception(f"Found incomplete accounts in AWS: {incomplete_accounts}.") - self.attach_scp_policy(self.policy_id, ou_id) - if self.fail: - return False - return True + return aws_tree - def pipeline_create_account(self, sync_data): - """ - Create a single new AWS member account in the organization of the API caller. 
+ def get_or_create_course_ou(self, tree: AWSTree) -> str: + """Create organizational unit under root with name of current semester.""" + root_id = tree.ou_id + course_ou_name = str(Semester.objects.get_or_create_current_semester()) + course_ou_id = next((ou.ou_id for ou in tree.iterations if ou.name == course_ou_name), None) - The status of the member account request is repeatedly checked based on the class' attributes: - self.ACCOUNT_REQUEST_INTERVAL_SECONDS: thread sleeping time before each status check - self.ACCOUNT_REQUEST_MAX_ATTEMPTS: maximum number of times to thread sleep and check + if not course_ou_id: + course_ou = self.api_talker.create_organizational_unit(root_id, course_ou_name) + course_ou_id = course_ou["OrganizationalUnit"]["Id"] - :param email: The e-mail address of the new member account. - :param username: The username of the new member account. - :returns: (True, account_id) on success and otherwise (False, failure_reason). - """ - client = boto3.client("organizations") + return course_ou_id - # Request new member account. + def attach_policy(self, target_id: str, policy_id: str) -> None: + """Attach policy to target resource.""" try: - response_create = client.create_account( - Email=sync_data.project_email, - AccountName=sync_data.project_slug, - IamUserAccessToBilling="DENY", - Tags=[ - {"Key": "project_slug", "Value": sync_data.project_slug}, - {"Key": "project_semester", "Value": sync_data.project_semester}, - ], - ) + self.api_talker.attach_policy(target_id, policy_id) except ClientError as error: - self.logger.debug(error) - return False, "CLIENTERROR_CREATE_ACCOUNT" - - # Repeatedly check status of new member account request. 
- request_id = response_create["CreateAccountStatus"]["Id"] - for _ in range(1, self.ACCOUNT_REQUEST_MAX_ATTEMPTS + 1): - time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS) - - try: - response_status = client.describe_create_account_status(CreateAccountRequestId=request_id) - except ClientError as error: - self.logger.debug(error) - return False, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS" - - request_state = response_status["CreateAccountStatus"]["State"] - if request_state == "FAILED": - return False, response_status["CreateAccountStatus"]["FailureReason"] - elif request_state == "SUCCEEDED": - return True, response_status["CreateAccountStatus"]["AccountId"] + if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException": + raise - return False, "STILL_IN_PROGRESS" + def get_current_policy_id(self) -> str: + """Get the current policy stored on the GiPHouse website.""" + for policy in AWSPolicy.objects.all(): + if policy.is_current_policy: + return policy.policy_id + raise Exception("No current policy found") - def pipeline_create_and_move_accounts(self, new_member_accounts, root_id, destination_ou_id): + def create_and_move_accounts( + self, new_member_accounts: list[SyncData], root_id: str, destination_ou_id: str + ) -> bool: """ Create multiple accounts in the organization of the API caller and move them from the root to a destination OU. @@ -447,167 +155,98 @@ def pipeline_create_and_move_accounts(self, new_member_accounts, root_id, destin :param destination_ou_id: The organization's destination OU ID. :returns: True iff **all** new member accounts were created and moved successfully.
""" - client = boto3.client("organizations") - overall_success = True - for new_member in new_member_accounts: - success, response = self.pipeline_create_account(new_member) - if success: - account_id = response + # Create member account + response = self.api_talker.create_account( + new_member.project_email, + new_member.project_slug, + [ + {"Key": "project_slug", "Value": new_member.project_slug}, + {"Key": "project_semester", "Value": new_member.project_semester}, + ], + ) + # Repeatedly check status of new member account request. + request_id = response["CreateAccountStatus"]["Id"] + + for _ in range(self.ACCOUNT_REQUEST_MAX_ATTEMPTS): + time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS) + try: - root_id = client.list_roots()["Roots"][0]["Id"] - client.move_account( - AccountId=account_id, SourceParentId=root_id, DestinationParentId=destination_ou_id - ) + response_status = self.api_talker.describe_create_account_status(request_id) except ClientError as error: + self.logger.debug(f"Failed to get status of account with e-mail: '{new_member.project_email}'.") self.logger.debug(error) - overall_success = False - else: - failure_reason = response - self.logger.debug(failure_reason) - overall_success = False - - return overall_success - - def pipeline_update_current_course_iteration_ou(self, aws_tree): - """ - Update the AWS tree with the new course iteration OU's. - - :param aws_tree: The AWS tree to be checked. - :returns: True, iteration_id on success and otherwise False, failure_reason. 
- """ - is_current_iteration, iteration_ou_id = self.check_current_ou_exists(aws_tree) + break + + request_state = response_status["CreateAccountStatus"]["State"] + + if request_state == "SUCCEEDED": + account_id = response_status["CreateAccountStatus"]["AccountId"] + + self.accounts_created += 1 + try: + self.api_talker.move_account(account_id, root_id, destination_ou_id) + self.accounts_moved += 1 + except ClientError as error: + self.logger.debug(f"Failed to move account with e-mail: {new_member.project_email}.") + self.logger.debug(error) + break + + elif request_state == "FAILED": + failure_reason = response_status["CreateAccountStatus"]["FailureReason"] + self.logger.debug( + f"Failed to create account with e-mail: {new_member.project_email}. " + f"Failure reason: {failure_reason}" + ) + break - if not is_current_iteration: - iteration_name = str(Semester.objects.get_or_create_current_semester()) - iteration_ou_id = self.create_course_iteration_OU(iteration_name) + self.accounts_to_create = len(new_member_accounts) + self.logger.debug(f"Accounts created: {self.accounts_created}/{self.accounts_to_create}") + self.logger.debug(f"Accounts moved: {self.accounts_moved}/{self.accounts_to_create}") + success = self.accounts_to_create == self.accounts_created == self.accounts_moved - if not self.fail: - return True, iteration_ou_id - else: - return False, "ITERATION_OU_CREATION_FAILED" + return success - def pipeline(self): + def pipeline(self) -> bool: """ Single pipeline that integrates all buildings blocks for the AWS integration process. :return: True iff all pipeline stages successfully executed. """ - # Check preconditions. - if not self.pipeline_preconditions(): - return False - - # Get synchronization data. 
- client = boto3.client("organizations") - try: - root_id = client.list_roots()["Roots"][0]["Id"] - except ClientError as error: - self.logger.debug("Failed to retrieve root ID of organization.") - self.logger.debug(error) - return False + self.checker.pipeline_preconditions(api_permissions) + root_id = self.api_talker.list_roots()[0]["Id"] aws_tree = self.extract_aws_setup(root_id) - if self.fail: - self.logger.debug("Extracting AWS setup failed.") - return False + self.checker.check_members_in_correct_iteration(aws_tree) + self.checker.check_double_iteration_names(aws_tree) aws_sync_data = aws_tree.awstree_to_syncdata_list() - giphouse_sync_data = self.get_emails_with_teamids() + giphouse_sync_data = self.get_syncdata_from_giphouse() merged_sync_data = self.generate_aws_sync_list(giphouse_sync_data, aws_sync_data) - # Check edge cases. - if self.check_for_double_member_email(aws_sync_data, merged_sync_data): - return False - - checker = Checks() - checker.check_members_in_correct_iteration(aws_tree) - checker.check_double_iteration_names(aws_tree) - - # Check/create course iteration OU. - current_course_iteration_exists, response = self.pipeline_update_current_course_iteration_ou(aws_tree) - if not current_course_iteration_exists: - failure_reason = response - self.logger.debug(failure_reason) - return False - course_iteration_ou_id = response - - # Create and attach SCP policy to course iteration OU. - if not self.pipeline_policy(course_iteration_ou_id): - return False + ou_id = self.get_or_create_course_ou(aws_tree) - # Create new member accounts and move to course iteration OU. 
- if not self.pipeline_create_and_move_accounts(merged_sync_data, root_id, course_iteration_ou_id): - return False + policy_id = self.get_current_policy_id() + self.attach_policy(ou_id, policy_id) - return True + return self.create_and_move_accounts(merged_sync_data, root_id, ou_id) - def check_for_double_member_email(self, aws_list: list[SyncData], sync_list: list[SyncData]): - """Check if no users are assigned to multiple projects.""" - sync_emails = [x.project_email for x in sync_list] - aws_emails = [x.project_email for x in aws_list] - - duplicates = [email for email in sync_emails if email in aws_emails] - - for duplicate in duplicates: - error = f"Email address {duplicate} is already in the list of members in AWS" - self.logger.info("An email clash occured while syncing.") - self.logger.debug(error) - - if duplicates != []: - return True - return False - - def check_current_ou_exists(self, AWSdata: AWSTree): - """ - Check if the the OU (organizational unit) for the current semester already exists in AWS. - - Get data in tree structure (dictionary) defined in the function that retrieves the AWS data - """ - current = str(Semester.objects.get_or_create_current_semester()) - - for iteration in AWSdata.iterations: - if current == iteration.name: - return (True, iteration.ou_id) - - return (False, None) - - def extract_aws_setup(self, parent_ou_id): + def synchronise(self, request): """ - Give a list of all the children of the parent OU. + Synchronise projects of the current semester to AWS and notify user of success or potential errors. - :param parent_ou_id: The ID of the parent OU. + :param request: HTTP request indicating the synchronization button has been pressed. 
""" - client = boto3.client("organizations") try: - response = client.list_organizational_units_for_parent(ParentId=parent_ou_id) - aws_tree = AWSTree("root", parent_ou_id, []) - for iteration in response["OrganizationalUnits"]: - ou_id = iteration["Id"] - ou_name = iteration["Name"] - response = client.list_accounts_for_parent(ParentId=ou_id) - children = response["Accounts"] - syncData = [] - for child in children: - account_id = child["Id"] - account_email = child["Email"] - response = client.list_tags_for_resource(ResourceId=account_id) - tags = response["Tags"] - merged_tags = {d["Key"]: d["Value"] for d in tags} - self.logger.debug(merged_tags) - if all(key in merged_tags for key in ["project_slug", "project_semester"]): - syncData.append( - SyncData(account_email, merged_tags["project_slug"], merged_tags["project_semester"]) - ) - else: - self.logger.error( - "Could not find project_slug or project_semester tag for account with ID: " + account_id - ) - self.fail = True + synchronisation_success = self.pipeline() - aws_tree.iterations.append(Iteration(ou_name, ou_id, syncData)) - return aws_tree - except ClientError as error: - self.fail = True - self.logger.error("Something went wrong extracting the AWS setup.") - self.logger.debug(f"{error}") - self.logger.debug(f"{error.response}") + if synchronisation_success: + messages.success(request, self.SUCCESS_MSG) + else: + messages.warning(request, self.FAIL_MSG) + except ClientError as api_error: + messages.error(request, self.API_ERROR_MSG) + self.logger.error(api_error) + except Exception as sync_error: + messages.error(request, self.SYNC_ERROR_MSG) + self.logger.error(sync_error) diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py deleted file mode 100644 index 0bbcc1d4..00000000 --- a/website/projects/aws/awssync_refactored.py +++ /dev/null @@ -1,191 +0,0 @@ -from __future__ import annotations - -import logging -import time - -from botocore.exceptions import 
ClientError - -from courses.models import Semester - -from mailing_lists.models import MailingList - -from projects.aws.awsapitalker import AWSAPITalker -from projects.aws.awssync_structs import AWSTree, Iteration, SyncData -from projects.models import AWSPolicy, Project - - -class AWSSyncRefactored: - """Synchronise with Amazon Web Services.""" - - def __init__(self): - """Create an AWSSync instance.""" - self.api_talker = AWSAPITalker() - self.logger = logging.getLogger("django.aws") - self.logger.setLevel(logging.DEBUG) - self.fail = False - - self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 2 - self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3 - - self.accounts_created = 0 - self.accounts_moved = 0 - - def get_syncdata_from_giphouse(self) -> list[SyncData]: - """ - Create a list of SyncData struct containing email, slug and semester. - - Slug and semester combined are together an uniqueness constraint. - - :return: list of SyncData structs with email, slug and semester - """ - sync_data_list = [] - current_semester = Semester.objects.get_or_create_current_semester() - - for project in Project.objects.filter(mailinglist__isnull=False, semester=current_semester).values( - "slug", "semester", "mailinglist" - ): - project_slug = project["slug"] - project_semester = str(Semester.objects.get(pk=project["semester"])) - project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address - - sync_data = SyncData(project_email, project_slug, project_semester) - sync_data_list.append(sync_data) - return sync_data_list - - def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]) -> list[SyncData]: - """ - Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS. - - This includes their ID and email address, to be able to put users in the correct AWS organization later. 
- """ - return [project for project in giphouse_data if project not in aws_data] - - def get_tag_value(self, tags: list[dict[str, str]], key: str) -> str: - """Return the value of the tag with the given key, or None if no such tag exists.""" - for tag in tags: - if tag["Key"] == key: - return tag["Value"] - return None - - def extract_aws_setup(self, parent_ou_id: str) -> AWSTree: - """ - Give a list of all the children of the parent OU. - - :param parent_ou_id: The ID of the parent OU. - :return: A AWSTree object containing all the children of the parent OU. - """ - aws_tree = AWSTree( - "root", - parent_ou_id, - [ - Iteration( - ou["Name"], - ou["Id"], - member_accounts := [ - SyncData( - account["Email"], - self.get_tag_value(tags, "project_slug"), - self.get_tag_value(tags, "project_semester"), - ) - for account in self.api_talker.list_accounts_for_parent(parent_id=ou["Id"]) - for tags in [self.api_talker.list_tags_for_resource(resource_id=account["Id"])] - ], - ) - for ou in self.api_talker.list_organizational_units_for_parent(parent_id=parent_ou_id) - ], - ) - - incomplete_accounts = [ - account for account in member_accounts if not (account.project_slug and account.project_semester) - ] - - if incomplete_accounts: - raise Exception(f"Found incomplete accounts in AWS: {incomplete_accounts}.") - - return aws_tree - - def get_or_create_course_ou(self, tree: AWSTree) -> str: - """Create organizational unit under root with name of current semester.""" - root_id = tree.ou_id - course_ou_name = str(Semester.objects.get_or_create_current_semester()) - course_ou_id = next((ou.ou_id for ou in tree.iterations if ou.name == course_ou_name), None) - - if not course_ou_id: - course_ou = self.api_talker.create_organizational_unit(root_id, course_ou_name) - course_ou_id = course_ou["OrganizationalUnit"]["Id"] - - return course_ou_id - - def attach_policy(self, target_id: str, policy_id: str) -> None: - """Attach policy to target resource.""" - try: - 
self.api_talker.attach_policy(target_id, policy_id) - except ClientError as error: - if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException": - raise - - def get_current_policy_id(self) -> str: - """Get the currrent policy stored on the GiPHouse website.""" - for policy in AWSPolicy.objects.all(): - if policy.is_current_policy: - return policy.policy_id - raise Exception("No current policy found") - - def create_and_move_accounts( - self, new_member_accounts: list[SyncData], root_id: str, destination_ou_id: str - ) -> bool: - """ - Create multiple accounts in the organization of the API caller and move them from the root to a destination OU. - - :param new_member_accounts: List of SyncData objects. - :param root_id: The organization's root ID. - :param destination_ou_id: The organization's destination OU ID. - :returns: True iff **all** new member accounts were created and moved successfully. - """ - for new_member in new_member_accounts: - # Create member account - response = self.api_talker.create_account( - new_member.project_email, - new_member.project_slug, - [ - {"Key": "project_slug", "Value": new_member.project_slug}, - {"Key": "project_semester", "Value": new_member.project_semester}, - ], - ) - # Repeatedly check status of new member account request. 
- request_id = response["CreateAccountStatus"]["Id"] - - for _ in range(self.ACCOUNT_REQUEST_MAX_ATTEMPTS): - time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS) - - try: - response_status = self.api_talker.describe_create_account_status(request_id) - except ClientError as error: - self.logger.debug(error) - self.logger.debug(f"Failed to get status of account with e-mail: '{new_member.project_email}'.") - break - - request_state = response_status["CreateAccountStatus"]["State"] - if request_state == "SUCCEEDED": - account_id = response_status["CreateAccountStatus"]["AccountId"] - - self.accounts_created += 1 - try: - self.api_talker.move_account(account_id, root_id, destination_ou_id) - self.accounts_moved += 1 - except ClientError as error: - self.logger.debug(error) - self.logger.debug(f"Failed to move account with e-mail: {new_member.project_email}.") - break - - elif request_state == "FAILED": - failure_reason = response_status["CreateAccountStatus"]["FailureReason"] - self.logger.debug( - f"Failed to create account with e-mail: {new_member.project_email}. 
" - f"Failure reason: {failure_reason}" - ) - break - - accounts_to_create = len(new_member_accounts) - success = accounts_to_create == self.accounts_created == self.accounts_moved - return success diff --git a/website/projects/tests/tests_aws/test_awsapitalker.py b/website/projects/tests/tests_aws/test_awsapitalker.py index dd7747ea..907d1efb 100644 --- a/website/projects/tests/tests_aws/test_awsapitalker.py +++ b/website/projects/tests/tests_aws/test_awsapitalker.py @@ -194,3 +194,20 @@ def test_describe_create_account_status(self): request_state = request["CreateAccountStatus"]["State"] self.assertEqual(request_state, "SUCCEEDED") + + def test_untag_resource(self): + self.create_organization() + + tag_key = "Test Key" + tag_value = "Test Value" + tag = {"Key": tag_key, "Value": tag_value} + account = self.api_talker.create_account("test@example.com", "Test", [tag]) + account_id = account["CreateAccountStatus"]["AccountId"] + + received_tags = self.api_talker.org_client.list_tags_for_resource(ResourceId=account_id)["Tags"] + self.assertIn(tag, received_tags) + + self.api_talker.untag_resource(account_id, [tag_key]) + + received_tags = self.api_talker.org_client.list_tags_for_resource(ResourceId=account_id)["Tags"] + self.assertEqual(received_tags, []) diff --git a/website/projects/tests/tests_aws/test_awssync.py b/website/projects/tests/tests_aws/test_awssync.py index 64f6938f..a33dd9fa 100644 --- a/website/projects/tests/tests_aws/test_awssync.py +++ b/website/projects/tests/tests_aws/test_awssync.py @@ -1,959 +1,433 @@ """Tests for awssync.py.""" - import json from unittest.mock import MagicMock, patch -import boto3 -import botocore from botocore.exceptions import ClientError -from django.test import TestCase +from django.contrib.auth import get_user_model +from django.test import Client, TestCase +from django.urls import reverse -from moto import mock_organizations, mock_sts +from moto import mock_iam, mock_organizations, mock_sts from courses.models import 
Semester from mailing_lists.models import MailingList -from projects.aws import awssync -from projects.models import Project +from projects.aws.awssync import AWSSync +from projects.aws.awssync_structs import AWSTree, Iteration, SyncData +from projects.models import AWSPolicy, Project +from registrations.models import Employee -class AWSSyncTest(TestCase): - """Test AWSSync class.""" +User: Employee = get_user_model() + +@mock_organizations +@mock_sts +@mock_iam +class AWSSyncTest(TestCase): def setUp(self): """Set up testing environment.""" - self.sync = awssync.AWSSync() + self.sync = AWSSync() + self.api_talker = self.sync.api_talker + + self.admin = User.objects.create_superuser(github_id=0, github_username="super") + self.client = Client() + self.client.force_login(self.admin) + + self.logger = MagicMock() + self.sync.logger = self.logger + self.sync.checker.logger = self.logger + + def setup_policy(self): + policy_name = "DenyAll" + policy_description = "Deny all access." + policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + policy = self.sync.api_talker.org_client.create_policy( + Name=policy_name, + Description=policy_description, + Content=json.dumps(policy_content), + Type="SERVICE_CONTROL_POLICY", + Tags=[{"Key": "no_permissions", "Value": "true"}], + ) + AWSPolicy.objects.create( + policy_id=policy["Policy"]["PolicySummary"]["Id"], + is_current_policy=True, + tags_key="no_permissions", + tags_value="true", + ) + + def test_get_syncdata_from_giphouse_normal(self): + """Test get_emails_with_teamids function in optimal conditions.""" self.semester = Semester.objects.create(year=2023, season=Semester.SPRING) - self.mailing_list = MailingList.objects.create(address="test1") - self.project = Project.objects.create(id=1, name="test1", semester=self.semester, slug="test1") - self.mailing_list.projects.add(self.project) - self.mock_org = mock_organizations() - self.mock_org.start() - - def 
tearDown(self): - self.mock_org.stop() - - def simulateFailure(self): - self.sync.fail = True - - def test_button_pressed(self): - """Test button_pressed function.""" - return_value = self.sync.button_pressed() - self.assertTrue(return_value) - - def test_create_aws_organization(self): - moto_client = boto3.client("organizations") - org = self.sync - org.create_aws_organization() - describe_org = moto_client.describe_organization()["Organization"] - self.assertEqual(describe_org, org.org_info) - - def test_create_aws_organization__exception(self): - org = self.sync - with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): - org.create_aws_organization() - self.assertTrue(org.fail) - self.assertIsNone(org.org_info) - - def test_create_course_iteration_OU(self): - moto_client = boto3.client("organizations") - org = self.sync - org.create_aws_organization() - org.create_course_iteration_OU("1") - describe_unit = moto_client.describe_organizational_unit(OrganizationalUnitId=org.iterationOU_info["Id"])[ - "OrganizationalUnit" - ] - self.assertEqual(describe_unit, org.iterationOU_info) - - def test_create_course_iteration_OU_without_organization(self): - org = self.sync - org.create_course_iteration_OU("1") - self.assertTrue(org.fail) - - def test_create_course_iteration_OU__exception(self): - org = self.sync - org.create_aws_organization() - with patch("boto3.client") as mocker: - mocker().list_roots.side_effect = ClientError({}, "list_roots") - org.create_course_iteration_OU("1") - self.assertTrue(org.fail) - - def test_get_all_mailing_lists(self): - """Test get_all_mailing_lists function.""" - mailing_lists = self.sync.get_all_mailing_lists() - self.assertIsInstance(mailing_lists, list) - - def test_get_emails_with_teamids_normal(self): - """Test get_emails_with_teamids function.""" - email_id = self.sync.get_emails_with_teamids() + for i in range(3): + self.mailing_list = MailingList.objects.create(address="test" + str(i)) + self.project = 
Project.objects.create( + id=i, name="test" + str(i), semester=self.semester, slug="test" + str(i) + ) + self.mailing_list.projects.add(self.project) + + email_id = self.sync.get_syncdata_from_giphouse() self.assertIsInstance(email_id, list) - self.assertIsInstance(email_id[0], awssync.SyncData) - expected_result = [awssync.SyncData("test1@giphouse.nl", "test1", "Spring 2023")] + self.assertIsInstance(email_id[0], SyncData) + expected_result = [ + SyncData("test0@giphouse.nl", "test0", "Spring 2023"), + SyncData("test1@giphouse.nl", "test1", "Spring 2023"), + SyncData("test2@giphouse.nl", "test2", "Spring 2023"), + ] self.assertEqual(email_id, expected_result) - def test_get_emails_with_teamids_no_project(self): - """Test get_emails_with_teamids function.""" + def test_get_syncdata_from_giphouse_no_project(self): + """Test get_emails_with_teamids function where the mailinglist is not assigned to a project""" MailingList.objects.all().delete() self.mailing_list = MailingList.objects.create(address="test2") - email_id = self.sync.get_emails_with_teamids() + email_id = self.sync.get_syncdata_from_giphouse() self.assertIsInstance(email_id, list) self.assertEqual(email_id, []) - def test_get_emails_with_teamids_no_mailing_list(self): - """Test get_emails_with_teamids function.""" + def test_get_syncdata_from_giphouse_no_mailing_list(self): + """Test get_emails_with_teamids function where no mailinglists exist""" MailingList.objects.all().delete() Project.objects.all().delete() - email_id = self.sync.get_emails_with_teamids() + email_id = self.sync.get_syncdata_from_giphouse() self.assertIsInstance(email_id, list) self.assertEqual(email_id, []) - def test_get_emails_with_teamids_different_semester(self): - """Test get_emails_with_teamids function.""" + def test_get_syncdata_from_giphouse_different_semester(self): + """Test get_emails_with_teamids function where the semester is not equal to the current semester""" MailingList.objects.all().delete() new_semester = 
Semester.objects.create(year=2022, season=Semester.FALL) - self.mailing_list = MailingList.objects.create(address="test2") - self.project = Project.objects.create(id=2, name="test2", semester=new_semester, slug="test2") + self.mailing_list = MailingList.objects.create(address="test4") + self.project = Project.objects.create(id=4, name="test4", semester=new_semester, slug="test4") self.mailing_list.projects.add(self.project) - email_id = self.sync.get_emails_with_teamids() + email_id = self.sync.get_syncdata_from_giphouse() self.assertIsInstance(email_id, list) self.assertEqual(email_id, []) - def test_create_scp_policy(self): - self.sync.create_aws_organization() - - policy_name = "DenyAll" - policy_description = "Deny all access." - policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} - policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) - - self.assertFalse(self.sync.fail) - self.assertEqual(policy["PolicySummary"]["Name"], policy_name) - self.assertEqual(policy["PolicySummary"]["Description"], policy_description) - self.assertEqual(policy["Content"], json.dumps(policy_content)) - - def test_create_scp_policy__exception(self): - self.sync.create_aws_organization() - - policy_name = "DenyAll" - policy_description = "Deny all access." - policy_content = { - "Version": "2012-10-17", - "Statement": [{"Effect": "NonExistentEffect", "Action": "*", "Resource": "*"}], - } - with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): - policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) - - self.assertTrue(self.sync.fail) - self.assertIsNone(policy) - - def test_attach_scp_policy(self): - moto_client = boto3.client("organizations") - self.sync.create_aws_organization() - - policy_name = "DenyAll" - policy_description = "Deny all access." 
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} - policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) - - policy_id = policy["PolicySummary"]["Id"] - root_id = moto_client.list_roots()["Roots"][0]["Id"] - self.sync.attach_scp_policy(policy_id, root_id) - - current_scp_policies = moto_client.list_policies_for_target(TargetId=root_id, Filter="SERVICE_CONTROL_POLICY") - current_scp_policy_ids = [scp_policy["Id"] for scp_policy in current_scp_policies["Policies"]] - - self.assertIn(policy_id, current_scp_policy_ids) - self.assertFalse(self.sync.fail) - - def test_attach_scp_policy__exception(self): - self.sync.create_aws_organization() - - policy_name = "DenyAll" - policy_description = "Deny all access." - policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} - policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) - - policy_id = policy["PolicySummary"]["Id"] - root_id = self.sync.org_info["Id"] # Retrieves organization ID, not root ID, resulting in ClientError. - self.sync.attach_scp_policy(policy_id, root_id) - - self.assertTrue(self.sync.fail) - - @mock_sts - def test_check_aws_api_connection(self): - success, caller_identity_info = self.sync.check_aws_api_connection() - - self.assertTrue(success) - self.assertIsNotNone(caller_identity_info) - - @mock_sts - def test_check_aws_api_connection__exception(self): - with patch("boto3.client") as mocker: - mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity") - mocker.return_value = mocker - success, caller_identity_info = self.sync.check_aws_api_connection() - - self.assertFalse(success) - self.assertIsNone(caller_identity_info) - - # IAM simulate_principal_policy is not covered by moto. 
- def test_check_iam_policy(self): - iam_user_arn = "daddy" - desired_actions = [] - mock_evaluation_results = { - "EvaluationResults": [ - { - "EvalActionName": "organizations:CreateOrganizationalUnit", - "EvalDecision": "allowed", - "EvalResourceName": "*", - "MissingContextValues": [], - } - ] - } - - # success == True - with patch("boto3.client") as mocker: - mocker().simulate_principal_policy.return_value = mock_evaluation_results - success = self.sync.check_iam_policy(iam_user_arn, desired_actions) - self.assertTrue(success) - - # success == False - mock_evaluation_results["EvaluationResults"][0]["EvalDecision"] = "implicitDeny" - with patch("boto3.client") as mocker: - mocker().simulate_principal_policy.return_value = mock_evaluation_results - success = self.sync.check_iam_policy(iam_user_arn, desired_actions) - self.assertFalse(success) - - def test_check_iam_policy__exception(self): - iam_user_arn = "daddy" - desired_actions = [] - - with patch("boto3.client") as mocker: - mocker().simulate_principal_policy.side_effect = ClientError({}, "simulate_principal_policy") - success = self.sync.check_iam_policy(iam_user_arn, desired_actions) - - self.assertFalse(success) - - def test_check_organization_existence(self): - moto_client = boto3.client("organizations") - organization_create_info = moto_client.create_organization(FeatureSet="ALL")["Organization"] - success, organization_describe_info = self.sync.check_organization_existence() - - self.assertTrue(success) - self.assertEqual(organization_create_info, organization_describe_info) - - def test_check_organization_existence__exception(self): - with patch("boto3.client") as mocker: - mocker.describe_organization.side_effect = ClientError({}, "describe_organization") - mocker.return_value = mocker - success, organization_info = self.sync.check_organization_existence() - - self.assertFalse(success) - self.assertIsNone(organization_info) - - @mock_sts - def test_check_is_management_account(self): - moto_client = 
boto3.client("organizations") - - moto_client.create_organization(FeatureSet="ALL")["Organization"] - _, caller_identity_info = self.sync.check_aws_api_connection() - _, organization_info = self.sync.check_organization_existence() - - # is_management_account == True - success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info) - self.assertTrue(success_acc) - - # is_management_account == False - caller_identity_info["Account"] = "daddy" - success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info) - self.assertFalse(success_acc) - - def test_check_scp_enabled(self): - moto_client = boto3.client("organizations") - - # SCP enabled. - organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"] - scp_is_enabled = self.sync.check_scp_enabled(organization_info) - self.assertTrue(scp_is_enabled) - - # SCP semi-disabled (pending). - organization_info["AvailablePolicyTypes"][0]["Status"] = "PENDING_DISABLE" - scp_is_enabled = self.sync.check_scp_enabled(organization_info) - self.assertFalse(scp_is_enabled) - - # SCP disabled (empty list). - organization_info["AvailablePolicyTypes"] = [] - scp_is_enabled = self.sync.check_scp_enabled(organization_info) - self.assertFalse(scp_is_enabled) - - @mock_sts - def test_pipeline_preconditions__all_success(self): - # Create organization. - moto_client = boto3.client("organizations") - moto_client.create_organization(FeatureSet="ALL")["Organization"] - - # Mock return value of simulate_principal_policy. 
- iam_user_arn = "daddy" - desired_actions = [] - mock_evaluation_results = { - "EvaluationResults": [ - { - "EvalActionName": "organizations:CreateOrganizationalUnit", - "EvalDecision": "allowed", - "EvalResourceName": "*", - "MissingContextValues": [], - } - ] - } - - with patch("boto3.client") as mocker: - mocker().simulate_principal_policy.return_value = mock_evaluation_results - check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) - - # Mock return value of check_iam_policy. - with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker: - mocker.return_value = check_iam_policy - success = self.sync.pipeline_preconditions() - - self.assertTrue(success) - - @mock_sts - def test_pipeline_preconditions__no_connection(self): - with patch("boto3.client") as mocker: - mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity") - mocker.return_value = mocker - success = self.sync.pipeline_preconditions() - - self.assertFalse(success) - - def test_pipeline_preconditions__no_iam(self): - # Mock return value of simulate_principal_policy. - iam_user_arn = "daddy" - desired_actions = [] - mock_evaluation_results = { - "EvaluationResults": [ - { - "EvalActionName": "organizations:CreateOrganizationalUnit", - "EvalDecision": "implicitDeny", - "EvalResourceName": "*", - "MissingContextValues": [], - } - ] - } - - with patch("boto3.client") as mocker: - mocker().simulate_principal_policy.return_value = mock_evaluation_results - check_api_actions = self.sync.check_iam_policy(iam_user_arn, desired_actions) - - # Mock return value of check_iam_policy. - with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker: - mocker.return_value = check_api_actions - success = self.sync.pipeline_preconditions() - - self.assertFalse(success) - - @mock_sts - def test_pipeline_preconditions__no_organization(self): - # Mock return value of simulate_principal_policy. 
- iam_user_arn = "daddy" - desired_actions = [] - mock_evaluation_results = { - "EvaluationResults": [ - { - "EvalActionName": "organizations:CreateOrganizationalUnit", - "EvalDecision": "allowed", - "EvalResourceName": "*", - "MissingContextValues": [], - } - ] - } - - with patch("boto3.client") as mocker: - mocker().simulate_principal_policy.return_value = mock_evaluation_results - check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) - - # Mock return value of check_iam_policy. - with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker: - mocker.return_value = check_iam_policy - success = self.sync.pipeline_preconditions() - - self.assertFalse(success) - - @mock_sts - def test_pipeline_preconditions__no_management(self): - moto_client = boto3.client("organizations") - moto_client.create_organization(FeatureSet="ALL") - - # Mock return value of simulate_principal_policy. - iam_user_arn = "daddy" - desired_actions = [] - mock_evaluation_results = { - "EvaluationResults": [ - { - "EvalActionName": "organizations:CreateOrganizationalUnit", - "EvalDecision": "allowed", - "EvalResourceName": "*", - "MissingContextValues": [], - } - ] - } - - with patch("boto3.client") as mocker: - mocker().simulate_principal_policy.return_value = mock_evaluation_results - check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) - - # Mock return value of check_iam_policy. 
- with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker_iam: - mocker_iam.return_value = check_iam_policy - with patch("projects.aws.awssync.AWSSync.check_aws_api_connection") as mocker_api: - mocker_api.return_value = True, {"Account": "daddy", "Arn": "01234567890123456789"} - success = self.sync.pipeline_preconditions() - - self.assertFalse(success) - - @mock_sts - def test_pipeline_preconditions__no_scp(self): - moto_client = boto3.client("organizations") - - organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"] - - # Mock return value of simulate_principal_policy. - iam_user_arn = "daddy" - desired_actions = [] - mock_evaluation_results = { - "EvaluationResults": [ - { - "EvalActionName": "organizations:CreateOrganizationalUnit", - "EvalDecision": "allowed", - "EvalResourceName": "*", - "MissingContextValues": [], - } - ] - } - - with patch("boto3.client") as mocker: - mocker().simulate_principal_policy.return_value = mock_evaluation_results - check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) - - # Mock return value of check_iam_policy. - with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker_iam: - mocker_iam.return_value = check_iam_policy - - # Mock return value of check_organization_existence with no SCP policy enabled. - organization_info["AvailablePolicyTypes"] = [] - with patch("projects.aws.awssync.AWSSync.check_organization_existence") as mocker: - mocker.return_value = True, organization_info - success = self.sync.pipeline_preconditions() - - self.assertFalse(success) - - """ - def test_pipeline_create_scp_policy(self): - self.sync.create_aws_organization() - - policy_name = "DenyAll" - policy_description = "Deny all access." 
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} - - policy = self.sync.pipeline_create_scp_policy() - - self.assertFalse(self.sync.fail) - self.assertEqual(policy["PolicySummary"]["Name"], policy_name) - self.assertEqual(policy["PolicySummary"]["Description"], policy_description) - self.assertEqual(policy["Content"], json.dumps(policy_content)) - - def test_pipeline_create_scp_policy__exception(self): - self.sync.create_aws_organization() - - with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api): - policy = self.sync.pipeline_create_scp_policy() - - self.assertTrue(self.sync.fail) - self.assertIsNone(policy) - """ - - def test_pipeline_policy(self): - self.sync.create_aws_organization() - - policy_name = "DenyAll" - policy_description = "Deny all access." - policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} - policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) - self.sync.policy_id = policy["PolicySummary"]["Id"] - - ou_id = self.sync.create_course_iteration_OU("Test") - - success = self.sync.pipeline_policy(ou_id) - self.assertTrue(success) - - def test_pipeline_policy__exception(self): - self.sync.create_aws_organization() - - ou_id = self.sync.create_course_iteration_OU("Test") - - success = self.sync.pipeline_policy(ou_id) - self.assertFalse(success) - - def test_pipeline_policy__failure_attach(self): - self.sync.create_aws_organization() - - policy_name = "DenyAll" - policy_description = "Deny all access." 
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} - policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) - self.sync.policy_id = policy["PolicySummary"]["Id"] - - ou_id = self.sync.create_course_iteration_OU("Test") - - self.sync.attach_scp_policy = MagicMock(side_effect=self.simulateFailure()) + def test_AWS_sync_list_both_empty(self): + gip_list = [] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_empty_AWS(self): + test1 = SyncData("test1@test1.test1", "test1", "test1") + test2 = SyncData("test2@test2.test2", "test2", "test2") + gip_list = [test1, test2] + aws_list = [] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list) + + def test_AWS_sync_list_empty_GiP(self): + test1 = SyncData("test1@test1.test1", "test1", "test1") + test2 = SyncData("test2@test2.test2", "test2", "test2") + gip_list = [] + aws_list = [test1, test2] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) + + def test_AWS_sync_list_both_full(self): + test1 = SyncData("test1@test1.test1", "test1", "test1") + test2 = SyncData("test2@test2.test2", "test2", "test2") + test3 = SyncData("test3@test3.test3", "test3", "test3") + gip_list = [test1, test2] + aws_list = [test2, test3] + self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [test1]) + + def test_get_tag_value(self): + tags = [{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}] + self.assertEquals(self.sync.get_tag_value(tags, "project_semester"), "2021") + self.assertEquals(self.sync.get_tag_value(tags, "project_slug"), "test1") + self.assertEquals(self.sync.get_tag_value(tags, "project_name"), None) + + def test_extract_aws_setup(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.api_talker.list_roots()[0]["Id"] + + ou_response = 
self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1") + ou_id = ou_response["OrganizationalUnit"]["Id"] + + account_response = self.api_talker.create_account( + email="account_1@gmail.com", + account_name="account_1", + tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}], + ) + account_id = account_response["CreateAccountStatus"]["AccountId"] + self.api_talker.move_account(account_id=account_id, source_parent_id=root_id, dest_parent_id=ou_id) - success = self.sync.pipeline_policy(ou_id) - self.assertFalse(success) + aws_tree = self.sync.extract_aws_setup(root_id) - @mock_sts - def test_pipeline(self): - moto_client = boto3.client("organizations") + expected_sync_data = [SyncData("account_1@gmail.com", "test1", "2021")] + expected_iteration = Iteration("OU_1", ou_id, expected_sync_data) + expected_tree = AWSTree("root", root_id, [expected_iteration]) - # pipeline_preconditions() == False - success = self.sync.pipeline() - self.assertFalse(success) + self.assertEqual(aws_tree, expected_tree) - # pipeline_preconditions() == True - moto_client.create_organization(FeatureSet="ALL")["Organization"] + def test_extract_aws_setup_no_slugs(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.api_talker.list_roots()[0]["Id"] - policy_name = "DenyAll" - policy_description = "Deny all access." 
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} - policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content) - self.sync.policy_id = policy["PolicySummary"]["Id"] - - iam_user_arn = "daddy" - desired_actions = [] - mock_evaluation_results = { - "EvaluationResults": [ - { - "EvalActionName": "organizations:CreateOrganizationalUnit", - "EvalDecision": "allowed", - "EvalResourceName": "*", - "MissingContextValues": [], - } - ] - } + response_OU_1 = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1") + OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] + response_account_1 = self.api_talker.create_account( + email="account_1@gmail.com", + account_name="account_1", + tags=[], + ) + account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] - with patch("boto3.client") as mocker: - mocker().simulate_principal_policy.return_value = mock_evaluation_results - check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions) + self.api_talker.move_account(account_id=account_id_1, source_parent_id=root_id, dest_parent_id=OU_1_id) - with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker: - mocker.return_value = check_iam_policy - success = self.sync.pipeline() + with self.assertRaises(Exception) as context: + self.sync.extract_aws_setup(root_id) + self.assertIn("Found incomplete accounts in AWS", str(context.exception)) - self.assertTrue(success) + def test_get_or_create_course_ou__new(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] + tree = AWSTree("root", root_id, []) - def test_pipeline__exception_list_roots(self): - self.sync.pipeline_preconditions = MagicMock(return_value=True) + current_semester_name = str(Semester.objects.get_or_create_current_semester()) + course_ou_id = self.sync.get_or_create_course_ou(tree) - with patch("boto3.client") as 
mocker: - mocker().list_roots.side_effect = ClientError({}, "list_roots") - success = self.sync.pipeline() - - self.assertFalse(success) + course_ou_exists = any( + ou["Id"] == course_ou_id and ou["Name"] == current_semester_name + for ou in self.sync.api_talker.list_organizational_units_for_parent(root_id) + ) - def test_pipeline__edge_case_double_emails(self): - moto_client = boto3.client("organizations") - moto_client.create_organization(FeatureSet="ALL")["Organization"] + self.assertTrue(course_ou_exists) - aws_tree = awssync.AWSTree( - "Root", - "123", + def test_get_or_create_course_ou__already_exists(self): + tree = AWSTree( + "root", + "r-123", [ - awssync.Iteration( - "Spring 2023", - "456", - [ - awssync.SyncData("email1@example.com", "project1", "Spring 2023"), - ], - ) + Iteration("Spring 2023", "ou-456", [SyncData("alice@giphouse.nl", "alices-project", "Spring 2023")]), + Iteration("Fall 2023", "ou-789", [SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023")]), ], ) - gip_teams = [ - awssync.SyncData("email1@example.com", "project1", "Spring 2023"), - awssync.SyncData("email1@example.com", "project2", "Spring 2023"), - ] - - self.sync.pipeline_preconditions = MagicMock(return_value=True) - self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) - self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): - success = self.sync.pipeline() - - self.assertFalse(success) + course_ou_id = self.sync.get_or_create_course_ou(tree) + self.assertEqual("ou-456", course_ou_id) - def test_pipeline__edge_case_incorrectly_placed(self): - moto_client = boto3.client("organizations") - moto_client.create_organization(FeatureSet="ALL")["Organization"] + def test_attach_policy__not_attached(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] - aws_tree = awssync.AWSTree( - "Root", - 
"123", - [ - awssync.Iteration( - "Fall 2023", - "456", - [ - awssync.SyncData("email1@example.com", "project1", "Spring 2023"), - ], - ) - ], + new_policy_content = json.dumps( + {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} + ) + new_policy_id = self.sync.api_talker.org_client.create_policy( + Content=new_policy_content, Description="Deny all access.", Name="DenyAll", Type="SERVICE_CONTROL_POLICY" + )["Policy"]["PolicySummary"]["Id"] + + self.sync.attach_policy(root_id, new_policy_id) + attached_policies = self.sync.api_talker.org_client.list_policies_for_target( + TargetId=root_id, Filter="SERVICE_CONTROL_POLICY" + )["Policies"] + attached_policy_ids = [policy["Id"] for policy in attached_policies] + + self.assertIn(new_policy_id, attached_policy_ids) + + def test_attach_policy__caught_exception(self): + # Error code "DuplicatePolicyAttachmentException" can not be simulated by moto, so it is mocked. + attach_policy_hard_side_effect = ClientError( + {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy" ) + with patch.object( + self.sync.api_talker.org_client, "attach_policy", side_effect=attach_policy_hard_side_effect + ): + return_value = self.sync.attach_policy("r-123", "p-123") - gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")] + self.assertIsNone(return_value) - self.sync.pipeline_preconditions = MagicMock(return_value=True) - self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) - self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) - with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): - self.assertRaises(Exception, self.sync.pipeline) + def test_attach_policy__reraised_exception(self): + self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123") - def test_pipeline__edge_case_double_iteration_names(self): - moto_client = boto3.client("organizations") - 
moto_client.create_organization(FeatureSet="ALL")["Organization"] + def test_get_current_policy_id(self): + self.policy_id1 = AWSPolicy.objects.create( + policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False + ) + self.policy_id2 = AWSPolicy.objects.create( + policy_id="Test-Policy2", tags_key="Test-Policy-Id2", is_current_policy=True + ) + current_policy_id = self.sync.get_current_policy_id() + self.assertIsInstance(current_policy_id, str) + self.assertEqual(current_policy_id, self.policy_id2.policy_id) - aws_tree = awssync.AWSTree( - "Root", - "123", - [ - awssync.Iteration( - "Spring 2023", "456", [awssync.SyncData("email1@example.com", "project1", "Spring 2023")] - ), - awssync.Iteration("Spring 2023", "789", []), - ], + def test_get_current_policy__no_current_policy_id(self): + self.policy_id1 = AWSPolicy.objects.create( + policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False ) + self.assertRaises(Exception, self.sync.get_current_policy_id) - gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")] + def test_create_move_account(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] - self.sync.pipeline_preconditions = MagicMock(return_value=True) - self.sync.extract_aws_setup = MagicMock(return_value=aws_tree) - self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams) - with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): - self.assertRaises(Exception, self.sync.pipeline) + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + ] - def test_pipeline__failed_creating_iteration_ou(self): - moto_client = boto3.client("organizations") - 
moto_client.create_organization(FeatureSet="ALL")["Organization"] + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + self.assertTrue(success) - self.sync.pipeline_preconditions = MagicMock(return_value=True) - with patch("boto3.client") as mocker: - mocker().create_organizational_unit.side_effect = ClientError({}, "create_organizational_unit") - success = self.sync.pipeline() + def test_create_move_account__exception_failure(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] - self.assertFalse(success) - - def test_pipeline__exception_attaching_policy(self): - self.sync.create_aws_organization() - self.sync.pipeline_preconditions = MagicMock(return_value=True) + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + ] - with patch("boto3.client") as mocker: - mocker().attach_policy.side_effect = ClientError( - {"Error": {"Code": "PolicyTypeNotEnabledException"}}, "attach_policy" - ) - success = self.sync.pipeline() + with patch.object(self.sync.api_talker, "move_account", side_effect=ClientError({}, "move_account")): + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) self.assertFalse(success) - def test_pipeline__already_attached_policy(self): - self.sync.create_aws_organization() - self.sync.pipeline_preconditions = MagicMock(return_value=True) - - with patch("boto3.client") as mocker: - mocker().attach_policy.side_effect = ClientError( - {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy" - ) - success = self.sync.pipeline() - - self.assertFalse(success) + def test_create_move_account__no_move(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = 
self.sync.api_talker.list_roots()[0]["Id"] - def test_pipeline__failed_create_and_move_account(self): - self.sync.create_aws_organization() - self.sync.pipeline_preconditions = MagicMock(return_value=True) + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + ] - with patch("boto3.client") as mocker: - mocker().move_account.side_effect = ClientError({}, "move_account") - success = self.sync.pipeline() + with patch.object( + self.sync.api_talker, + "describe_create_account_status", + side_effect=ClientError({}, "describe_create_account_status"), + ): + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) self.assertFalse(success) - def test_pipeline__exception_extract_aws_setup(self): - self.sync.pipeline_preconditions = MagicMock(return_value=True) + def test_create_move_account__failed(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] - with patch("boto3.client") as mocker: - mocker().list_organizational_units_for_parent.side_effect = ClientError( - {}, "list_organizational_units_for_parent" - ) - success = self.sync.pipeline() + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("alice@giphouse.nl", "bobs-project", "Fall 2023"), + ] + with patch.object( + self.sync.api_talker.org_client, + "describe_create_account_status", + return_value={"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}}, + ): + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) self.assertFalse(success) - def 
test_pipeline_update_current_course_iteration_ou___failure_check_current_ou(self): - - self.sync.check_current_ou_exists = MagicMock(return_value=(False, None)) - - self.sync.create_aws_organization() - success, id = self.sync.pipeline_update_current_course_iteration_ou(None) - self.assertTrue(success) - self.assertFalse(id is None) - - def test_pipeline_update_current_course_iteration_ou___success(self): - - self.sync.check_current_ou_exists = MagicMock(return_value=(True, "1234")) - - self.sync.create_aws_organization() - success, id = self.sync.pipeline_update_current_course_iteration_ou(None) - self.assertTrue(success) - self.assertEquals(id, "1234") + def test_create_move_account__in_progress(self): + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] - def test_pipeline_update_current_course_iteration_ou___failure_create_ou(self): - - self.sync.check_current_ou_exists = MagicMock(return_value=(False, None)) - self.sync.create_course_iteration_OU = MagicMock(side_effect=self.simulateFailure()) + dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") + dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] + members = [ + SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), + SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + ] - self.sync.create_aws_organization() - success, failure_reason = self.sync.pipeline_update_current_course_iteration_ou(None) + with patch.object( + self.sync.api_talker.org_client, + "describe_create_account_status", + return_value={"CreateAccountStatus": {"State": "IN_PROGRESS"}}, + ): + success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) self.assertFalse(success) - self.assertEquals(failure_reason, "ITERATION_OU_CREATION_FAILED") - self.assertTrue(self.sync.fail) - - def test_pipeline_create_account(self): - self.sync.create_aws_organization() - success, response = self.sync.pipeline_create_account( - 
awssync.SyncData("alice@example.com", "alice", "Spring 2023") + def test_pipeline__no_accounts_no_ou(self): + self.sync.checker.api_talker.simulate_principal_policy = MagicMock( + return_value={"EvaluationResults": [{"EvalDecision": "allowed"}]} ) + self.sync.api_talker.create_organization(feature_set="ALL") + self.setup_policy() + pipeline_success = self.sync.pipeline() - self.assertTrue(success) - self.assertIsNotNone(response) - - def test_pipeline_create_account__exception_create_account(self): - self.sync.create_aws_organization() - - with patch("boto3.client") as mocker: - mocker().create_account.side_effect = ClientError({}, "create_account") - success, response = self.sync.pipeline_create_account( - awssync.SyncData("alice@example.com", "alice", "Spring 2023") - ) - - self.assertFalse(success) - self.assertEquals(response, "CLIENTERROR_CREATE_ACCOUNT") - - def test_pipeline_create_account__exception_describe_account_status(self): - self.sync.create_aws_organization() + root_id = self.sync.api_talker.list_roots()[0]["Id"] + root_ous = self.sync.api_talker.list_organizational_units_for_parent(root_id) + root_ou_names = [ou["Name"] for ou in root_ous] - with patch("boto3.client") as mocker: - mocker().describe_create_account_status.side_effect = ClientError({}, "describe_create_account_status") - success, response = self.sync.pipeline_create_account( - awssync.SyncData("alice@example.com", "alice", "Spring 2023") - ) + current_semester = str(Semester.objects.get_or_create_current_semester()) + current_accounts = self.sync.api_talker.org_client.list_accounts()["Accounts"] - self.assertFalse(success) - self.assertEquals(response, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS") + self.assertIn(current_semester, root_ou_names) + self.assertTrue(pipeline_success) - def test_pipeline_create_account__state_failed(self): - self.sync.create_aws_organization() + self.assertEqual(len(current_accounts), 1) + self.assertEqual(current_accounts[0]["Name"], "master") - with 
patch("boto3.client") as mocker: - response = {"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}} - mocker().describe_create_account_status.return_value = response - success, response = self.sync.pipeline_create_account( - awssync.SyncData("alice@example.com", "alice", "Spring 2023") - ) - - self.assertFalse(success) - self.assertEquals(response, "EMAIL_ALREADY_EXISTS") - - def test_pipeline_create_account__state_in_progress(self): - self.sync.create_aws_organization() - - with patch("boto3.client") as mocker: - response = { - "CreateAccountStatus": { - "State": "IN_PROGRESS", - } - } - mocker().describe_create_account_status.return_value = response - success, response = self.sync.pipeline_create_account( - awssync.SyncData("alice@example.com", "alice", "Spring 2023") - ) - - self.assertFalse(success) - self.assertEquals(response, "STILL_IN_PROGRESS") + def test_pipeline__new_accounts_existing_ou(self): + self.sync.checker.api_talker.simulate_principal_policy = MagicMock( + return_value={"EvaluationResults": [{"EvalDecision": "allowed"}]} + ) + self.sync.api_talker.create_organization(feature_set="ALL") + self.setup_policy() - def test_pipeline_create_and_move_accounts(self): - moto_client = boto3.client("organizations") - self.sync.create_aws_organization() + self.sync.api_talker.create_organization(feature_set="ALL") + root_id = self.sync.api_talker.list_roots()[0]["Id"] - new_member_accounts = [ - awssync.SyncData("alice@example.com", "alice", "Spring 2023"), - awssync.SyncData("bob@example.com", "bob", "Spring 2023"), - ] - root_id = moto_client.list_roots()["Roots"][0]["Id"] - course_iteration_id = self.sync.create_course_iteration_OU("Spring 2023") + current_semester = str(Semester.objects.get_or_create_current_semester()) + course_ou = self.sync.api_talker.create_organizational_unit(root_id, current_semester) + course_ou_id = course_ou["OrganizationalUnit"]["Id"] - success = 
self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) - self.assertTrue(success) + self.sync.get_syncdata_from_giphouse = MagicMock( + return_value=[ + SyncData("alice@giphouse.nl", "alices-project", current_semester), + SyncData("bob@giphouse.nl", "bobs-project", current_semester), + ] + ) - def test_pipeline_create_and_move_accounts__email_exists(self): - moto_client = boto3.client("organizations") - self.sync.create_aws_organization() + pipeline_success = self.sync.pipeline() + course_accounts = self.sync.api_talker.list_accounts_for_parent(course_ou_id) + course_account_emails = [account["Email"] for account in course_accounts] - new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")] - root_id = moto_client.list_roots()["Roots"][0]["Id"] - course_iteration_id = self.sync.create_course_iteration_OU("2023Fall") + self.assertTrue(pipeline_success) + self.assertEqual(["alice@giphouse.nl", "bob@giphouse.nl"], course_account_emails) - with patch("projects.aws.awssync.AWSSync.pipeline_create_account") as mocker: - mocker.return_value = False, "EMAIL_ALREADY_EXISTS" - success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) + def test_synchronise__success(self): + with patch("projects.aws.awssync.AWSSync.pipeline", return_value=True): + response = self.client.get(reverse("admin:synchronise_to_aws"), follow=True) - self.assertFalse(success) + self.assertEqual(response.status_code, 200) + self.assertContains(response, self.sync.SUCCESS_MSG) - def test_pipeline_create_and_move_accounts__exception_move_account(self): - moto_client = boto3.client("organizations") - self.sync.create_aws_organization() + def test_synchronise__failure(self): + with patch("projects.aws.awssync.AWSSync.pipeline", return_value=False): + response = self.client.get(reverse("admin:synchronise_to_aws"), follow=True) - new_member_accounts = [("alice@example.com", "alice"), 
("bob@example.com", "bob")] - root_id = moto_client.list_roots()["Roots"][0]["Id"] - course_iteration_id = self.sync.create_course_iteration_OU("2023Fall") + self.assertEqual(response.status_code, 200) + self.assertContains(response, self.sync.FAIL_MSG) - self.sync.pipeline_create_account = MagicMock(return_value=(True, 1234)) - with patch("boto3.client") as mocker: - mocker().move_account.side_effect = ClientError({}, "move_account") - success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id) + def test_synchronise__api_error(self): + api_error = ClientError({"Error": {"Code": "AccessDeniedException"}}, "create_organization") + with patch("projects.aws.awssync.AWSSync.pipeline", side_effect=api_error): + response = self.client.get(reverse("admin:synchronise_to_aws"), follow=True) - self.assertFalse(success) + self.assertEqual(response.status_code, 200) + self.assertContains(response, self.sync.API_ERROR_MSG) - @mock_organizations - def test_get_aws_data(self): - moto_client = boto3.client("organizations") - self.sync.create_aws_organization() - root_id = moto_client.list_roots()["Roots"][0]["Id"] + def test_synchronise__sync_error(self): + sync_error = Exception("Synchronization Error") + self.sync.api_talker.create_organization(feature_set="ALL") - response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1") - OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] - response_account_1 = moto_client.create_account( - Email="account_1@gmail.com", - AccountName="account_1", - Tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}], - ) - account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] - moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id) + with patch("projects.aws.awssync.AWSSync.pipeline", side_effect=sync_error): + response = self.client.get(reverse("admin:synchronise_to_aws"), 
follow=True) - aws_tree = self.sync.extract_aws_setup(root_id) - iteration_test = awssync.Iteration("OU_1", OU_1_id, [awssync.SyncData("account_1@gmail.com", "test1", "2021")]) - aws_tree_test = awssync.AWSTree("root", root_id, [iteration_test]) - self.assertEquals(aws_tree, aws_tree_test) - - @mock_organizations - def test_get_aws_data_no_root(self): - boto3.client("organizations") - self.sync.create_aws_organization() - self.sync.extract_aws_setup("NonExistentRootID") - self.assertTrue(self.sync.fail) - - @mock_organizations - def test_get_aws_data_no_slugs(self): - moto_client = boto3.client("organizations") - self.sync.create_aws_organization() - root_id = moto_client.list_roots()["Roots"][0]["Id"] - - response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1") - OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] - response_account_1 = moto_client.create_account( - Email="account_1@gmail.com", - AccountName="account_1", - Tags=[], - ) - account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] - moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id) - self.sync.extract_aws_setup(root_id) - self.assertTrue(self.sync.fail) - - -class AWSAPITalkerTest(TestCase): - def mock_api(self, operation_name, kwarg): - if operation_name == "CreateOrganization": - raise ClientError( - { - "Error": { - "Message": "The AWS account is already a member of an organization.", - "Code": "AlreadyInOrganizationException", - }, - "ResponseMetadata": { - "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "HTTPStatusCode": 400, - "HTTPHeaders": { - "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "content-type": "application/x-amz-json-1.1", - "content-length": "111", - "date": "Sun, 01 Jan 2023 00:00:00 GMT", - "connection": "close", - }, - "RetryAttempts": 0, - }, - "Message": "The AWS account is already a member of an organization.", - }, - "create_organization", - ) - if 
operation_name == "CreateOrganizationalUnit": - raise ClientError( - { - "Error": { - "Message": "The OU already exists.", - "Code": "ParentNotFoundException", - }, - "ResponseMetadata": { - "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "HTTPStatusCode": 400, - "HTTPHeaders": { - "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "content-type": "application/x-amz-json-1.1", - "content-length": "111", - "date": "Sun, 01 Jan 2023 00:00:00 GMT", - "connection": "close", - }, - "RetryAttempts": 0, - }, - "Message": "The OU already exists.", - }, - "create_organizational_unit", - ) - if operation_name == "CreatePolicy": - raise ClientError( - { - "Error": { - "Message": """The provided policy document does not meet the - requirements of the specified policy type.""", - "Code": "MalformedPolicyDocumentException", - }, - "ResponseMetadata": { - "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "HTTPStatusCode": 400, - "HTTPHeaders": { - "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff", - "content-type": "application/x-amz-json-1.1", - "content-length": "147", - "date": "Sun, 01 Jan 2023 00:00:00 GMT", - "connection": "close", - }, - "RetryAttempts": 0, - }, - "Message": """The provided policy document does not meet the - requirements of the specified policy type.""", - }, - "create_policy", - ) - return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg) + self.assertEqual(response.status_code, 200) + self.assertContains(response, self.sync.SYNC_ERROR_MSG) diff --git a/website/projects/tests/tests_aws/test_awssync_checks.py b/website/projects/tests/tests_aws/test_awssync_checks.py index 6a141759..9989201b 100644 --- a/website/projects/tests/tests_aws/test_awssync_checks.py +++ b/website/projects/tests/tests_aws/test_awssync_checks.py @@ -87,6 +87,9 @@ def setUp(self): ], ) + self.logger = MagicMock() + self.checks.logger = self.logger + def test_check_members_in_correct_iteration(self): # Test when correct 
self.assertIsNone(self.checks.check_members_in_correct_iteration(self.aws_tree1)) diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py deleted file mode 100644 index 52619820..00000000 --- a/website/projects/tests/tests_aws/test_awssync_refactored.py +++ /dev/null @@ -1,320 +0,0 @@ -"""Tests for awssync_refactored.py.""" -import json -from unittest.mock import patch - - -from botocore.exceptions import ClientError - -from django.test import TestCase - -from moto import mock_organizations - -from courses.models import Semester - -from mailing_lists.models import MailingList - -from projects.aws.awssync_refactored import AWSSyncRefactored -from projects.aws.awssync_structs import AWSTree, Iteration, SyncData -from projects.models import AWSPolicy, Project - - -@mock_organizations -class AWSSyncRefactoredTest(TestCase): - def setUp(self): - """Set up testing environment.""" - self.sync = AWSSyncRefactored() - self.api_talker = self.sync.api_talker - - def test_get_syncdata_from_giphouse_normal(self): - """Test get_emails_with_teamids function in optimal conditions.""" - self.semester = Semester.objects.create(year=2023, season=Semester.SPRING) - for i in range(3): - self.mailing_list = MailingList.objects.create(address="test" + str(i)) - self.project = Project.objects.create( - id=i, name="test" + str(i), semester=self.semester, slug="test" + str(i) - ) - self.mailing_list.projects.add(self.project) - - email_id = self.sync.get_syncdata_from_giphouse() - - self.assertIsInstance(email_id, list) - self.assertIsInstance(email_id[0], SyncData) - expected_result = [ - SyncData("test0@giphouse.nl", "test0", "Spring 2023"), - SyncData("test1@giphouse.nl", "test1", "Spring 2023"), - SyncData("test2@giphouse.nl", "test2", "Spring 2023"), - ] - self.assertEqual(email_id, expected_result) - - def test_get_syncdata_from_giphouse_no_project(self): - """Test get_emails_with_teamids function where the 
mailinglist is not assigned to a project""" - MailingList.objects.all().delete() - self.mailing_list = MailingList.objects.create(address="test2") - email_id = self.sync.get_syncdata_from_giphouse() - self.assertIsInstance(email_id, list) - self.assertEqual(email_id, []) - - def test_get_syncdata_from_giphouse_no_mailing_list(self): - """Test get_emails_with_teamids function where no mailinglists exist""" - MailingList.objects.all().delete() - Project.objects.all().delete() - email_id = self.sync.get_syncdata_from_giphouse() - self.assertIsInstance(email_id, list) - self.assertEqual(email_id, []) - - def test_get_syncdata_from_giphouse_different_semester(self): - """Test get_emails_with_teamids function where the semester is not equal to the current semester""" - MailingList.objects.all().delete() - new_semester = Semester.objects.create(year=2022, season=Semester.FALL) - self.mailing_list = MailingList.objects.create(address="test4") - self.project = Project.objects.create(id=4, name="test4", semester=new_semester, slug="test4") - self.mailing_list.projects.add(self.project) - email_id = self.sync.get_syncdata_from_giphouse() - self.assertIsInstance(email_id, list) - self.assertEqual(email_id, []) - - def test_AWS_sync_list_both_empty(self): - gip_list = [] - aws_list = [] - self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) - - def test_AWS_sync_list_empty_AWS(self): - test1 = SyncData("test1@test1.test1", "test1", "test1") - test2 = SyncData("test2@test2.test2", "test2", "test2") - gip_list = [test1, test2] - aws_list = [] - self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list) - - def test_AWS_sync_list_empty_GiP(self): - test1 = SyncData("test1@test1.test1", "test1", "test1") - test2 = SyncData("test2@test2.test2", "test2", "test2") - gip_list = [] - aws_list = [test1, test2] - self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) - - def test_AWS_sync_list_both_full(self): - test1 = 
SyncData("test1@test1.test1", "test1", "test1") - test2 = SyncData("test2@test2.test2", "test2", "test2") - test3 = SyncData("test3@test3.test3", "test3", "test3") - gip_list = [test1, test2] - aws_list = [test2, test3] - self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [test1]) - - def test_get_tag_value(self): - tags = [{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}] - self.assertEquals(self.sync.get_tag_value(tags, "project_semester"), "2021") - self.assertEquals(self.sync.get_tag_value(tags, "project_slug"), "test1") - self.assertEquals(self.sync.get_tag_value(tags, "project_name"), None) - - def test_extract_aws_setup(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.api_talker.list_roots()[0]["Id"] - - ou_response = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1") - ou_id = ou_response["OrganizationalUnit"]["Id"] - - account_response = self.api_talker.create_account( - email="account_1@gmail.com", - account_name="account_1", - tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}], - ) - account_id = account_response["CreateAccountStatus"]["AccountId"] - self.api_talker.move_account(account_id=account_id, source_parent_id=root_id, dest_parent_id=ou_id) - - aws_tree = self.sync.extract_aws_setup(root_id) - - expected_sync_data = [SyncData("account_1@gmail.com", "test1", "2021")] - expected_iteration = Iteration("OU_1", ou_id, expected_sync_data) - expected_tree = AWSTree("root", root_id, [expected_iteration]) - - self.assertEqual(aws_tree, expected_tree) - - def test_extract_aws_setup_no_slugs(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.api_talker.list_roots()[0]["Id"] - - response_OU_1 = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1") - OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] - response_account_1 = 
self.api_talker.create_account( - email="account_1@gmail.com", - account_name="account_1", - tags=[], - ) - account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] - - self.api_talker.move_account(account_id=account_id_1, source_parent_id=root_id, dest_parent_id=OU_1_id) - - with self.assertRaises(Exception) as context: - self.sync.extract_aws_setup(root_id) - self.assertIn("Found incomplete accounts in AWS", str(context.exception)) - - def test_get_or_create_course_ou__new(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.sync.api_talker.list_roots()[0]["Id"] - tree = AWSTree("root", root_id, []) - current_semester_name = "Spring 2023" - - with patch.object(Semester.objects, "get_or_create_current_semester", return_value=current_semester_name): - course_ou_id = self.sync.get_or_create_course_ou(tree) - - course_ou_exists = any( - ou["Id"] == course_ou_id and ou["Name"] == current_semester_name - for ou in self.sync.api_talker.list_organizational_units_for_parent(root_id) - ) - - self.assertTrue(course_ou_exists) - - def test_get_or_create_course_ou__already_exists(self): - tree = AWSTree( - "root", - "r-123", - [ - Iteration("Spring 2023", "ou-456", [SyncData("alice@giphouse.nl", "alices-project", "Spring 2023")]), - Iteration("Fall 2023", "ou-789", [SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023")]), - ], - ) - - with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"): - course_ou_id = self.sync.get_or_create_course_ou(tree) - self.assertEqual("ou-456", course_ou_id) - - def test_attach_policy__not_attached(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.sync.api_talker.list_roots()[0]["Id"] - - new_policy_content = json.dumps( - {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]} - ) - new_policy_id = self.sync.api_talker.org_client.create_policy( - Content=new_policy_content, 
Description="Deny all access.", Name="DenyAll", Type="SERVICE_CONTROL_POLICY" - )["Policy"]["PolicySummary"]["Id"] - - self.sync.attach_policy(root_id, new_policy_id) - attached_policies = self.sync.api_talker.org_client.list_policies_for_target( - TargetId=root_id, Filter="SERVICE_CONTROL_POLICY" - )["Policies"] - attached_policy_ids = [policy["Id"] for policy in attached_policies] - - self.assertIn(new_policy_id, attached_policy_ids) - - def test_attach_policy__caught_exception(self): - # Error code "DuplicatePolicyAttachmentException" can not be simulated by moto, so it is mocked. - attach_policy_hard_side_effect = ClientError( - {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy" - ) - with patch.object( - self.sync.api_talker.org_client, "attach_policy", side_effect=attach_policy_hard_side_effect - ): - return_value = self.sync.attach_policy("r-123", "p-123") - - self.assertIsNone(return_value) - - def test_attach_policy__reraised_exception(self): - self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123") - - def test_get_current_policy_id(self): - self.policy_id1 = AWSPolicy.objects.create( - policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False - ) - self.policy_id2 = AWSPolicy.objects.create( - policy_id="Test-Policy2", tags_key="Test-Policy-Id2", is_current_policy=True - ) - current_policy_id = self.sync.get_current_policy_id() - self.assertIsInstance(current_policy_id, str) - self.assertEqual(current_policy_id, self.policy_id2.policy_id) - - def test_get_current_policy__no_current_policy_id(self): - self.policy_id1 = AWSPolicy.objects.create( - policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False - ) - self.assertRaises(Exception, self.sync.get_current_policy_id) - - def test_create_move_account(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.sync.api_talker.list_roots()[0]["Id"] - - dest_ou = 
self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") - dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] - members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), - ] - - success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) - self.assertTrue(success) - - def test_create_move_account__exception_failure(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.sync.api_talker.list_roots()[0]["Id"] - - dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") - dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] - members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), - ] - - with patch.object(self.sync.api_talker, "move_account", side_effect=ClientError({}, "move_account")): - success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) - - self.assertFalse(success) - - def test_create_move_account__no_move(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.sync.api_talker.list_roots()[0]["Id"] - - dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") - dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] - members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), - ] - - with patch.object( - self.sync.api_talker, - "describe_create_account_status", - side_effect=ClientError({}, "describe_create_account_status"), - ): - success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) - - self.assertFalse(success) - - def test_create_move_account__failed(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.sync.api_talker.list_roots()[0]["Id"] - - dest_ou = self.sync.api_talker.create_organizational_unit(root_id, 
"destination_ou") - dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] - members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("alice@giphouse.nl", "bobs-project", "Fall 2023"), - ] - - with patch.object( - self.sync.api_talker.org_client, - "describe_create_account_status", - return_value={"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}}, - ): - success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) - - self.assertFalse(success) - - def test_create_move_account__in_progress(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.sync.api_talker.list_roots()[0]["Id"] - - dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") - dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] - members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), - ] - - with patch.object( - self.sync.api_talker.org_client, - "describe_create_account_status", - return_value={"CreateAccountStatus": {"State": "IN_PROGRESS"}}, - ): - success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) - - self.assertFalse(success) diff --git a/website/projects/tests/tests_aws/test_awssync_structs.py b/website/projects/tests/tests_aws/test_awssync_structs.py index 3ecb722c..3915bed0 100644 --- a/website/projects/tests/tests_aws/test_awssync_structs.py +++ b/website/projects/tests/tests_aws/test_awssync_structs.py @@ -1,11 +1,7 @@ """Tests for awssync_structs.py.""" -from unittest.mock import patch - from django.test import TestCase -from courses.models import Semester - from projects.aws import awssync @@ -161,27 +157,6 @@ def test_repr_SyncData(self): def test_awstree_to_syncdata_list(self): self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist) - def test_check_for_double_member_email(self): - # Test when there are no duplicate emails - 
self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) - - # Test when there is a duplicate email - self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G")) - self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list)) - - def test_check_current_ou_exists(self): - # Test when current semester OU does not exist - with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"): - self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022") - val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) - self.assertEqual((val1, val2), (False, None)) - - # Test when current semester OU exists - with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"): - self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021") - val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1) - self.assertEqual((val1, val2), (True, "98765")) - def test_AWSTree_equals(self): self.assertEqual(self.aws_tree1, self.aws_tree1) self.assertNotEqual(self.aws_tree1, self.aws_tree2) From df330ad9c99ff8149ef65546d219f4327ecb861c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filip=20=C5=81ysak?= <92109241+FilipLysak001@users.noreply.github.com> Date: Tue, 6 Jun 2023 11:23:38 +0200 Subject: [PATCH 28/32] changes for resolving security (#68) --- website/projects/aws/awssync.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/website/projects/aws/awssync.py b/website/projects/aws/awssync.py index 6939751c..b6252fdd 100644 --- a/website/projects/aws/awssync.py +++ b/website/projects/aws/awssync.py @@ -163,6 +163,7 @@ def create_and_move_accounts( [ {"Key": "project_slug", "Value": new_member.project_slug}, {"Key": "project_semester", "Value": new_member.project_semester}, + {"Key": "course_iteration_tag", "Value": "no-rights"}, ], ) # Repeatedly check status of new member account request. 
@@ -187,6 +188,7 @@ def create_and_move_accounts( try: self.api_talker.move_account(account_id, root_id, destination_ou_id) self.accounts_moved += 1 + self.api_talker.untag_resource(account_id, ["course_iteration_tag"]) except ClientError as error: self.logger.debug(f"Failed to move account with e-mail: {new_member.project_email}.") self.logger.debug(error) From f5974ceb8da79efea6029ec70efb7b2135cfe491 Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Tue, 6 Jun 2023 10:20:57 +0000 Subject: [PATCH 29/32] Documentation AWS integration feature (#70) * Add initial draft AWS integration documentation * Add pipeline flowchart and remove create organization * Apply review changes --- README.md | 67 ++++++++++++++++++++++-- resources/pipeline-flowchart.drawio.png | Bin 0 -> 130607 bytes 2 files changed, 64 insertions(+), 3 deletions(-) create mode 100644 resources/pipeline-flowchart.drawio.png diff --git a/README.md b/README.md index e2561a30..7cb75dee 100644 --- a/README.md +++ b/README.md @@ -16,8 +16,9 @@ This is the code for the website of [GiPHouse](http://giphouse.nl/) powered by [ - [Questionnaires](#questionnaires) - [Room Reservations](#room-reservations) - [Course, Project and Static Information](#course-project-and-static-information) - - [Projects and Repositories](#projects-and-repositories) + - [Projects, Repositories and AWS](#projects-repositories-and-aws) - [GitHub Synchronization](#github-synchronization) + - [AWS Synchronization](#aws-synchronization) - [Mailing Lists](#mailing-lists) - [Tasks](#tasks) - [Development and Contributing](#development-and-contributing) @@ -122,10 +123,10 @@ The room reservation is built using [FullCalendar](https://fullcalendar.io/), a ### Course, Project and Static Information Admin users can add information about the course lectures and the projects in the backend. There are also a small amount of static HTML webpages with information about GiPHouse. 
-### Projects and Repositories +### Projects, Repositories and AWS +#### GitHub Synchronization The projects module provides synchronisation functionality with a GitHub organization using the [GitHub API v3](https://developer.github.com/v3/). For this, a repository model is included in Django. Project(team)s can have one or multiple repositories, which are then synchronised with GitHub. For this functionality, a [GitHub App](https://developer.github.com/v3/apps/) must be registered and installed in the organization. Details on this are explained later. -#### GitHub Synchronization Projects and repositories contain a field `github_team_id` and `github_repo_id` that corresponds to the respective `id` of the object on GitHub. These fields are automatically set and should not be touched under normal circumstances. Teams and repositories on GitHub that do not match one of these id's will not be touched by the GitHub synchronization. If the `github_team_id` or `github_repo_id` are `None`, it is assumed the objects do not exist and new objects will be created on synchronization (except for archived projects and teams). @@ -149,6 +150,66 @@ Synchronization can only be initialized via actions on specific sets of objects Synchronization currently does not regard the role of directors of GipHouse. This needs to be configured manually. Note that it is however not possible to add directors manually to a team on GitHub, since they will be removed after each sync. +#### AWS Synchronization +The projects module provides synchronisation functionality with [AWS Organizations](https://aws.amazon.com/organizations/) using the official [boto3 Python AWS SDK](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html). +The AWS synchronisation process only applies to the current semester and is one-directional (from GiPHouse to AWS, but not vice versa). 
+ +Each project in the current semester with a team mailing list gets its own AWS member account that is part of GiPHouse's AWS organization. +Since all AWS member accounts have isolated environments, each team is able to configure their own AWS environment as desired. +The AWS member accounts are restricted in their abilities using a pre-configured [SCP policy](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_scps.html) that is applied to the course semester Organizational Unit (OU) where all team member accounts reside. +For example, the SCP policy can be set such that only (certain types of) [EC2](https://aws.amazon.com/ec2/) instances may be launched. +Such specific configuration details can be found under the [Deployment](#deployment) section. + +The entire AWS synchronization process, also referred to as the pipeline, can be initiated in the Django admin interface under Projects by pressing the large `SYNCHRONIZE PROJECTS OF THE CURRENT SEMESTER TO AWS` at the top-right and roughly goes through the following stages: + +1. Preliminary checks + - Pipeline preconditions + 1. Locatable boto3 credentials and successful AWS API connection + 2. Check allowed AWS API actions based on IAM policy of caller + 3. Existing organization for AWS API caller + 4. AWS API caller acts under same account ID as organization's management account ID + 5. SCP policy type feature enabled for organization + - Edge case checks + 1. No duplicate course semester OU names +2. Create current course semester OU (if non-existent) +3. Attach SCP policy to current course semester OU (if non-existent) +4. Synchronization + - Determine new accounts to be invited based on AWS and GiPHouse data. +5. Create new AWS member accounts in AWS organization +6. 
Move new AWS member accounts to course semester OU
+
+![pipeline-flowchart](resources/pipeline-flowchart.drawio.png)
+
+After the synchronization process has finished, success or failure is indicated by a green or red response box respectively.
+Verbose details for each synchronization run are logged using the `logging` module and can be accessed in the backend, for example to inspect causes of failed runs.
+
+An example of a possible AWS environment in the form of a tree is the following:
+```
+root
+│
+├── Fall 2022 (OU)
+│ ├── team-alice@giphouse.nl (member account)
+│ └── team-bob@giphouse.nl (member account)
+│
+├── Spring 2023 (OU)
+│ ├── team-charlie@giphouse.nl (member account)
+│ └── team-david@giphouse.nl (member account)
+│
+└── admin@giphouse.nl (management account)
+```
+
+When an AWS member account has been created for a team mailing list as part of an AWS Organization, an e-mail is sent by AWS.
+This process might take some time and is under AWS' control.
+It is important to be aware that gaining initial access to the member account is only possible by formally resetting the password; there is no other way.
+Also note well that each project team member will receive such mails because the team mailing list works as a one-to-many mail forwarder.
+
+By default, all newly created member accounts under an AWS organization are placed under root with no possible alternative.
+Once the member accounts have been created, they are moved to the current course semester OU.
+Unfortunately, AWS does not specify how long it takes at most to finalize the status of a new member account request.
+This introduces the possibility of there being a time period between having a newly created member account under root and moving it to its corresponding OU that is restricted with an attached SCP policy, possibly giving the member account excessive permissions. 
+To mitigate this risk, every newly created account comes with a pre-defined [tag](https://docs.aws.amazon.com/tag-editor/latest/userguide/tagging.html) and the SCP policy attached to root should deny all permissions for accounts under root with the specific tag (see [Deployment](#deployment) section for more details on SCP policy configuration). +The tag gets removed after the account has been moved to its destination OU. + ### Mailing Lists Admin users can create mailing lists using the Django admin interface. A mailing list can be connected to projects, users and 'extra' email addresses that are not tied to a user. Relating a mailing list to a project implicitly makes the members of that project a member of the mailing list. Removing a mailing list in the Django admin will result in the corresponding mailing list to be archived or deleted in G suite during the next synchronization, respecting the 'archive instead of delete' property of the deleted mailing list. To sync a mailing list with G Suite, one can run the management command: `./manage.py sync_mailing_list` or use the button in the model admin. This will sync all mailing lists and the automatic lists into G Suite at the specified domain. diff --git a/resources/pipeline-flowchart.drawio.png b/resources/pipeline-flowchart.drawio.png new file mode 100644 index 0000000000000000000000000000000000000000..ec7e59812ca502a7ea03d0741a079a65e1a940cd GIT binary patch literal 130607 zcmeFa*{%OGZ7lJ9%0jGQ^6Gh$H_lo=s&n<`FH=Beboh;L)`nx2mET%oeuH&uJ!(*bMG7ZVjCZD z5An0hn)*FW{7>YcHXk?oxs5-rM)8n^Fd*?A$$y8@&@uWU**DVQOYH+dD>Bc3hd<&U zJ!Eg>rM{nLxbv?{nA6YP53J?5BJJzmKKraD@#(jz+C*^)9Y@ngw>U*oADu??$CW*{ z|G1DmKgW3wi!NB%@&*XVwbCQ&pvIEruIwz`>V{edDYP>&cM3N_iav1(*iD$ zfaY=E&u`B0Uws3Y(!MI+cR$8#{qHCCo@>{tn0+SJaQ7G=4~~u75T~{3&WA+`Kn+ z|EBI+AEWvI1VKS1_VK6UK~VhstbdXM@5eX0H}}7_7D2tW>-fD6z)=SXl=Lh4-@dQ? 
z0}%~CL{R=-r0!GkBh~O5Nzr1)jo z{TywT;4cc|=iwy5>G<2q`zI;B5_r1r{V;_&SnL^ow@sOaNOOVKC9v@IT(vBG0^Qmn z@288axa;F5eAJykW5nN~X27xCTNS)j1NG?&{QgwK{?iivP7E@r|KHls{=CS2i0P-W zR@0AF14jPSs)jcyzETbBGxmEV^ZzxdfjxnJF1Y>)^uXKG{oS|-ZOo^j=vY5 zzo9SjYpnZ!fTez8uiBF2t*cs z>^1%#jrH!+7yJDS>F)O?q~r$&{i2ZW-T|S1dLiAvoq^wtn*G%j-2LW$+22XczA^HZ znzewx(Q2@p>ktCG|5VzO{9FM36GTCNZ?gCHTz*k|Qg6%uPp>^ceYvk>Pi-Jn3cDD; z;NX13CjNON55;wT7utyO!G~# zuhhtQ#I}U}9VW^KWXQh+!?uqX+J;*ZXn9LLi#c2=heR!_~6y|XwiqY&$#Pg!|e_9k4FP@^VJ*(*nxzd zSP0br_4_)dAH*f&hefikG})$Xp0Ku$;=BJ^>GR=a{>N_oke}PPpDqvi=9jOdyPump zUvK<;I`^Lf^7r4Z+x*US>GlopFSIuEI3y{2LjMaE=b1KD@PY^adW-X0 z7?eHsUZA}V3V!$rgYv1WzcwiNrJ6g1|#j`<@bh2FE|^+d~=A7`xiYbFcIEc_?6 zJUF&_o0^|9J)c(QAKdhOhB^NO*q*Nucl?FL`^U=t7hICR;Uf6Aa7pmP=VJBuzsTLb zQZcn!MoRXm6IOYxlVc;dSQ``sR-=nETsbov>3`Ef7=7H20E#Q^8@ zs|;kn%o3ESA3|z1Y`&8WN>t4t#hiZMsIn+R8PN=cUSRhbJ5G4eE$vXU!>3BH2{i#p3`OJj-DG&A641)lIX~JMX zN}PHd^H06?G3n1yn)|)`to8uG{FPb0(DJJ}{XBGk`}~aUTN8g{(;w$(|LxfnCq6SW zzwqh1@Zgh!zK+fPJSTi`=zl$Y`Wg2B0iXVamirG5LB(}MquL!%nsrkn%?MWnD9(Zl zNH6|HE%=Ky9iIW2UtCapMk9a21;t;*iu>&k;|wJA-tqoq`VZ9nJF?LKBKQAz$S;}D zpF@5i`QI_*TLFJDK|ha~{{VIU4n_Z*7(hAKT4e}wvgjOreIw1Xd6^X+e1TOM>AL<)Fz7j%PCs2u)T^QXQ zv7?wYH2l*nC*_?4K5^(PJ+ib1f0!#hR1&(tUK!cFs_?0^;n#^DdAe#x)abZiC2w3z zs@91s;T(d?w3GJB<2E~L$(OEOut8U;q^P9{$27~7JVi~7l5TXTbn(**@h~-IqhkeW zR8HAzZgo9IoMIAS@R@uUznWdBVgZ0B!UZtdyC6LWC-J(=yX`dkQvOI!?jVUpPVQi{ zO3-(kQ*_wvc9m;yMoQ}LdCbgba0&$p{udk3E&KgVG4Z<_Tagv?N7jqC74)t+5BM!L zyT(7b<26(8SSl zq)OJY%XmX6upHai$NL?cJY~9>Sn3v556>XBz1dKj@}MHArSy78$vBZVTju`E?Qj$6 z>@Fly=N>RqoIa=cMW*%(reN0!w(XVcK`-#OiFN7e9CY7sb6=p_K)2hZ^wQiXcKdDD z&}CvV?e=Cfgs+R{_9a9_Vn8o1A!YcR$TO+N`<5zoi=rVA<#X*u?g>-{b93(4UVlmG zbJ|L<6)KF%J$YR{sAmZx50451;R(8^gpMsv6CX>M)Z1t7eoHY!(~eixBXn$zaz_zQHilLZ>2z|Ajpm_* zOLnCVOxyEW8J?mCCM3R0A7%L>?{~IBTxxzpc;>Jir46YqmN4I-97a%`3oa8o2Szmx z=5#AI$z~GvhPZjfMr}U$7G~_3;(<l{y=KbaNM54)7C@uctWu?L%4U3IFKBfZpVJ7 zpK>QY7YaOS8A?dhJNaQx2W3a?*jXc@;bzBIkMz||Luh%=13PHtR=aY?jm_fy%S#e9 
z=2}PPd#q6g+&^I&ZO9KbC+JG*f|x7`p;qQg2C|@3W>MV6YH?XzTlQl}&}JIvPgu1J z>Nb_QSIUd0LWR9~re<#3U3?VIkvf4ovmG3y*>9rL!$Z|k&*{#LhQJ@BN4JD<0e?9S&WgNhyET^_XO|9pd-0@qfIeOCd;aE0MO=)-@zpRGd;syX7rl*K9Jr0Yzy^`h;pq9+;d+b6*eAvL2~_lDJ=`V zebI?GkcaFXkl1ESb0c7a)a^HM-Xy1LBYIu8>3QLNe6>kXnH(1F^ktkyLpds*^3bf~ zVeoR|U@z8yXUd;W z2N^EuXp-KOIB4umq{znDy-KF#W33TnrN&3S#|$>cyQh7=;<0dxN>8WfewXJlT9HpEhUN{NH*r}&-%r;G0bWJLRSqdtW*;^$Z zvC<8febaDQ`N)JybiLW$6~@^f^VryGIUG8`=-uKoWdJryptzTATDim0O*w1)xp!tU zSTgRJ2}YR(VkH&fi7~FN&n0Jh7P+z`)RYOTmtY=D-89TnO^n^mDxSqG@K3=y5*kT% z%XSOb=u~ilQr98PYu~9Xej^XdBzRWJ^k6CwEhC3cui41yQ{j8@-5FJ- zrn(@ZQcMdkNs||28C*>)%&#*w3tAI+;?SEwEvC;sKIM-vj!+w??#?Wecv3ipr;O+Q zZu%@GVRcQ3mr6Lk=r0@H+AHamf_=t9E(o8dbUPQK>iZC_+elpgK;DK_X?rdrzz^`_xfjQjlE;L< zYZq>+C<;qOHYZ@c~C?>DLkn3IHnV)JK0xm?*F$dyi7!@(#%#iqO7()j?E7Q4q#Q8icu8-xt!@}%BqZq)>iY1Yq{p67!e_1W zaRT3?YerqAgpy9#=1RA>cqu|iy4v2!9Uf;-@!%Zw3F}9^Jy&|bE`&`_l}(Reb`{Vq zA;YTzy#?I$xxCrfTH2Ca`a1hul=b&M?~N$w&v|-}C+wxQLw3{d?Cb{2<=!n11?CbF z_q)GcEO(Q!n`aLg-|MO7kDXBCcsUj7Nw*1xZqp-}afSnvZUKk5v!W%c%h%px69f8% zI#70;HW-Zr(jj$YpjT-_VSRGm^o}$%>LwctsTk`W37TFK;-MvvvwMQs1e}A5Z23r) z)5g6}ZkZVg#jdyU`T3d6yo$g1s}x zW9%KLWK{QUD&GuyuIN?<=jp0E86&IGa(EuCR}sGux9vI6vp{@>V;OE5j}sn}2U=ou z6_Yo;*B~k2DZ_1jL+P^1j?E}3rhM~H*_?>sGdpDj-5nciDg=TfPCg>{E>n;Jb;pmo@5OUsaojcnDfoD^-j?L^}~Zq6WT!g<=A zr_CA8>HrG$PXP>%HW9F@V9u|2jCCrkOG$Uq+dVla*TXFA{9v;`9lcb)-7>9ia7{;S>} z^pca#nL>K|O&vZrA(r*KvE0NKeUOBj9*|)T_474)Tr{+f6V5@guLrx`@%>VJcv<)4lDPZYI$ zlmF)g;ak-5Th#Jf)bd-@@>|sMTh#Jf)bd-@@>|sMTh#Jf)bd-@@>|sMTh#Jf)bd-@ z@*g^CiGMkA_cLDfyA4|IVe=k^>V7YUSi4_aoV|yb_-`D8{5yr6-xU0PN87pqB}Gke zS~|g=C5^bv3>CQJHiQZW4^sT(nwf7`tqkj1DQClIldC|xz7DGc z9#qCQs4k?;(TU2^i^>^^YA3Tk%Slv~WrQh1%wZYa8)Z%%HI|-qPR?WrQW|n*j9EEZ z?@J5?V*wn~D_mMC5X+apV3{w$#xvIpCo&?Vc|)r6W(ccrQ0-!)TZfN!Fjg$$T~MnB z88Xl!iQb;(13VGFqD6fLq*9+2c~m)i2+3OwIIR@@y&C?V4gILR-Qak}?=2($9dBZ&x`%n}xpJfVwS<^Z1rLiNSWfbNubf?Y~dE)5x+9?k^ zH0KkSymVI`PkGX*88x3c{-v`ccFH}}9=?Y!wL~JNd@@e z*$m+wW0kc+;DI!yW=n z(Oh|iSGW(b0^9%xJP&-xB0R#qb)JAb3-J++_zHanIwPJyumUdEzd+xshiI`uIRDZ< 
zLo@?EMtq6jU-=sNXysFY1!%U?9Km7(-&!l(5j?05fDh=p;s)Vee&gkmqj6UFfbXF% z1P7we1hhoBMe>7q0eBu{4AD9p0S=f~zVa<~%7q--0$(_YzVIFO5AbF0&$;o^N^h0Z zIAb&o+5w&^G%p!=6Y&Me(aIM9AK(mR6L9?@A8G!<^DF*<#}Xt1fU^SVhrxFL7ScVyBP&k;9iU(6AMl8M@FR?i+I(CCxIrcm-dEnf06!x=vg$05 zp9Jv=%mKcm{xhH>uP(rcFh&An8=Z!D4e-0poijieB%Kb|w7N-bkOzPVbWH*CHi4H{ zod>)QFtNbPt4_eC5B>yvBE5#-Mmk~T8^pt58h51v5!VV(1b?kW%spdX~~Fzkbd z-}9-?B7Cp%3its&a}&TP(D9RpR=tbpi{@B#>Zg8Q^(*3I;4e-_`gs#@JfeB0AzrIK zdD4Sx3g8TQ&W3hK&%>OcKfwD68Ig!k`Dx{&;2AUz*7S1bJg2OkABQy z?ei0s70+580bM|E0KaI+HXz-ywg>!v>I#7UM=&6{TKhTKLIb?Bc+1Kw!1sWoRR^s$ z3gifVUu75W0sSpB4|sO{yUrKsxQ}Zq?N(eO*?;Q-g!2*fGVnL(QKY}#^aCD5Is$lc zr61A}hz7`xth((z7sNZDi&p(DG^-9pa`HhxbRF=y$|um5|K9ek_y%2wbOEwesJ(5f zU~iD!Tmry9=n25H2fU8>63q>0jbv_x<8zFUd!Qo#o>jLZn}XzevbYc4MtVfe9i-Pg z9oaKiecK)+gV65>?cp=hNodX=_T^-+x)8<$zhJe2@co1Lk(@$1pyjF$0f+1Nm7joi z@=FIB3ino-hi5-r2i4$!n^>FplcD2 zL*L6=mXPj*XIEMSKcN1R&IOrrRiGErWk}zywhOK!J+bl@?k#X{(GY zB?r9#whYEbIA3`Io&%i(@I${~SCMW-uxBIiI@s`)UNEN&=|tzlMz8X{>J6apr!Nuq zAPb1*V0+Mb$Uph?S9qAqhi_nmKLB&Vk=|YHDjEy${Gl6xR`3~ML3Rk~9DoVhtvGj( zz6G6w=mqd1odJIVPUw5J(V$B|Xa+E>a*pH+bO7`rgX|-o^*R`Hr9F%feW9^O&?R6i zfVVzq3-hT|#Mh{AuoEjEgWg3x#_Dg7-m>BcWE0?6GkL~De5R|Ovt#$I_I(GTqADmOel=XAhN zfNz*T*l46*;W>~?&sc_46wCz+cENBb*@r1mS&!<%8!rPBdcFI zBmP1D`#J_Z2R4886M=6L&#&tffVqH`74nh5p9UC!rZNt&0nZfxC*bI7J$G#~S=2i3 zsx7bvg4GwDoxWNheYI9n&JvSGVv?-uLR%(5wh<3v@RvmRU;UodKZmse(lKCYX2z*ishMEF^C1KbDyVfAB>d{L{P0P7G8 z#zk`kTp``P(td&YuCPIWV1HH6HQ*D$x@eUzE`Wn{$Em;xXoZ4SGoU>`v?E{|vPn(kvI_QU!88E5f7;H(n?g+`y4+W+Vq2@#itf~w}< zDDM-eKF@o3M^LHHm-2m;_PO2u>GRD%UuS;)xS;qSAoKH`J^1?$!Tmy+u8vA^87Pce z%XDr3);VzDGLo(1N~Yyq!z7fH0fmNc@4%ESUMSy%_EPRYZ*3iA23a;&NY3>pcVVP# zk(5jd2~X-!;_~Ly${my`UG9-p9hGCd&=sEfKBXm7|9S_Hs>&=dqg) zcazv3PVPAXo)E5RuF5#=QK<_->ErfrIRtfwGSK1Yyu?{FKF^jd_-t1%>NcP%2P!8? 
zrQJVHqdH%rnRoZ6*P*_*v1nr`ja-;fGB~zf!Y5vD_VMHTJlQA>eQQB|AoHx*edym4 z997()L3XGDZY%B2L;gzdS*>jEhlBNmi|}(N0NwC+t`Uesj2+0s0Vri_SEB^v3c}zX zrgMB`$8v`Xff-^h?>WLGyB*LwYMBceIlDP1CkVGAOu}!iO;a^nf_@IgXJt|p02+8!^=8&}iJl4`?7K}nU>|Xku4CCPtBL>;nSCbA- zEUDE=qz}u)K2s=F6?`Wp(2Go&oKL;$y~e52?mB9QoKjK6&+?6Du9*60jNtB7W6Gm=O_tx@)`o#!#R`z$5sCXx{9*?%TJxt|UA8BU5G4fca z)lE@uk>ytqm&S$4wy#&G>ibJflP>Ytl$TrJ?+8t+hsWy{#ZcS84ACKy>Wo#Hyt+D= zsnaD?dZtFpg;ib3Svs~yya%!*gNq$)&CC_H!FX<&kyrqb{ z^Kv5F-Mp#K@I4>Y>#%TDxkKz~>i*pUI(cVQS!G{_VPE@v)OYn^ITc!K7X@egj3Jv8U`6APTHnu%*z3LjNPrn5uR<%+j+CIOYq`OAuzUw zHWbp?wjWq<*|dYk-(rS+4CTEX>$aUO$G_!*qhr?4pBg?c0*jc#BOXsjROp6*81OVR zlKT|5`=wy^hp{X9u7@i5-8|^dU=No#I0>4mnrrm9Q z?VIjqE)RJs%GWV-4d%p~65~Q~{XM5%nSBXPi$vV1!{cH0Ndv%>W!=}0gVOJwIcFzV zPbkaUK&4iF#%E_Kcey?y>YK%GxU}Mo2XiYdaT+U>|AA;TXCPO*#1B%h? zds8s(Idu2p@$j13mLqlRez<8nfttI^bso3RgC z_V%i)UGNmI__^$xhrdz%%~kbx22D5=);%9wog8*8J=u}09F_-QFA{j_RoPJAoka?q zq6KB+$61tfJ`-JR;KP>kS%NydIG{(kZy|`n?j)~y*+DNcho>$VvK8>eW-QANH=XnG z?r4tpx=q-)@D!AmR{9y^Tq-?1l3P{WuT$(EYzPUeWdq3-sd|V`qjJ zks?o*gW9%Zi@U?hqbpO{8~i(1H*P7pS+tUEw^7(?ebjD=L|Ky2G~V z9M`2?rqLmq26o)0p8G|6&*N;OEYG)RrO!)G-xo1Pf4S%(;Ta;Cx#ul0EMm zs_(AaBVTx&;nK~meA&?B@U-Q}rtm2B(W3HJ*SLsn8|1gX!`!2y;gXWNN`svm`FzW+ zmOyh6x7mhlspK)aJa+L^PHLZ6*pa=_ryje}F13|6KC*vGcR2Z-Yq#ZNTXlzpxIB4G zL}tWDl=>)#s&*j8<^kBG;&rVqxxWunH^a-_-Lwr!o&V$q25G-i4Exi`blU=9i&QPxU0jAt+_k}C`8q=;2H{mK&qV*!WFp847mQk(kRrC9YCxnf7N|;oih1dR=N!AjyCe+?) 
zFE4I(%#oHJ%!SWoyI5=+0Nf&OVgvf+RXppN4NJzcC-M%eu|7k=U!7h4n1fA`i4KKPY8XZ(f|Cxe`Eps1giRhKV9& zOubE3Qc_sHk(dfnzZ&O2eee z67Qo8+*q{wQ#a4KXQnn;TyDatgW6O_Xak#NpepZdijkuf(`8W`7UMEuL%jC0iKire zE89`4=}0gr{9;X@mihQ#9hcGbo#QryB3&N0D);oH>i)RglWCv9F%N)d<4!U75sK=S zvWYh!K*I4>WJYDM(!*3Ih1njj;pL&}DjSshf})p}KF!$$bD(;azSPE3K6yj<8ZoQE z`11iOajl0lVaztajXBm#qoQwLM7_Cd%oyc9W*zsSujc6}_!!r+J}-KzwO75?Gy*n} z-I`X@bW{@E$!NA!;YO&fDxqcW0G`S;AigQ-7vn#W%L#`M zD1sgo+JGpC$4#7VUF{C82X58p>8_hCy%}D-XG~$_zxZ?u`U}c3tqTPwAzFi2-4d-oGA!Wqo`yq8j_}`PcaJD}Xn_dl4d^LF%X@!5 z$oBvoOU}igwuTvQmgc^3E5_w9Dk*lXG_H~1RFtE6CDq8}(BV109YP^3lO?9I zhiY@_7F<&}*M)z+Qkj;$s+GDy&jySV$+9>s*OSSn+*WLNpe1&;SwC36c+qemPfyWs zE>V0bwDgLZa9|E2ErM^1ZjjvNc1P~a>Wdr%Vdo{JrW9h3a<&aPoYV!V*1*jPo(U?V$d#T2=zT4PfvYCjdxG< zupdGJ8qvI@3wZsEc-GvdULizb_^Pt@EEXU;F-?l1$x$fyZB10q-keb6`_K;M$rSQ?bt2h=|>eXz23_N#(bC;%3R7MFL|}%B*UO( z(Hz6FyvmAOU35zIng~DZEM)D_MR^;QaP~=eAn3v_k^8oLnDF~-+gXkIgLNOnY0JWu zMw2?^7C6#NmU5mrdK{}i*R|-1HtrD6&v~^(Jjz?<3ou19d4` zNRir1G=8s{(l%(rknyI?>t}pw(yegQk*BC}N6Jz6g6&F#(#SEg7=!i_ouY6!S-AKN z94D~L($Ykf&zbF`@H{vk&5*Y)T&S+#$&2N)gX0~}qLKD}gliL%JWZS4qqsX(c!W zhDkKz$|DDy=L0p}Q)Jf%Ir+-lY`1T>lC;<7hr{VQgr8j>LS3Z87<%nr4hRKwK$UtX zsP#>G$OE;XeCtVX4!n906S0!1vNvuDt(g6p^U{zHFfEPgl5h*3L&tV1p^xbCO;EFzI8IO`VDrK4Xg2%e2SqM4Q~B zq|f_#lnDHAWJ+~+HHu{1YPyq*94nq{!#*Z`QSce-(nGGFZR`vi_JK3MsI75oE@$t| z8rehyLHM|k`X;Zhj14iAi=D~I#NjICn)-_Y!*DH%&GusYFh}yJ8DV2(p#B;DD)dfx z7O;xC$0(~_|q^yXi$1z^8 zmo?p`Il(o%?gswd&nq~%@$6V~&9P2&Cf>1=s3UapD zxBEGO^Ncttvn0hWo}IYgs-jp<`*cS!jcShM3}+@nHqVt;d2SBn!`|LY_^|hnI4*AO z3vo=xB6a-4-t2Z0apse@Qeyo^gM(q-QshBdb$9odZ2KrxnviK=f0G0eKYN9Hi}=Z^ zSq(L#Pvg}`A26cX6|QuFQ*^xSt{YpPtK*wWG+;)#K|`wlRl2)Pl|H=F;c+0ilydtk z@+CMye_cFPe#WMjncz-z3y4_V;B4X?_BUTmuzXH=Q2(?!kEZ*Sp)gAXRX75^HrkN6mw4=xw6N>LyUhf_8Lz zYpD8ks>YG3Ul1V^Ao}TVhj_!lxg`TxX^oa&?4wBeyWW?=>rKx_qYjb%=CB>1pjcyy zmUCnbpT+~=L>To29Px%~t8ME-cm(ba^ufK83%}xn#NG3jh>2=<-z!PIlv*y1Du8X(CdC*8HHj)1nnC5*jN2TPTkGF1p_G%w6Cgu`AgWwBYx(kXWjD>|Msu>X;KU6`KSl~%*G)jbzpnsN=o 
zFtXt=CvJOkFlp|xPDpR(MaJM@J>&6oYS6Z<`k-O;DG+I^#uY7K$Y4~-!l20sB5BTq z&cIUZ+`tX#h%TA4sp@r4L2-RidwWr~6rJq^eIj8c*n$kG6BE(jQ{Pr|hhdDP2MwkkJ zAscho%_bbrv>4uHf3}?vR9%=s2z$_hvz$0=hyQDOYRK)jT~B&~U68 zL<%=UK*guB9P{DTMl6~n!_wzbx-{Imk05z6k=i*e0tv+2lQ3}n_%`eG5$Qx)PK)c;wa*tijP!==GtJ81%6EH)2)*%^=Ibuh_kyEP;H zs+bhJ3r64>EaT4ORliU7eCLP9kOzKY#p;-GcS>H`{zRWT;}#z~U@~Eew^&1&cZOdM z<{?eRf_C~cH$+;xJ}Rq!f+YNkECksuTxC0>6F z2ity*x6-)W?b5{xX?|@U*PHB?Y~2tI-e^76Hq3Of zLr?{9B&-oMc_YE$i+-W`>tVA`u(KOt!#?0XWe?*C=RymZ*3!?ySOp0qp4q&m%A-cH zTWeoZyWW*w$4e6Oom%^uqX z*N3>9kLN=o)uVA* zUX~(mFK$)TR&4CsvDT_P#xqYi$JAyv&*9$OJM(p>2lwI{)Po>8%Yt`zVifyq_PG#? zXSAY&$p;*nAK>GQ+>z6;sD=jQf(r}^Bh%gyi&WV{frNxW+uW&JLQ*cH7?}F~uC_tb zu`(SBI9GGT=y19mY@dSj-1IS#+heMim2reHz{N`0^g=&-y-i?QE@4L_wXVV>2#|3G zrx;#RJglV)9x@I;8hhh1g|%sh?f!aD8prLJ{rwP2_aVH8IpCA@!yk23I_r%A+o&*nhxXhkDJCx!5Ax>%evWr#BPLN!H z(Sp~r!jX#?J?9SKV)c-ja86E}p3;xo)+%?S?&#{7DduC)S@qV3l6!hUjBBFsxQobN zzLMMF`g{h}85_gIz=<+wSk53=%+kuI6j2aAgmyi-oRL>kQ?&2)b3??;9wJXP^Z{P2JJRq=_=bD38IWH&(8legLw?C8V=(*bSb~O^Oy41c)9b~j7*$q%S1nTo3 zmmzN3j27}*>5HWt*GlP-t$q=;txu?%8!H0<=cL)gmI@VInf`V)Q&~#;K-?@kY(L4B zPy~bmB|Y%H!plhH2UO6c*J&S0CL|sxo_(PcgE-7Li-z&X$}33@D-MvkVKh%S|YE0_dAaGDkhz5B`;b^xuy%EGR%K zPFOickgA3-j@o{dT0vSHDg()jK}O?4D*3xaYm3_14)*n&0M1Q%6A#(V+8`x3(1NF4C6 z(n|AsD}&mAVx%9!7K#&&(%pX0Z+%&ASEh@u?@}^iDsCB+o_+Q|%SSZ7FWgW>3Q+p% z`K_gBH=^uA*e*ilYX}FYA3s-_`PX<3rOO<=+uuZWxVcjqxR+NXsc2r)pFLlj?XCI& zulyNaBeAS**&b3;>7S&_*W*Ju-4|(sbHMu(l*5JW*UtiPg(*$2Jee7Lni9)LD5*XhUeSjT{vRiWo;rrIEzb~pjNGb9D}ny6*ojAxon!?-tC zO*yj)C63+_j!;s#@`Q<%n!~kVeNee0q(Z}S$B0+n*9}Oo#BSAL4xqf!hLXqVntbp& zzw*%bB9{H_g60h=PZrD}PjHP>;ed5x);LJWwG;e}C&_YfLoaVowy7c6{umFH9^`Mi z7R_8QZoq$dy2_4=r#HeGLUB{S`@nqnR_VoI_7R_<@&bX+dudQYj(va|law z2#67qKUY{2NUnsm@V6#I8t04l(2EH;;iI?r)8&n$KBVTYnT$$+BF&F$XBN^h-kRs< zo;{05^JqdW8L$s?$N?&t`NsCe0~zH1_Wm6r9U=mm^i>c3U;*ct{Mg>qbk1|JEc+r??Q>HvawuPy?jFcDVZ|r5ei^8s}%swJ4;G& z%Uk=lTK5lq|K)1u8z1@*Y`|N`wy8(1wgSq6wG_%^iw|B{?ZjuB+!uw{jkrLmsr*H+ zeAP$$7iBlUm0o2mUT&r!rxeIizw$T8)h&mz1y{*DTPr*pieE*Q)ZX%gWC(1J5Y@&O 
zbb1A}NkiB?0T^D0x6M`8(vbAak20I8zPrI=ws_kSq^mUyYyhU$QFBAyK-P_F-i*M% z!xv1kV5z}oI>cpqU)zHY>_Z;c^dfCx{2{C%p74qDF_12cUX;D(M&LbMMzAK3fAK{k z*iZWorPbE{%X3i1yIP%A30yeR{-WWb#B?Bm^XI5PxbJB5%@u&A|Hgj*{5fuVL#z#< zRD#S$O$4ukfO787k-z4aPrLvQR$I6QPiBm-NOz`~O-8S;X8NV)sr7&&K%rmNBWS=J z%AuF<=LVYKr~H-=+yNvjFI(oCbjIc*l$w9QCJ!_ z&Y5KJWAxI-?_c7`QaOBm$)iBE_5sD%X@)cv+i|4L=)^21KT=|Vk^-9MpBlU`L~vs5 z+1HmtBf%SgrZpmbQ>(d-+E)fevC~<|vz@?>oTM4!QH1xlL42}l(~9BbgGwGrUQ;Z} zY(lHGsQP)~HzyQ3map(@cFu88W7Gq(j@Yz^rJ_RYtkm-AyiHBXe)16fJC=1&)f{<0 zFa5Y%_X@k}cL4#KaRk~kc-PiC=!2>Cgq-Eq6*g9brL{@veLqQIAZ^VUhJV?Mvn;q%1L35|ByH?Mdgpz~t zm3Z)~F4Nm9nXE5oEUVALG}ma~DQwZ6>)9aP+QMv*d%%#8lyapxCZrA{GBlG$tGox+-XmS2-N%i#O@b;T3%fV8D}rPs-_xkMmj~l zIpfQ+-NmIVIgr{4L!X@4c-)VCo7Y*9`&Pi!9(<=eN-nPKMw z)5`*}4|xpb5q6wOAHEbc6ud%Pa=Q-~-XJbElMPp;`v1v$@31D*cu`OpMzNqERxEVU zv5;PDklsi_0wDxzDybx-K_Y@EDp&wTQNS*?QBg!hR76KbP!J0OVnIQIB!5`hop z^3XBgkvSqRk|k!LE#VfVfx;4pvZSDIFBInsdHKa=q0JI553w?AAnQXB0mlf%8Xi`L z!AlX4zZKvzRihws(0DwKMP<_QUbCIs_%5<3Z` zkwyW(n=+ir2EkQfpy*97hJ%DtZ7LcgL?zb?R8#@@B@&dA(+NSw8~9tL57#r`bc9qP z!G%T{A|lK%Vldx?wgrn3$|wdDcf}9J>s5A=oEs5hi3k?PK&w-e*(3zPC=gMg^H5l# z!^~QQ3?E{qMFivMAnKBbV=?qmbh0Lni!@@*5~LyyrPqXE@S<=T7LUa%W3gsqlqdwN z2D!E%0E=ykQb7p^a(P@FHqwj%^P1FhYZzZ5vqEn`3uQ2mjl?FfY$V9D2{pDQvLaRC zN{rGRDN_K4m6-_B(LlBoAtVGPu`*C3m4*wNlriL7OsJU55@M7g^bjI1T5Yp|--0oq zAtg4pku3xc1vM+yip7J}WDe8_lnAombYTiADTXKH3wa>Y4UZ&7VLw=i*;0KR!X!YOtppJ0LSic6Knx&8*%{K1XnPFYObS)POlT}-{eaX3pD3=f(rLL?ofj8K3CVG$2UC5ghwiAbQa9YR7ylx zG{GPxBbXSABud1$sV!O-u-_7xJc2nY1|$x{ZMLXjgi>R}K<;lSD2pv;;%4 zu{e~OL=x%D!PrQNRxBf+!<06a8boRWyaN60!lG$hBQcz66Ki!!kOd#XG0BB+936@P zM$*-BfWu?tNDQ9OhIpAB6G=v>;~gqlHu|H9}9v=uv1;cmjfAz8;ziA4WowF-ka%6-5$78!$38 z|9fzHgob5^L*XJAc48z5i!+!kaDf6~5Rpzsf-Zb&ER-%TiDOzokG;t6UE0EUBpARS z2+G1$Ix`@<>IiUf!Qo~qM~^z_ zrYZ#(Ef;ug1ZZ9iLW_m~*NRg~$ZR_wh2xUN;bd&MhQtbo7zv%IJXs_j%iv46 zN`+cUBXEoi0n)|>NjN$)kt9X}w$6lWU;2D5AjL-|kNDVwzt0dA0f+&j` z3zEx8(7}Q(ci|+9P7A^zIe=~v6*?Z0DuZ~NTu;D3SRztMLbx(L8YDfLl|&Ya5RJAg@BR5Od6d4Nt1C{;%FLD3L4beR7eR7$UiK8xEN%M+lg3c 
zJwycqVn#T-9whM#c?hgNmwrJQ9#t`Y-*#G0jf-x zB?Kx@r6p)ddJ*8adNM53t|V~GbRNj;&>`6jGdCJ9AQFgj&;SsHMc7o)YCR4I%3fgO zRD5Ho2p?@0D8t2eF$ZqKL~A4v9){xqUj-OTgi*9Q07_s{gm4=ht}>A@M584}r?PR4 zAxw!DWEbiAiVz-A%;H$h6s{hn=i zm1uM%NKhe#$na=GNN^~W6d@(hMZC}`D#^|R-G8kn35`IA0>Y7msxzS^0u6@3F@a8r z3_w??T)oK7r<%iqk@`3T91GfhqL~OiRZk3I(4yl&JxMSgv?n3Rbp_9xnPe*rv`U2d zB1>RF5|nBsoyx%wBwV^31qW3?3_@Epi$xEMj1JL3U@f%h1)vlJ!vs?Ql;I*P3`k@| z5(v})lohG5ODO~x1t29`fI!G$;5a1~Iv;93OjXB-Kqeu9pa%(ba4uVgq!KtbMHH9JLW7Qx96XIc)yJsWKnJFnKpYt} zQf&%VfNBe(a9tQ97#F7i127Q5FdN8PGDY%0PClP(|BiQx5Cc3I&bEYy0L2j+24pef zR0xd~Aye|H8l;>d=MW?|4mE;qMSw=AfI+ zjR=#`h*5YcN)BB(D_TWSm<#~ybY@`~5Dvw3VN@7+7;GxmqM;zwERc%E6Ea1yHqd&Q zLSe0X&b5Qi-a=Cto=1!Vkz=$F z7?CCy;B0YhnvtkN$$*d&L)KDB26-$~rIAAVEDUS|D;5!BDS9cA5`*R8qe4Iev0ZKj znme=rn>Z#0|GnxAL{M2GE}jyLG=@lx$QV#}j4x3@0IH`6(9B?jf+pfJ331X;jRpKv zXpfa*z?TGWEJH`p#K!7$W{i{y!wW5H3;_*991a)2XSfQ~z|yl}c$k{5keKv*q(%o4 zAK+wrY-|YA4(S*`%#q8(qyz?!PvDRQY^H%L;DzzvNJwUt*dmw=K};k!mP{aHQ6Pxb z2B@=zBvMEOhDfm)ujX4pA}XFv05pOJQgcuW0~3dhWJkxM<0MRk&7`z}EG6Ld21SH; zBs31P8pWZN5|$cf6qAJ<4L?L0fwNK6I-F9ZjaEkk>j0m?Bm_s`WO$k)#%$zg`yT>A`omI zh8g``@<3>46e&cG*@9>S4BUe}Dvoaf@IZw@QZL2~a`~_p4T~j#hZ*S_n>^ZpmPeqF z8VOFI0U2k|55g1*hLwfTVhx&TO*qbOpraIIAm+)1S}TAyJ~&Id0aUqzOJGqL8)#D- z_JdL(WWpGWCMFyc0^+@qadA?BNfg2NRYZyud?bd8w**J1;qdPgG2J9nGdWZqFBHNu z44oB&j{y1YAyyKRjD(n1j8K8X(a3rXD5H*p1v5kRcmgB@P?1(TC)z*_!846W84RSk z2*2w*u_!i?K@r+?BC0@dfHRl`LTr>0B=xa4(J?p<0T?}ijRIT}FBqw1aPSy0G7cn# z>Z4!`E-w-*MOu(jpyw*%FdQ<8VC2B?HZ@Bij8R4^#l(6b+4M(95J|Axcb-vO$(q z1rti_z@b2O0;DYzrH%b=b28DGAh;Mt70ARS`uCz#p$cIPH<%X2#K=LVm2e;#8$j$$ z3^Obaujj-9>BazB(#J71a<)~@SAt|pHXYO=rm1-_4n?jo6SaB-Un7P?`~}Ra*BD?1 z7!Ft&AwA40L@7XAcPx(}0A)hdbXr`Pkj&JFV2wbar5eoPaS9m+RPUpZ$VQEZ3&Onk zpb!8AloE3!A8C%p!eUvX2#SHPkwNkTsCdr^F=N9bqWB6ife@--D45b9`R3|^`OVKj#C7N@94uGGLqqKHhb6=sT)p{y_x1ea1^ zrL?Gtv@i`#U`2r-+*k#$fRd3>;e;3rAzBp+D%pc2NXXG~S|SH-klLhr6M;umMnE?X z64D^}HxgpOt0E#okGC=v3J_XOf!jnT0Yy&_Rq>^i7-$6W1o&S#Fo`O43}tj=1l~Z^ zL?CcT2^Jbd2njk=X0#bCF~%vZAt*M74}L=pQK+q;U>7}H7orC_H&QbanoAyK0Qr6_ 
zF{ElmnWEyD6mX&85isx;q8b)VwAf%)gJcY@M2h6<@ltJEFg-?LL~A1ia-$B0Budrc zb~Q>tkR+xA;UuyY>Amo z47Y-L2q3T1t^`Fk(Xk>N6;#OpGG zhG1aD4CMuDEFj1zS|pYdSm5TkbV9J2r4Lrak?dG~q+NiI1pWs!#GQ<>F(5etrBzrt z!6+3Tj0MN(Em5>6F+zv2;3T0K@DN1l!T&&g$}qeNlyBm5?a`44w9#PXB1524mayc& zZ=gbiqPc?DFlq>ZK&qH4iLo-^R+R~Bl0gz59HYU@f=Mwn7(N_<#70Mj!tDw!8>5q> z#Wa|RN@VdkvC0r&B1eQ$aFQ6R4XF2ms9+5~JVcMeWBDAbiY|odnIM-8tOlGTv@V0i zfRk~+duSb8T8IW{sAO`iO37s_1Y{#94`5fpcnl4ZiN=RBLMe81s09O(_~|kbG|EJn z6p#rSPsfTul_4x2Z#QZ~SXiuCiHNqrDKSbcM`b{9>?Aa(X<~>2iE*@0K8(zkskBN! z1C*exITc32!PLM?&ql|pZNvs%kv{I_JfOvbgG)ftT1w|_j zpm+e1gchM7QA3R}tI1fYK?2?o(Q;lS+#pi$m?kLUlL`vbSuHW)p`k!%fZAabqqv~% zg~3c==^|rLu>dgbJdA=Vi=stZ1RSJMO99+c&S&DKdW#e(jlo=7M5(ECm3CBT|3ICt^M?iq2E;53q7s$de zGDIX){t!5+wA3(7sD%gWP5i%AG82u(Oh*D$8So6)kG?}cs%ZYBY9(rTn662!F9zdRc3jpK@7xlBstb7Hh|BR zX3%@)&#z5pqh2Eys$^y*_yh^QROkVnQrV%$;m`noj&BfuU&Qwb{yK51+-R1^fyclT z-#pCkQ$Umz8S{&Oe)xDlu~eCS^nq&J$C`Y3(0SWEf)Q{vxu1_MX__cOtIU`+q> zrqpt?+4k4Ph%ILQf4s;4;Vl9Cn(_PHgD21oA>N>Y3lJ(R#*V}rA{O$eO~V3>4?6k( zWvBc9|8$x9>qPzwvYO>$K>1-hu}Z6gQUPIFmFYXKDvWyQ3mPc$r?-4ZQH37D&i^Uy z{vUw0=pX3!7uq6Fe*yphhG_fOrT_1Owm-1(e~Gqm0A~=WK}6{P9c!Bi-+UP&fmMjNcQm2ZBm}aybXdwR+ehVln0=Vg|fL($=BoKEpi#31Zx#>Hv zmg;o|EnvM6GyXpic~R*9Mm!e@tn5^yUT^mMuTsVL?|>Kqgc&7U57ONK!!zH%m8ry0 zMnJ6p{PABuP^cgg2@+D~uA80Jr;leJ&^0eCZbAFD0r$t>{`!qePxkg*-;ml6>6kkF zNZl~XCKrv%hJos?%OfwHyE7qg1G7Dh-oC7Xd2sK^)#!~OtdwD?^OdqMfp4B9M*Cxt zpRV7ZiHudeZq2Rluy4OV)6}xHCOf-hc2iBP3VoEamh9xz*UQr}d8vzw1AFcH+uT6i z$)<$GYtH{Nbk?GTu4@Z1aCi69F3Jgxy>Gtt)~t0$Uma&{5G$Tmtgl8I)`k7!=ft4D z-UOw&0M`JLR=78R;LpJeeBRzS)nqq)eI47Kw zQoHA)l;C(ILS#J1@tw+o@W6J=gB{bBw=_ zw0aGgpk>?Qb@P6l&24X3Of#7!<76W)OKzOh&7C{NZI`vBDjWH#Y08lk8~jjbEHCR$ zM5rDScha34_ht#M-u13NUhe%bqf$i(T1GQy4W-mVZHF5Lw`R$(qzuc`!J`AoMPu=M*7QOm1Zs`2^qkazb z^Mw0)WoR79iWYl!t?dV-5IqMv=u1Cj`Tf>pYaPEh@=*AZsl2XxO}*B0iV5*=35)Na zwRN<&9y)sN^z7&(MRlu<`0-O5lY^)(POPw%v#0x9L zd)P)^dX4Mw7r%qe{WzDOvn9>*_Cz2z-`>X=)c406JeMuAY{&f&-##2o+{8KaGXG4~ z^_g$x6=a`zbM9Ve+wJ}4su?d2O9xa7g!9o?pIM+&b0+Q#mmII_a7P8t5C0d?N)FoP 
z>XI#b-gf8F-*Zn6-523)YQB`}2YY?$$&f-Y(@KX{Fc<2~tCMrv8loSZ77 zQ>H`f!PB0O;NX;zj^{o% z4`RvYd}%NA8%#}~p!;}FUv?T+@_6Og;BUQOfgA92+rF$D+NN_$=D&I&13zmfcusDH zAGAIhQxy3(X8gQ`zFr|(0MH{BC!lBi*eZL~`TB*4y40%u+wH%jK7xC>-qZW3EdAFt zWj)UV6yLsn3MeYNtH>1*3+JD)7MwUr1;ZV1PRbi_CGXx*UF{6kzk8CmzLTs z;CGK*=xdb0FK0Za9@+DcU3@>EKS%8kVU2V$-#N6|4|VUPF1O-C?Ex@NuTvfywr|Po>N7t<>F+6=k`y!m z0EJC`-OK*^I?2Hk1_&*W<9yx6emU zcT7H??u1$$p{=!`-~0#A`tc^m70~u-+)MYuA2TfT>a)HJu%eArbo^(7>5?lg4{_Di zhq}cleb#aFkyph{4~OLp1DoDJ*?BWrvghcIsi)QR|Me=YE-u;G+e$*XKTj~p-R1d% zGsPtUYFSZ3#$u|*E$O_8J`wh1a$@)G9aWDmEN3}G#75Ec`CSqA0218cfs~-|luL&@ zNyTga^^ClI0QgoE9xC_&zLRhcOS(JZu}w>2RU1jGhM=#EkDW)NE_u9SRE`&E<;MKd zj_WgX&%8c;$ZN?>%8H=%t`+V{TZ(#z+g_tFKQ8mf`GA!xIRJ7ot!Fj0KRz_3pMQPk zO*9}UX}55IlFTywF~A>Xp4Yk~@f`l$B{$ZR23_%kD(I+g%xPe?hh}c%G1Wr zeer2b)K)Pk@M~xAx+hi08dT@CN`U*)^0i|!56<}=N%Z78JzX)WiWS&_`t;(^zg+_) zA@u+In(uqwC`(K|Xrh;QpMz-goKQ?cY@Uw<)dzy3TI`6eSR8?|dHoQLt^8 zgNW*>cN9PD&V5!QZ2CFOU-alZzAar^5&WS4-}{;FG8$*tS@QbyOXiZjm}e!e+jHB_ z?J(bBtc={w`Pc*|e^pekI9>B%t4qZ&2b*8Nj>b1rt4q88ji(dOdwC*4KP~&Yu?(Bz zkQvWCO&{(EH+SP@6)vU=zmAffYcjw6{3IOuEWx=_S5}Uysxl>`9w(b+E^rvt}pXD| z0P580153v>r*t`8KlbW5&MPF$aW%7Ngk>J3bSQfIzbw9DB*1|}w8x8eKd<4&vOWi8 ztDMJ%|FJBraPxet$mXh53*(K5gmt{dvTHPt-*Lvm~@~X8%C35S$}t);qGP zx2tt?dC|AtFS(kdr6&zfWI_GM8P3N`XFg7LtFpCj6IK+cLx?+vIaE{MqVL67KK%;< z140Oi;UByYbo^j!PDv3ixDU0V)qwHOS076J*4xtk@@V?rSBEi(Md$lK*v0?E!9N>r zsuM-Yi_W~*?25B^mJON?18_F^g#n#EksNes+qb+y zn~L`+2a)HW$|d1e zTx8Jx?IKKom=>0_%-u7>;mVWPzP)wxU-l6a@0hc_YU>Qk?_JOLq*q<> zNYsA*0$>n*dp{SuBeA!n3rKQR!i7uZ=X$#}9e@w#065+9rtI6ps;&6jRXONuy;a+0 z7MnLuJ#mA$kDZJ8xLc%=mRx;K@k9HyvjBQqw#7b#zd83M-v%V*>Zg^N&MW^o>{Naq=u3CZZMQC)Up3x2 ze6IV2q8eR~(i|zu>T2UAsy$hLKMwSL%K7iI@Qmj>`SryOl0hASrA1uvV89%3JYCt> zk1g-6cm#|$-w_D2fn_4(*np%eA5UWbxyi#4K0MxE_>&C%BqDcQToS{aceTeaoP@}H zes%K#d*xX`R$IU{=%b|{b8KHf)K>#7br|KW;86I%dC~`#di2X9#oI-RUq4j$8>?M( zI`&QUo*6%vczo@7Oa1()enZAZyl6g}@L|f>^(9X?uoNZtvfJ(fVj=ZGeS4RlxUt&| zn0*dVzXOp=!f44scn9z1<`Vy8=`7su|Dsxb-V@*SOUXp!4qMBZi#4X2*-^b+HHnSN 
zYX^vGA^+_3;`_#r`$PA0KCTKomY3{@o;q2=7{R(4;Z3J|v4+cE)*WeGla+iKQta1# z^c^wn=J33Iew1kk)eI5qMI^t019Y_B`1h6mV1VM}sk9NhaoC4((!$D#xmy`g7 z`yqvRPH|d0VRqx-(XgGh+kg^ROO*jExFu?nOz(aNfGm>eR(hrKL;XU1_op{I*7P3A ztX{Fy=~P_n+_>^J`y<`{!muB-WG5cnfJ{5`#_qSY5^rQlS1(Yg?uBW%v)(0ly-EBw z_rpOS^@ia`AU4Fl%X|^eKZQs=bolsczlnw3E;~Kq-zNFV87o-KdHg4w98~4LtIlrX z)6aLW$*IOR-Oo6uKXl~eu#)Uem4d+U#EYGBzM(+ME=%~z^!WykFxm%kaO||yv_9xk zFBCw;u3MErh=c<*lG|BsUg<)2>g%en>@#Iv<^{o#|EybwjB~u?A*cz-iRztnTh_9F zM}m3}wzs>!EN9gOdqq4@Z%6Eyx_c_4lsPb*UtoP%SDlelK-1rKHLHB<@v|7iJrgYxSp$QK2-Og!M8_@CAwMeYKff_CtDdsYg+pz3O6h$ zwP!V<98PyZ%RA6(jq)>e_X;mIzJ&^XnUF`fNo(JW!k*XwF#> z+g#VOlX~EXkua##=~2dsYtzqc1bUSNFw0ptUs}~i)s}k0SF=15a6?g)U#bgzY+qMx zoA>myw=d5X{uh8PfC${sZ?igR&VERE^uxF(>st2ZM!(wj~|Y{ZxQ@wntO{B)Wdrp(V7~_+*Z{C@eznf z=9hkq>;TkvI&5rhobT^TcZ`p{5AV9GK0XC9iec6T1SX*AkP4fV^h^I7KrL^a0P0)i zjhlu)!DVN-qvy2JA#ly%f|ctk=9#L|8XaKQL5~1$I(h2bm!@yi$2-5y*gDKB1Lfdd zyMcPj_~nnXg3m8s{Gir9wt6Xj>E}J4;p=Wmvby%vR#XAPauKo+&r9ffruf)?x8rgB z?}910FE)DYrL8e)?!-<_p4hgeD;ukS7{3m(O^!2Gc(ok6I0Ok9BVU}!xE}59xZVC@ zGU>rdpkv-T`=M@b()#o4U+63QGoOBm)Eto~e0{m_UgOGK-AU2P#fS3cDm`L}FOLvUfXnpnE69ATo2- zU0gL3i@k5S=%57KyWRPwsj0oVDM8V;cXOJDu4weYq=2%XTi0uz-`rE3Jiu)vZdDf^ zFj{9c;B@DQCJ7hVE^h_2xCIC{Zb5j;qE+Lqu-2`nw7ZLzI~!*>SkCaAW_3T`lSR+P zoA$l>7+SC)^5RUTZtFOL1_pwo9odB#Yq?x@Zb@v)=&+i-(hBoga_^(;&bVBxS zUE}E%pWh^xXD#TC>unW4FR(V(=XkFBYsg z(CAfz$4E|FUQQgoB4|Kg`rE$0^mAIj%+mmP>Rf-fR2Qe^=bxkpMh`dvWPbI_G4AWXYBT5i zESWWaYln5j>{eTD!PT#KOb@qo)tYKY-_Jb#`}RF~8SJC;-_hE{<6AemS9)CLC=$Bc z_O~1^yp908zAb*VtJnF7xjXXbpD9n>(a7wR1Z*(%kd3PMlyYorHl&G$a%bN$zrK%8 znsTUeme-^GPJEN4X7(pkAYdQtRYS(VM{V8r;w3PoL~l>*wV8l$(CA*iU_G%Rte9B6 z+;7@Y%b2pRma+HxX&fxb#huT2rra3?362Zm-%(};JeN96wrv@;cSF9%+@eK?CvfjN z`ET?I5^j|J(zlw6Y4LisfbPK^`|fkT|B_jArmlIoVS7i<^<&#R+N-v0-_u@2sonk@ zzj}4V$MjK&8^7WQ-o8kEODoureQ~8T60oDwU4Y=Sp3K)4T!t)F)i-G#x*68?(f#_5 z&-8Kx1`}k*{=l!5u;Qk~Z%YnFv8EOz<2$O5ukJH*-1>Si#Q+$-^{DI1>M8j!phLx& z95y`Ree2lq4b|Tx{+JC2kE}h9^pyH%aeTMe-Gw!CG(LSMUrjr9&aLl()_XFW+kTV}K%Co>e*F>I8H+X&vmt|Z@hh> 
zi?#^xtEqP?k}KkYsxUk?`-$rEZ5_D3>8?4oo~MAp&os;1GJCRP%JUVIr1OKaBGr?} zc70m2u*dr#>-PulE^#f{QSGyM^?eAVmf!GIFYtEU?d`d2>Ti;Dvln#SiRig~_H^ev z)J;xra17M1x)zZto}uCgeTkkG9teK8*8Qp*{X;bs%$O)6^5&`z3~9 zvu(|({*fE5O#y_uc-@V8i(eHLF5T1Z;^UAxk$v1X8~ea~JN^hUa!2e#8+-wSEg#eO z7OVG#uJ784`C5LDEPn9*jHQbYBvYE7RPdvVbsIO);ps$GoN;^0BRm|yn>NZoh zyVI)zB9z!cjwSA`ncO+5)@$;r2q0;+$6MY!oGpA35;biXe9g^z>+dIsyf*RiGxcwINY?{hze2wLj$K~bH1(A#mfxl;0 zLF`!jU>tq0|AW&OBt*&D9-N6?a7+-WAuj@E(-z=|zzlz3aTq{+khc!gc%VGo61l*| zQ`hOYd7k79Yh9Hlzhq*-d|}`rPs91X{Wq$W6D+knO!Wz})6(<6eWXq;NT1VP;Xrh^ zr4&pnzR&L?AKiCgAyPkG&;wvs*{TC1r^ayfPSIh{3|}v6{rp%_fBY>$03iMQghv-Z zcC6z!AF3*Wf}+}yP8l^Hb%eFgzRkBa;7-zW$vG#wN5JcKUQ5oNGL5y!#5+2Gq>v53 z{a)7SMQlpV*g5g1MDk67-xe6#8wwOVmV6XF+H&@AI`9piDgnAz6%a+W+m00Qs+4MmYTC&&vF)beM+D=y{p#2`PA(q#+b2Q~M0hJFOuXQ6hpbp& zd$#H1xg6o520(Z2`(8PE5}9(I;lJ%f5P71-?c3W`fwE)8FZlDEIsvyi=IJ$b&gY$X zfwa`Dj(mk0omAHQ;b=>7+1DeleoY$y+%QT*y18qr=i8%+U$43M_;a^w&)nX}9-AAP zReNbeyHAk&TgVIyshn0T(4>lzTY&j9^Xv6PtDH%m4Da@~{W~y_=>w3;;p#HqLqr@Ro6=? zq24H9T$hgf8alSf$8*V9Yy0tpeVy+P*M!)=4(iyNnJNVeU7_X$q-51!Vg;1e&I3Bv z-q~jsrk8zrJu`RyCyxMCLdka78%R>1(05Cd$2c|&h z@@RkBxR~S*jvI%&B>}f#w*FjK`H&%RYKDxTG2FD1jr&bK)ZE^1GcCT-Z!>CP;#Z5? 
z?wwwXT|H+zl3Rd7P!qTkw$v%pjdbp{8^&Jc({Fm7ras>~K`_j7VaJwmedfgT2j-r* z7kwrh=zwJ?-wLbxtQEMW7m*=1j5E4qe{i*xTAG z88a1CqakBJRyVQsz?2<99@KU2a-Z3Tx+gu~fZDb4xcl=n8N24KGG##W;K$e%Bf^gr zwWO|o2(+)N(=YQiD*;1q+7(}(@M3dW&tA*mU5UU5QFIfvc2A@)yY`D`Ti4n~kMSNu z9RNwc0AF*2?orcx7`2Zp-fkMlOU-X@D*G0{O?Jd9b6|7voDJdu4rX8@Ex5Gbt!~zv zZs0@@=*|z#|2XEZbHu31TPv2|3ZJW!*N4|>>J|;&lGhav7|szNHw@)y#@?i&2~mu+ z{R@F;y&c#iW3}J(Ktn=CT^|QoDk5^-PESQY64AbWEH1mNtbz#X@a)v-kLT>U^v6!Z z@+#mQ)Gb*23Z2VaZ5i*d1-&)r{qrj_pYBJ?hqk=j?1LE zC+^);q9*1YZC7RR*>w#knt3!-ScyNz=TMor>p0DGQi%Ta>9_f(NB&m*?y(ekbqUGs*&sQ$_^dKrQ^XbsOPU9*( z`~h*?=Vz&lkgn@-rq1RJPC}&wq)_SSJrT20s(e>WSYNJ9jyUAmr({*?u>IGO<&lGP z;#c)uKlqoxuWj6Ej~TW8mIQd-YaneZ(r15!jm9^{cT{(IBV(E`h%##%ahmn#UhG^x zAU4uDoE+30S)g~R))%y5+n@h&zd!E$EP!oE8ug8CC()`xXJ2W_ zsSmFHxjQYZ(SurX=dSGPKi8!!u_L#s;lt45F2?kYMK!ZJ*7{*%n#B(~2Wn;}Prfqy zTTh2#&9sxpQMSjy0->q5LP+RDdlaIP} zOnkAu$TRflnb$K8`Nv&Ouj+i^>~av-mvm~zxRP?<0+(0vj&pW;Y|Jcvs7cOlI`(OG z+8}BWZq%38H+mGBgv%as&79+Pxwy}JLSz$}SAg+Uy?x2YyTkg`o`_jZu3O4WbYIn{ z--KJq_zR;~@J|Dz#^#5fHnuy_MJeNN1)~piRo9wPhmqRnUelU@4$1d;`>-76RT$+Q$pcpVFWfV|H*Sw7c9AH{DJuch)X_i zjsf>w-B#n($t$}HoIdO!znH?JSZsjQ~Jc zqi?vmqGaf?qM*Yi=n)Ps-s{#n;3|&wb{$E_HH7&fV?Gav+#hk#bBfpC)HLs58_`1% zZp#dX)tJKMCy>!9m0X8A9^g{&d_G3c z*yv-`VXLd2Ms!oc9aF-cB6d&q^1RUh_(xz@Dt>7kjc}VgYKQEj4YXJnM#joCANP`>G}G!n&S)Nyd{$-#uRC5jm{S z8@d14dv{N;-lv~G74xL!_LF(ZV;nQ{Y_9ao=ib#*%bEFY$tPm+NG+yOgua!lVq7my zy16#(Qu0>vJoU`pbB~q{syd;%J$T6G1s9Ilfs-PF6yN~vb)u@Nry_Vat>4fI+CB8M z4yl!`X;qgDl{44db4RP@=ZFyXHx|+bZ!D@o(tw*t;KbY1R(B*9ov@6M^Wos_t*tFf za&C<&=`1oEqH6`HvUS8cbL*W=9zmB1@;9tMKP7%;sPlDv!pl9Ilc#tT&Gnd!bv?m3 za^e=V1?UIG2Or1$(FKIW2ydk5XxBJICOXZ1dq#Gi#ZPxgh#N`VcZ_;k7I~a zWEJ6~RLrK*j_2bSZ|SP88=qbC(5W^4`rL*EnuLqqo7!WV(>Gi$PB~J4)kENmJUHOu z%CDP*3ktSaUf;iKAF(F{DCd5TuyMRWZNO+=w2GH9;g?Ot6V|&_1%P{cKQmTr&+l?x z&W}mIkUPZtcCPUSa6=bO9^-eJ(C0~JEXQkI%8jszE1IifHm=WO8(jT}je^&t7iw1> zV|L}Yp4PyR$cKcCG_pVvmwfgxDA#g$@4jb)0?sa4w=pB9)c)c4`dor+qGj5_vbH;1 z29gyvXos|FMJIXx-n3D?alofi)GuT=*^|zQp)Ks%p0nWj+0n9Vl~n-b>)mrwpFaHx 
zYReluDIS+5qi4g@Zaqx+@@ka4+uI}eV?6EkP7VnC=qw6>J^Scct&iS6y)JE4$x!;9 zt2y?2y5hS^da`9j1#4Ub@S?o`u)g*sd5FF6*s1GhKCC~Tk??Z3#jDac=m?8{F!YyP zdsZI%$}azCgxmN{vj;kQzeK*A`5}M6$rPqyxA0L#EbtD`{bSRIE?_b^?4HdC^FOG% z$=EU~|Jvu9v&W@1u)J*8yE3;!ZaZmbfxSurM4Qgi0Dw@3v0<5*iCfpqewWi!UUT#_ zq>C+B95N_zI9+l4?S@{b%T5p)7zIU zHB|!%IC|T%W6uiqjZf@(m(@8a(nB@&%Y-4b8&qeT=X<0^bk5!XdF}gOYu7!Etlct^ zJySlpc_OlMS&$W%5}HymLOAc{zHE6_?U_sAZNewF0uXLrHy3T*Sa2x+^zWBbEED!m zKKi++|9Zyo-!%Ax8ZWZf)bVL~msZ^Oy|uAud(H!#=2u2Tm-FJX=eR`24VT93$C|k> z&LZA$4zzMM=1;+96qgVJ{WU|oVKvxgoWZF>g-e0o{63^d?(H*a&RpPH(RL#3(=4W# zeOu6)tPAjFpd~pSel>UK9L9D1?62x9(`66zwP1hj^#eUT&+OEgHIxa2m_--%9V;d~ zkqJv3UdP=z6XreolBmJAiyCy5c&_zmbiL7`uAfl99#sMfQRe`5&ShEqJ?Cg zczk`v>M=>R$;VbaA|DvfC1dXL46ApBIj5zN+j!WKtNNx9j!k)cza+9a=6d{wjDh84mJ-x zmr9%DeJItx32A=NzpHd;-qB~Vum9+r*DQiLf^6Hv;G+1uRSoK zjZQ3e8Sy!AqFhh_xeX5VmM`(IZe#QBflwHhnqSik3`S_+kKSMdz6_o7ysdrdG3>Tq zZ$(N2J(=U0k1zi88smrD>%Jk&w-wuVuC;W@JDP3B1?`8xsfj20WQ!8K0|IXO0^&W} zDPzJ~%B@tV_x2mwo)W-Y1N$>R26g26p1!O30;KUG@;=rS?$En~oyXx|i?<2Vhmn>J zC*<`v5!G={?5L-V_CM68ynxGF?bM!Kdf@e{Dh~T{8e`$L*0U>K6}4ce@-NTfd`fC5 zZg9#&p7!10QxUu@(esK|z`Y_(lx83Q%-2h9+!L~WYxwOTn#&I};3E{wc~`!9e*5;^ z=ad&mHe60SQ7NPN&K;)X9kKwcM3!ggK-UfJOW{E1e@*0?2vJNDtS@B?SARap7M1E5*IU?2WLakx^)ViH{aFx4iXe)6N~P64Lj(;Xq)Ce%~gFk z)FzsPj_7dlVm{5o&U9-f5c3Pl-7!+1DbWI2VrxLrOC(5w4Prtudyr>{ok-gRf8dzEt#Zs~(If5}<) z`0Um=I5RnDoKr^2(%SKHZ#{v#;Q0JmeqAJRWW6m@nd#^<-i@no3bfNb5sY7-IQE}7 zF59h$dPXFWgpv>&^A?8HiKI&y`=0cXs{lTasUDfA( zWjhC*9*Tes7+@uIW~fjxtt&2W0?S^c{Y(rYvnDLyQlvAO{= z@>3T8Cr#r=y%T|d>0IY67RK)ta0mRhY( zM$5C8PeIt=^bs%EYPv^aPeJv7>(@4B#4oU(h3yBn)sb_x@WmqH<1X#zw~s%5i(7ua z*EBd_{(=f9dUb-wR2TBbW25u@9{%c^uelDKFD=urET8!MeW0(`;n-yn2j~HhX4!Y2 z+;r(3h!RP^3qEJDpFF!=q^b9vz8B+02Dh~Akz%K+THkM?*W>9`x!9Wg5x}~&sEwJo zb=Ns+;~(X(8>Hz)5BF)$Wz91(<_hng3HN=lzb5e8ef#0&U#GxZKyWj9l}AF{fPsdj z8=dFICWrRAOd800cE_U0{uM)eekJc+^Zb-26F(%u|HwE#Na|d)^uQMY2we4mQgAvQ z^Kapw73>OFJ^b06$yejU+*birsOC5zGaX~C<_{11lgaC+zuzKFD>zx0C%bPwva 
zPrQ#z^c!XW*j^H3ufZJ^yy+>wyV<*VlkfGks`lIa)7g7gZ)aTcLnDEBy{ghmIx+E! z-pu@1(m-_e_A5xdO{*X<&oA=vTRm*-S12drRa0K{2c}ClqtoZgFnAYGUK&pvhvZb( zosja7M86!Jwg9Ag1ke2?Q_1&#e&N%D*d1)$%j6X9)f*WzvJ%`V%&-I3UO0_m?(=uL z1qrerM{UTg@l?6Ok_P)xboVzr?J0jy?|q`)srP`>Db?VcE(b1oY^n&>JNX9`0lU_; zMu8gVPI6e0Y#T7+NuQF-yk&8H3!Tis2@XPYCeT2NM898du3onMQ~!P;@7MSwAIbcZ zE4{TrEt@E%BTrjyv#~|xs*M*9U!AbHoyQN^bQO1~Fsx)D5clzV@!}(0&TGOF()TES z%^mI!xiJs#+Ue@RTj@6JU|dS?#at6`njITEX|U6sy!dUGVZ%0=r4OFAZeUG6Xe~K? zjyu=?!2ZHTF(B@FbIu}8gCpyGYS4P8-zS~UZ$icbvwjYU@V>gQITAP%4)33(Dm~D) zyx2COqAK#CiTC=x_k#dl`L<%|-lNZS<-0!!JH|R6-92Icni)>*N-6p!N;e1E3**<*d7PzA-B%7NKkZ!ZqLw z7nYi~hD>q0;<)weX<)^vn`dI*6PBmG9ddroH-J%hyLUR(ActIS_@XKUP}B0qpnzcC zhKZG3>%J_QN?g3|QTpnox#ce7lRSGXHe{3O!>F&5pC{)H?699n+uv`3SKQhlb+a>( z6TM*L+53|@Yx@uQ`ebo;O8vlJ*NTQa-MixE9+1ymc0p2+;guEklrlqd$?4EW7evjP z@sI7Jkr7iN2gR3W|M!{ZGjUg32G*b_?+hOP9N26Q*AGzjefxBxB&lM^V4q2br$JYc zt5&Rn8^Tz9{-`x<42_>Y*vVseTR-ANAYr9u`^i|+hue@m! zW<6!bcHM-}Y9?TSMRd)efunKX9uG}~fdHDKL9M_8uPc53HbZ(p?#tWpf%7z}epx9A zt0=hTwht`^|HhGLeY*&NG$pkBp0!<_pdm~PY<)Cf_2x`BI{9qTSg$xyVSd3wJC=9& z%i(m~B*N537uFpc0?dq_+;`+rYS?fl=9ItWx zK#VwaGqB?Q`H8pJP;NAG6Q`eg_(Wp_P;tW{_?v-4DNg{%`g^ z=e+X`ewblB_j9jnUDtPwGPQM1j#!3X+F$RUkR3DqS-K9rl0pagm=~~4*iS|W#2!96 zdK6eJQ3uTm%{V5(rSSQ+!JvL~t5VWdD~n zs`z4gJtQg|-m$8eQbyb=6~u86lq`FlBvX6+cfW$r+PE}kdT@A%K9hmfK9de220PTI!QUeI^~2UIS#Pzq?n`IBKF9G z?)eJt>WlYfWgwWI(4w1WEnH?NHSM^z1#Wju#5u0^=$n^(7u6*bcBHPzyY5wY*~Ry8 zZTl@9`?LV6x%K-dZ!?(_#!DNW;0SmA^ItBZ)A=i^z(6_asnIYsj>1hECoC?t_sg4d z`%2IFd(hvYbpJ@JH@y>GF|4@UjNVhD3J`sDhE4dVPfR^-ia#&Op8URZ+JT(b)w*pS zDYSIINr;GgPllgZg&U2XMbG*#7S6$z^%#psxjd*360wG-NE3;tSH|N3=7+LCIfO{c zzERC9UW0|6f}sT4^d)DYBCx$+;6Cud&QBsKBnSVp3eb_!g9>;O{RhQ`r>XqnV+eY7^5u+_GzL2WGp$UqN;S zZ?unUk=3IZyz;5_zxGKgpSwH=m?O>(+b@=pJF+VcTk>3uTkU_Zc4n};7mQgDai5(p z7sgFeGC)l_5Iu>UCI;?O-~`*5BGf;0H}|H78{wHpG=XIzh1P;afE>xkoVOY7HL4`# ze3FS7=6*{*?ljD#-SmT|CoTWHk%v?M_Xd>hr?eB4777@=P(!9gR&*{WwwIA@8b70G zTOYK-#<8oazWjr7Je2X;<(t?$>fgGJRLD#cjA9<&GFh)pp39J~g}ETGlw9yd{=@ 
zlI~mgyPa}k?S)vwy?|L&co!suj`7}@VrpVF!cNO}{7uJLo?4!7ofV9|^-<+dfWAQ! zk}E>F6!9h!C}?b)P+z_oGdW?J_q{xl34YqkGU^jPlhf_jDJJ_#f#}cKE}|O}-7Ry- z+Djd`P{uCScVd#}ya_~qj?}VI91&P$OcCKEN(xgr#6$foZd^nHVLU4Eje4r9F@I((8w#rxjU?drGbyzS z)iV}0N?V1e&exiD6@G>Gx4>|H#ZLKcOtTm+Rv=y!A-H~@N(dM@SuTiB>T|+Ty?Rx_ zd$GT)bfQxg)D!yhh7W)*y`LjdT{)&C7O+k&|K-NG5@&>Uyd#kPZ$5NzHF>BKVLru#vqtP%Smr?zspOWUoO z&Vn81o@uVhDX>Yv%ug5^KjD@F@KI!LHUjaL7igU1BX$@t7EnDTIbb?xOQiC+CMXaJ zn6-M;&x4YpIHr$ukqF(zZ^JGe&Ul)OtpL*IYT71(@wWmvdK?n%g1rwOTqs zx=hBfA~dcRcqB=k1PlEyAPTd6$=frVCI_)VJ!NKXKYf~(SkO8ErotlT*~BgXa04@r97!X5ANT2RF&&>%2)J$Asv`jzIK zXAjo$gnt5+UYsKX%VxPI`-k+$A9>Vq3CQZ>X(O<4))Z8#B2Y}y{_b%g?^&{8gZ*13 z>I@+%;S3KOp03;tKiTr?98~X|_CIEg1Sql6?w!|j*`p(tjGPT7Bp-OKYsa%K42G5S z#d8DlKi;D%xhYVl?iN{>cyANu{(ma<(&wLsE4{b{%QSl0LeWLltC)Av+NE zV^^aL4wIt505OB(t>B|{>*yr>Dog)lC@iu_qUZ@cbWl6_ogq#ydyICGQewHnMMv@9 z{XuOeBJmW_k#{!;ua%9L@M#WXlfm8fs1AdFgy|XR^b1ac7u=ZPHkilTQ_YEv@bS3Q z-j^s@0#BMq?%oR4&}E;T3mOW%@?jN0>XVK)FIDEsMU6;=!L3lo@mkR&Okw-q~Y z7Z=cnD(hUC@cBIn9hE{{yQTTq=Ohar2wjf=srFxpTK3l>|k^a~FoUq8+1oN?)k)~zb zvoW`t*;macQCW+(Y$H#QgQ`Er_$cCDH-R6OVulcq;kCk0dCpYM?**LxxS_rw)uhJ1o>T-l`02X3r4}By9bY{2 zFDDVnVcYG1{hLjJie$dtX8qWet+Cl?A@69 z7F6S(yit;@tMHvxFY`1J&|HUigT^ z&PE|e&GCIo=%0XMuT#I-Fg6K|0j*Zoa~RRVy+o9PZQU6|rLRT$#j_@toG=04e5Fe3 zpgS8Ing!-9!J|g{?KKy&M8K2WOgZIPmX4TPtLIFQexprBW#mdXK086Fi|dE2*Kc}ZS2DZTIAsfbhDsZ_%+ ziR4cR>MO>73~JXZywV~QBhq8FQZNxndL?qI~t2KuUV* zsJ6$Qq-8-}n1=mcKTA}ALkH))0#+nT^!_c`_8AW;_>a@<(lmH)9^+5`>&Mf>nWa^k z?yn^`Ef+4h12}{6j|)U2`kxMcGkNuMKS=Ol-ABgrpMnEFV4cic6ExXwX?K`+IEypVupJ>Y?Z7Djf{PEj%kHpn zFdf80glX8)X!XSz(v~NQcdAzwo;VN=>J8Ffkuhlg6AXSMgh!-|TdZ{IUq?#Z<#XrE z8HnERSs3-wDUQP^kH7hKI`bisaPP{r%L@1MS5yPYWA>%Q?|oYo+aCJKEJMW;We@(v z3gmo1*0RH4KW&I98!t8RDn7n>9=P=(4FTr5-M@w076y%Nw@M3u89buCZBq@X7qx4;@Ezcv zlQNM)N4(%H@nviXT~Gn1hR2RD{O$X!d+H|_LS?f#TOlK zP`mTU_k1l~9g*!6`xsi`CJ*k0HETy;|vK z4u6O^LBRe|eRmPHn(55wCk~3+`MNs2bE@T&xa_TVT!!AfFW>dG?wq)NB(bMQFV;}r zwjMI*&`99TCLyX`oTTY6MR)CW`ig1G*t8*_q4peTHbdzN5kpen0k!axr4?Ti@HiZu 
zq$3n&`7Co>T82&wW+O~ay&7YXq|$3Fe0j=~yz5ikjiQ|aMbnx)RGXLUt10eG(Y8kp zL)ftMEP8WqJ)S==BT;g@_#@H4b|y`XW3|#;YJJP_P&tHt! zO04|52s%M!I*x8wO3$Z%n0X<5^7GYLEdsa{6WY>Kg`7!;Sr=VagLIPGDC6Dtsu{mD zx)6>1!}+OVWz(82lj2RYlj}aQlw9<7v9>%C2gNpNatdcy(F-4f{-`>e;f;mMNHDZb z*z?K}387Cd8+F!JBVl}Pa}5J5F1`y++(G;WR185=DD`JasA@Zbx_i@_XrT+~k9a>G zEB<#zZkKF%&_L>u^&6v*i<+t+c1~C^=98yH9CEcop zIUK`XzzEHpO=L%p?#9`A^(?}Ote01h3{3EJN_|KBKJKy}kLvfS%@+cQZJqZhDKd6) zUf!eoY%+{hJ1!trr z|J!=y0j{hP+ZEY|WfVkq5xTLosmDoofj2U-u2jnjXE{IMr@TI_s3js0&{y9eCgI&I z?e2<-o>OsHw^zMxTc-Bd z*3Xy?WgTjTP{8PFTqvV{;5W{QNHv?m?#I?GkG@!>ujejtCChc^W9pt9nQWS4tY2AO zxQoP$67^gcxAOlNyboZYqt#@A-slsUY5sLOLg-{FI`r>FKXl*S06y#AH1B4 z0>VY)E_F71>`FK;AB%542G{h+ZgLEUZaS*5ov=39LxVsw&|P)2oOUJ`q2@|JWLaET zYD-&U8gEtQeSEe1IcD1Ly@6E_&n$7Q5_&0yxzMGD@X!L+%e||Fs^GTB z_;ek@+&P2(P$1|01Ls(pi+1guiX*2YgdA^<>?^qlrPRM1JJN7q#<)Pw-OVJSKh0NLye^QfdroY#-!;Z{vqY904EPSZ>Esp7ekRTqVOCd-eRIKM-R4z z40@I(PYpt>szI%VLPG38kwZhZxSxR8a&)Jz^R?wnd8KW*{3Lq_Tc;3?o+wleI!?-e|8G8D3wlwKA?Ry7HH z+Shnbo!iqSTu(i)oT8X~(QABZh*zfR{msIA^cD-%%gruYd2>%tYDig8FZ8JSTdg*< z=nK0L+Y`EAim9W*>_$PCY z9cd>|cjmk;{(@d|lps{l(ax|JQ^O5TF#9aSk zsR-&a;y}U9_A3cVDRh>agW}H;#xk$^x()Hf5?#uJsdouU`rIN9jngKQgcKF5N_$EZ z3na}#ZUS-404Isk@SHQ z*VtrFz@H(oOO{&}T)uzr1;F3G5#t6lo#|isH@*MxVrBNafSxpIiy;X}By34O7;8f| z@YQtwy>hr42I-m2eS6)y&E;a$jY-iVM6;+9x~gvsqCbL#Je<&wK4cMv#eA$LSrJ2Wykl1A3lJKf} zbVr?j<5JVqgLCc@!`WfRUvFpJ-906vi*>(Q3_i37iSU`NqqNo!QY~EZ-fz9J;Cv~= zR?kN9xAH!Mp&XW$jgYmH>*tTBF_A#TF=rn`LHig-@sDe}F}B#_zkspuE>e3-VYe1j zo?2A?ZWA1OQhmhWgQ8lmE~zw}8yy%=dBseNp@!rDgPzM6xloZi1!d~#39bm}$xk%Z!$4>>41 z5B96HychocZ>M_=^FzPf6om@)BuhzOL0vz_e83 zH@Oxl8S*b%=pLHcA~~sWG@#!quqZcG<4hwdS~mVYLbUWMYUQ?PFZNW=IjO*IKaQhn z3|_;g9mb2h3A$+-kr%^Kx#xQeiDmO5e=q5ho#ZZp>>njvZb`EqaNt@$s zaFps&+-Sg(ry|UW_UB1a%!wjhv7bOk6J`UGtxRt7sTZr8GVlvby3}&LCHZfn(7i~| z@;s*2;#L~a86gR;~&@46!ciCR>n43Em?()32 zhD~rkkA+5KC?(fz+s9khi@vknuhG`$R3PGvF(#EhL?y@aJQ0=Ym`8yK?N@4F$czNN z_4sx1;6u3X3?p2*6yiZ-Ga5O!0TynqA8R|@#?oWUJ8f|KS0%ziP%WjK~ZsvUy?hD>lfSk^HV{l 
zr7r?Qe`0TO+Wk*4lQXgjmLsl&W(Ua1MrApsjEl%5w=3` z!Y;jrFHY&diU+nDx&6Au2_JHE9n(A2^$xt#or!qZknjY>O|br6gvc+>Y`4qZ5#-z?2+}8A1 zVp(AH5eq*kwQbILLY%P95zju&Jz(&u0Afh7+b=cV|JN6ltQ;{Xxu>cc7rzI!7uMe; zrpn?zVVJU2cu4^kwCX4U*=zhJ%jxb3*{-?VXy+dmaH=`fy&xgswAj^c_$5WhyoJk| zhfj--ATq?mIR|F_%|)92{XeMF=%39$*>w1>LMIQwTpHR{yw3gv8q~Mj>hfc9pmXYk zZ+~%DL#}+d&ZW@usXrKwT?y%xhS~nj`&norVC!qP?^iJ*Z2m~$a@21nmr%}suhob`@ai}0hS${#yae@@7ze)y^1U$LAhP_Xr*ksKu(%qjntnWJ(f zGs1(e9S@hgm3J00g}ZcLhN+Mx9HmS@FvzG9=BpcnL8sQ?;G(~i&g8)Wk6sOF`UYIkq+4cp(?Z6p7`ThQQ;f54CkWG3|jm z{_k=}cYl0>cNUKM>`d(w`mt$tcZG1f5ZJnOwXH4wrX)Y@j6#@3pE_bv%b_$fO*p$! zZTB4!RnkvKyIyu-5L*U1XWZijL>x~z4aJs7*+npxw;Sb{je?3VW;w#nkeyfa@gv37 zgW9L^gDB4+fm!k?#`Y`MmeH2HWn$hPPMAix4xmYsR!H~IaO-|bTMSO2u>c{6iFfzD z2Du%|o&?GHPiw1%^wYsnt}}TEz3}OyBlfp>h$GwkjtNn3Ptv*Zmfz)U+N+9($_z(X zj%vGgwhO#U1b*si58-AC5yW{~e)&?xVMfbgeJ!bkkdoPAxt#m$YYbMZ6Q16iAKQV%STWgPpRwy1!-aU(G*0h)_D`VF^GCmX7Gt8bI^?aA8O3Tp z5h*-hH;yfd{>1T}_syL05y#5c;t2CS*(yiiTdXwy0d=hq= z7_dGPmTS!SPn=b8`nKp~kI8H3fInH;L6NCG=1UUMf&1U_t%UE=;f!D|y0u-G{zdDGaHpDO+-Q7a`ai_|9x z|71}9vajdo$Ks6IgahbyTSO$Y>NDrBW!xKmNoeUHLNX@IXUq}UBNB)Oz{>vXR@mvH zc<|UJ1(e6%5-JrvXNL&|Tl-hcP=(0D@hBGPzcf;Sje&W9eyT!@u;>i^VINjd~jkYgL~Z1 z1#jN+*0m3?1HFV!pEDb|+fV~S(MMz%&!gd=yBrd>h z_2()RcGtoIf|FYGwXj!s9)d4FDFB2^B=2ruXFfdLWw;}t4Wm(>lk6Tv5tP5g(NOlj zzbqF3n(|zKmr%dmNBTUS8PC7uh`L{^FDMxbym!IR4o`&Gj zQ#MRKLFH?LaZv=IPJdc zSlPp40^C8ZB6_r5Pj^BZu`33q%9iOywq>@$ILUCn|G_b|$3kh~N38ri^mG#$B58dq z>U!Svpt*y^ZfEz{rz3kg?Ks&n5B@r_-SyPJ$m|Lt8eDC_uOd48O(};p@lz8?D?P#5 zTcRXETlOowJ{o++zMP8}V1XHvYg7=}wcs%Mc*d!wX(@1w(SHZnT1jIZHu>5iN5r@J zWbpd5;^MW?!FZnQ!Y7kn*X7!Gs-yAwCT==2UuI=PmZTrwx^3-9SYBI%&LqxnO)>Mw zDLWh&^)nE~GR;s*iwES;WcAAQ-+=DjQ|XpIN#e>aO(e9zck^~GmS;+p44HVVpqE#9 zIR^=0pbQr5^%$gH;@jb26q<5{wlU_4g~C|N1xuSjPB5nopQ0p|?$&XO$jaux-j4@w zX{>&E9+U?z)S!Pp`srxwX;B$t-+mA&e(E7T4kgma>boW~&+s838dF0=I0}vGR_E6wR+mOjB3}hPjabT z4rAf0q_Xc&*&3osRt#>Vm8}Y{T<)XgAZT# z%=KR@+c`9Gzia7C*^n5Qico2qe3nnY_w^%`cKkxmqgK^{+WKIIsKBCrsQw_vpH#^~ 
zUyeZr9Lu^ZKbJdaKrr9=K*fQe#y~kUdoc~7vn>@wD;QqgmE9K_CmFCSb(!zgzzI0G z31@@|J!6|_7iRz&z(wwY{h|2yHmx`<{x;t0=j~T`cIlR^-9NGQ&|=#8qk-D*&WPUE z0d4==$H=?b0aQyhbqo)x!NKmN-I6nT^o71~)v$j#K_-LdRBupgR~+L|pjA&6{*b7Z z>H6V*NNqy>zwSw_TLxx|c5p~tFeSk}t6do1=X-={2+Tt0xD!}7qeFF43V5{tG_d*w z87*sAg~M=sNegg@KZPIm662VT-vw+cZ-=}z-KoxnI~|+TlP!0=p53XTh;IR|W}wVh zl6gLHnhwX^n}qcNLdchb#2TcGDvF-d>1}BUe?yQ{>>=A)G}{mb4jA)P_OODVo|jav zVsbrtJYtoMbso`v6nN9WcrYg!FQC@#;IB!fIUnXD`OGdTUZ zh4|bbus@TzLSc&wB~QgpQuzrN0&`Jk83!`}+y z$V9DzrkKisK7~9skB92-9crhYlTybv{k@B=`$U0e!bCrSOWY%Dltw5OrNnowtsXqd zVzqCx5QYs0XfWqzT|d+sBekxtMTb144Pi>Zv+d6usZvv5Z*Q!*Kj_vA2pt=lUE$ej z$p8BImG|<62i-oi40;T@=;7<-GxR3};TU-nZ@VSp>dQf1qArhvZmRA%f#f#p<0 zU+1U*$$#_+nUWQy1fe`(e9a;Gfdy}>l@i|jt$DK%du1VJYk3zQQ=S z3QO2VCD?Z*f+DT_E(n_prF=z-mPYCCg?zu6NjA@fHC+Bml8YDa5Krc7tr4e4bapDc zQz#WHM!kHXf*e*%BdzidHeWcR$&}f`%6OEPn+>BCl$>m5W&$a+P zD;R)|H1k?R*oEH8TPYq>A3yPLr%7by5iJ-wgKqMuCp1Pk#53kL6V<6)mQuD^1sv*; z5N}7nQ7KUr{OPU(r!tDfed0E?Y<<}EG;%7M7cOT0KVE=_NEhob962H((G#436z|ex zH=|J;HoXiL5Fa6o=5aWXW(r0b)U*SiDc;O?+}7MW$rA?=CnU{py=NbylJZS~2z{T7 z94P&Rz!`&?7c&+!5qE&_ifVS7NRh0^exW-oJ8dfq8m*+C#|Ue_n&o(>Ro85ukq9*c za}f$r`ywNFqoR|ew)K#9qC#j$?3-|`ZtR#MQr*bU?xh|05E7#N-+V|YApkrF6 z1 zVX(|k*7Ez5i~4gwb~2$B2BUFkv)UTVH}xR$azu&b96WA}M%`$PDsh@qUNvc%8+a*E z>1xOOcY`z*m``1bJiLwI{!@iY3krs%(`oAkp9V1Xn4a>bO|R4KSK774FYI<)fm^|3w703U$RE~$D0&FQXEdDLVM)QEvH#^`tCVNVe8adgL8Fo^cq zb6p4OJ@QG-l_8>yC_mx@Mt!!10V~TlFhZw}n{y6vh3pK(s)Z~eI9y|=Pb$W6E|QS4 zIk0KgiM@Ld`K?4_=}FT$@5mVGkA7P8HISYc`E+vIz9qJDLT_|1-AXyfxM~#%qoxol zH|Nr^n>(BRXq)kYA|HIk?+e?t#QgqFc7a3!n*wU=%1+i*pOZGSSAIp6$$y9SoGPY&JLj6@XE zEE3_ybcu94#i0CGJeY9e!o%M*p{NKIc3APS325NbQDg> zG(Z4p_SGJ}vsFEhL?Kz_99eFUJx*?a9(<&**44_J|LEJ%_F6bYk_nUKq)ClOJ5C?f zF7`t(trqi6yt3t*YU$@kgEHia%+=#bs!%(wszs>Gq&-z)rLq^$A_mk#MIwtr;6He- zgjEfe9oqyZ(E%0yuc5H6?@u7GxTii>vUoh#F`mL{=)%xrMrdp11#4PXXb^Q#lc#Wo z*9iJev->f}F_YT8#LiEoyk>PA9;`nW-#?NPFl1yr2>-H7I5qpBr0@U;e**d5FM-7~ zA#W@+d}#_&3qWTt;!xf@0+KaG0~LG`g7%RR 
zwQD;As}ME!`bh1*3I=6|)@mvyR{Irwx4j+b3k8!C(;8Nle-`5OK`|no+oNvFf67i% zC$rKOzop1E+Hu(x!Csc)tuE!;hoj2J$EFddx?m{?OejWCmJLk%v^oNim2lkz)EofE zH8xxKWL<8982N)JHkNb1zWcUJ^xoXjqG@H8O+D>1PMK#dRySP}N}4i>3}815jL|Hy z&A&%NK!l7K)NEnx`hC!In0esDk7ZDstls~a9jpN$n2sZXa}+?q@yYzSnTarw9qWq2 zv;Rh{>NArj{_D>ATr^eP2Vw@f0q3X-J%vUp!|NJ{5cVG&1Qel4&VOoxm<~SsPGz&h zB4?J46@M)YA-8B$ww&d^90+F0kG-McHH8u%@%u%{;&@;w?BL-*D-W0=YvsK;qrW?i zn9|fO%!wDnwM#d0)x25jHxSXiZ}+*Yf;O}rNs7Rg1*@*n8_#CS4{}4a^C|wZ!{}X_ za_6h2lO1}-7&Cw8L>`ns;X+8seO(L|x%l{02{O7X5>i{8fQrJf>kGUR&cN>1QO?>K z+edJ;Kc0NQzuc3H`B4?{94H^U-f!E$xQ8waEL5j#tN~g7u0b$(dmXh+v)J*jGx1LTyFKS< zLjp4PInlE1Qj`CR4ah+bOLX%9`BW@;UcqBUuIaSvpy0pqvn@El-FEpgF?MkQxTMA{ z&Dx*r$i4w{d|081(nXUY|7P#ZiC-c^GiMX|TG;$OR3T&-=>Y(;IsZKntP00b zQU9z!LfY1)ArUQvZ=X002feERYchrOyh!w8WO4HU0>}e`g8qk6JAfD2=r=^7B%6fg zCjABi*!AVSo@qT1>1=wNUF_91jMeU7-o4ZE9J&duG7ti}A6WT2lJ{4xe*R0fZJ30z z+dBcSvA-@%rHs{nO0s_1_cT9P*k=|%Z`iA^Xq?5@S7Z#6;6xH>3Pwa(C)wZntz;mu zH$Kq_3;sDPJS(EDuipOj0|*U&tu+|Q*pcimHxoEaCZ|qoEm5w$Oa28G1I#9XPmKPT zDx}=p&slzOJaawL*t5uDdoD~WaNGG9Sz{vq=h@d$)cHd<$Aw2f`%x~JVrL0eTfb9k zU7*69qQR`HTc}*yzl|dzJ{btitL1ABRMUnem#!ko5`v6P`QiE;Awqo{SN*jt_cc+J zIP$)tfGZ7g9;pz^N`Y{sTJb%o_n6cFw8vqlDtd+d9ha5=aq8U zc2ceIg4$g1zSJ_5DUzN2%qC7)&2qp)_z~dJm2zr!I=Sfl*S_R6*}PJO7pnvO)}dbC zma!xGw>_c0^8!EEJ(k7JPJ0f1{#T$6FMQ4ZUrN+UhFu9u@)p2y3GD0+Ua>gb0zO6$ zXJtZ&7kEeH?2&QK#M)b#=(pUmxvQ)W^+JG9X)$PDmG)4E{9(#@#fRnn$3$jyR zECZPQRF^)dcFSno)h&|}%L3il*(mipm7gpv+1YbtrC-ylUZ$zgHAo5u);-*e`5Pa? 
z*giJ|;Pag?XvM{z(~P=_M6F-Tx&b{MyOCCsJOqAspfZ;}XSZYR=KA{cXswo>`Agen8ADy__K z4CqXwm#~))kBfwh(W)ZxlOI^CszC$@7&A@|eK{K5`e#hHMWjL#{^cF~D~IKs!GAVb zWv^yS?19at6w3hfpg8w;D{o_RX9n^e!F-w-{=}PV$7(I$;2o0z+=f@{pZD*eB7S=u z52kBQ%wU*SKti;wNM^0#nSXplR$qkdR}_YnzmJIORfrc-d{7FLug^}fT$v(=K)>+uw3!rv)7VGxcaZmo>fwE{ za}iR$_QT*M)2h+Bc{f3IJq)x|Oqq)LfKuOWiohaY z3n$i;S4Wojg1<>+cwO#&=C-Z_`jk6v=|1H!`i;wRXlV*cO@t~X9PV4-PJV=N;XOE{mu0a(5KB45E0hv&)grK-1;1loIgj zeVX_t)1T0TGZxhL#GRzyI)YjNvxk{ygY`i23vG{kS?vi$ z&dTZVBL0+7BZm5!d%|h-N7uszj?}PlDV8~ut<<~O<(-Y&_6t@U2zeh4(c>f;5}ZUE z$_9PB97VUOY>Vg$DL-v7tSit8oCAD^Do&~cbhOyg*3gOqd0M4({}pm$#;aPf-3BZ~ zOTF=2b;I0mhj%JQf(9M`QUV;ezB-(@&8M|eAOd<`hgAWu(aP=sa_X|Qbe{+kSM{Xf z3Vak#czEp|(Y2}#{cKaa)4L7l=|zhEjs2ysi-Cnrnk@SncjreB(DH_#HaWjUUk8o4 zb)W@mmv?00q(iRFWtTdR7Nx6+N?<#3Wof%}0J8Nm@@^Z7HzRhU!S;QZYU{{w$o|dm zVd`&+Z^)HpulJE-k|VnS-(iL!%Zf2Va$??H)&8H3lsNACq3V`g9k4#c9PDT^9KWi4 zJ!1KHO7gEFfh3`{COwElDeNT;p*a%?m~`w@=4-3YloOW;9}hZ6Iz!vsXHZ&P2Gb3e zaSqWcbd*OI#iS&vJcdnk;J=+fGh8KPffn4AeXr9`3QR>|g=k3aM}VD7s?lUX9Iknz z7l$gxmd-p3RD`Rq+V_weG(v!V8f7c@9!)5J5qI+_)jvd~{h|!#8MgYX`kuulG=GD| zDw3dNr~*ZtcI+y87u09;5^dLo2S*O}CWwwSPGihXr%+=+Xe7+E>Bh8jbLD4tSb1L3 z125A-RKwR5D>}E6PQz}!#W!sbo9vaO>}r~4Kr)ZINuE&S zI|^@GeYB%|?D=a@zn`Tv0ZIqmhpaR&2fCPwqEb*CABi~FgLsm?ILG1sMF2LD6KjY7 z7y?ITuCYD1Uz(;3wYW>E>Kti4CF0*#mxJ8iA#~7s@Vy7?DK?S%pgZ_@l;${z2fuQ^N}TF2 zrnC;I9np8i9K}S+dITpD-CO%jTb~2ASSg

    F6^^c4ef0PfOoY!lLeFoa}Qb(h=ts zLGr6D!REA!&cYE=Zm(U%I{cyNLD{^b``BVWD4wJLX-m~cT1X)3S)$T& z11M;4IwZ{=FdRJ{_&<-Apd~^NC}Pz2apyKl>qS9aDF<7cuuZ0jI}U@ojxo@{-3mvo z9j^7@4rwzfXiN?t0D*+XbRSdKhUi;zA&qM@Qnu-#OkRUW3K|;J+(>x>@uLtG^DRJu6{QF1$eR47)PFcc+@uHWtpMqO9h_zV94npt9-tX52=MA= zV)>mLZ6clg_0dva_1B-04kdIp$mij05Kn$Bs+a(_TIW7RAL0bw=P`GAe2!Xfw;m)!a*eLNS!B{3T~#+MxE> zfX5th`O+QGc}SV<9=CR`m4=FXyYn^Q?;hEk!=9MaCS?=Fza?KMRa+*qAI1_OS~ilU zOgyQDGxXWMm)IvN%yJ56(l(cub9%9d^CY9%{s)$ZFdTjt+_rx`z!HR-`4UZg=0E`* zedMo*T2}|~87_;N&ARX#c1hBRro5=a73V$7HLw~*E%-kHvyFKy^BM$u>i%<>Itm+ z6V7*Sj6=X2lasc}_aT$n*^0es=&z`~zPd0^Fri_P`<-ss(8N`qEK5I`;c zG1F}Z1iSby)fb*GkSIB%S2W+We#c`LbzYV5iTn zLQRPSmJoa@Z)2x(tZtWOD-P@zG}R(H`pb;(V${QotIHR0jBG{k^ zvwbsMN^AWW33quX+jn=;WPhGdnXw#od)yWu_-Q|K6~Sqe6hgW!dh#G|#oK|OWGvMe z^bGwyW!ibBu%&^oA?kwax^T@WaNOL{{nv- zK<|)s03Zj=1YEOr0h~Ac%VWcDvHL&u5t?OMRE(qSuil2^XlRs4V za6)GT>qcfl6GbV!VLJAIsK-sxl^@qPo^JxtmXhQOy1QX77xd1vkGd7S-1^8j zH7fsd{na)eAuW?0N1qbbdl6Aq-Mf_xT6VtsKU@4GU#-F+0!^gp?hm3VLiz4ZLWP(1Pi%7CocM8&Q%=*1Cs(D;xgMul?4$ERz?e z5xh&*$jA=jC)X=*OBaDW;y{&ASljf#DkZV}Smry1KG}Bs(N@|o);lI$agQeZ#2wr1 zV-f`+Es@UDoOlahI&-Zd2WUxFy6wxgjIY0xa0+`^Wb`IrG3atVDJn1)&X{y4oLg^K zCaE<>I?PMT-=TtU<#-z_TOu9BR5dTeSdRQ#H;Bj4b~#;9}0m z_B|lSqw_?=81HY`pS9S4wWc!}Fe0^$UDSg2$KlTEXk>jtwOz z6W*VC5QW@xF_Ig`*Z*ehyz{6H@O8{x;k1Q|Bkv=sG^EUIxv_P~4gY$Xjb!sBrM3XX z<$0(2jptSVM|Z23Y#CkoMM=%_RCV#KWy;j2hxdTQ$ktc>H^0}*WrOwpI^IT_Oxsa1 z2=_vaCiUsfe2zY;B+zXgR&G>c_J$}KxXfvrMyxzR@Er{9su_r+ejZpoRy`GNmT z2fw0jXWu>qWHsMtFZ}wacmP0r;S>;f0=(G-yjX%Sk&f(DUq$e9b$Ab5&yk}?R050Z z60C;-d=?fBNah=t_m=;Qxwj0fa^2rW1yMjn8bl=o0cl}~qI7q6D=}!0?o^PHu0cz8 zN~cIOXrxQJJEZqBoYS?>IsY~HrE9Mb=i9uvoa24pCw_VV?mNsZvisOGBWdVZD`Dio zRU!C^jHPUgME6=od2VCYc0zY7D`flospFF{=pKU4Z9Dkl?D$1fn%nblAA%1!wdMDF zfy7iEh0!Z9N$M-`FNn@#rQ`=~*3OEL3fUotljS+ng@@mrIK!n$6$Xx7C;YoF%RVY% zInO?PpLk)rTS&~s$6fLjlTr1%GWU2|Og7d5jsC^!UBIIv2wUN82)%(E&GW`Y2Yp4} z0?<9He#Uj>)3lJXb9x-*T%x}Gm9NB)x`O*WPGh?KC?f!a)^JU}FEphl(T3M!j?vQ~ 
z(NPGYCbn|z08WU{Vn3!&XpgF2wdlkMN60DF#bn%pzIe47t4614LTNl_t+v^R`YEzRL#qCsq5tGVCKO zrGtY8#L>6}K)vKmB%}P4wC0#C@csk+79YJR5I! z`D;^6jAjj(2X7c8G1%WUq3kDSxkJt`&$d(INIMDfaS!X{dn<=hbe7If_9cx~@gaxK zV6wm4xu9OFDk3V%KQTMq{lkj$d7MSP(YU`a4A^gh$nBBwZ3OkFUC_9S>2cvl`1vS` z0r${k_B;2@J^a&9&C8`A~G%Z%ByETHrXx8Wb9ofMnBY!f6YL&d2*H2a@X91zp zi@8P6X*A~dG4tzjA9q;`al-7WXA0b=B%YX zc;MjF8daWM!~c23D<{e~<^+&P-Fu|gJ3B;7etM#0ufCSD0^_6`zCLzc5D0(Lf$#v&hWyAn}p+#Af?AtTj(B6{B z770#ar5n?SRR!#j^c`0s{J*Ez>M5OdFL${v=qE$#@; zhQ8sy+KtEj!D0SR7+3dtQXB5qZ!Jnxn`x&g>HC!86>{PaG=^3%# z_QwwEECGXl6p$T{DSIdB2aE0IndxFB&W9ckOvN~HEdhmQ3WFrq@)gI?STo*9TgZnlmNm)4|k-{Um!`=WJqfp_j|Kn zP`tyr`%@d4UFP!&X~cZ%$J=d<_XJ-fBm6%0vqO3|2HMA2jc;c?QE!^Z-NX@|1q|UY z+y*0J)D(tf8-RM$E6aods~gC)S}O%#+6(NXcgWsXE#zaEwYVRhFd>Xe+vdJSE&bfS zL#;^l%w5Z^dd%DhNS28lGdRK>x(OSJzNvb+!FAjRsiQFOY$5UkiWwa!O5Rrc5OrZE zA*Z_aLVIW$u6=7*l{$%ooo!tV}$bF(~-f{=6uU{LAd8M<98NfgXyQ;8t7SpqT| z=VeZZ#)Byt-xS;d9|5BjXP<;5+R96r(~}NCX8E=cWJHF z=aZFo9XXzSdHapJ=j&o`yP@y?!f7O%7~{6yq0mFh02`N6ULa*AtgI%Q=7y&NSMYWE zan$#Te-WJ4+(sLaw@~4SU-k=G#(5+vCZ`<6FzeFZ(L=}|7@%$folSF0taV+gw?neR z9l(THtgVj8#kTg9;W;s5zG}ul$U~AOs($;O57G%nRog)}LO6#FdDeik)E#Gl!s4pk zH;~MGogxHLjkgFN62FGTX(1xtwKnFec2F8~nhkj8&{sZEy%0(U7@Z{v;b?A5iohz{ z;CUwJ3;b>1b?1*x$Ak4+mjO_p+p_pgkYZD^=ugV1N z=U47Z9x_mJqSLE=ZugA|P2pa5%--m;*>>xr9zS@v;2V-n(U){c&wL9dhdddRDLeTD zSa6v3`dQ5_{pJSZ6a3s_T>ApYF%lO~3PAJ= z3iUG!0L&Q8zEzouOwD<&epNYK2!?DJ`tKNVlX(_4T#m=PXKSCFoZzcH0J76BUu$CO zWTw6FTpi5pLa6T|c_n^AF!0Asac>NLKECXu9<%u{1LXaWXwd%s~ zQJ!fAZ=I``8Alci&)`D4V<{1W8u&-)x!9y`KR)dSxRDIX0igFhp`1|WR#4xlN{OD| zzD@>?z!UK)SB3DzU1`ZD=|`~wa8wr(>3r#T2R+NXWCsJejFwvk4Sv?nF2WDT<^xn1 z9tG)8Laz>2gpba$s0cFnxczHlh=^9^S_1UJbk82RHs(){KzmelrrvcCXn*FR6BdY) zdxC}61Zf{FpdB4e9)a|*@fQFAdn4d?aTrqQCeRfQJ^Sq9iVK~P(O{Fr#aJLoe*wSS z2rg2rG2kAq8$)U@m{xhxx~=9qr*g3iDCS{805fB9ZVA+enhSB&{9zvcgh?)YqJ|{M z{7(Z;2t>qgoQL7TJ=sNv5FW6xpg0GKD_DK;u1Vmdo_tF|jXRHe=DnDggnUzDLRB?A z*-FMaSy}Fa$SfwW=1JtKY*RBFZSlr!f_9(+xaf&WGJ5SC$Z_e~UB!c1z!IE6%or_C 
zrMT&b@XXM1Z&HIN_F58XMTmm>j=U~eotiWcVV`i|J#0??L8o`jZ?7Xe2`yo~nDjtv zsl$eNzO-Jths014;GUUa$AYW|avELpfLoX7St=QBKR}cuNfC?9f6^-Y63p13H$0$>qz5&cmP43-)OFbw`@PoxARXBiGJXis4tMST>6v-s+mYTYq#7sRkWu z+3pN=eS`Wbh>3u%aNr#x76J3xU7<6KRz5TZ-fI_6$|}6OPh|&M;ClsqEzddS_rV-a z!2tj1f;VI9YV;OkUuor&V(dQaQk%9wMwWG;*qEDRI{SH03YQbN^qkMel+%(Gh3py< zS{l12h!vEAxK3A|_yD2e*kh8ZY*9aMq$$7bq);xP)nYi0kZ=K7RglYs{uB#jQ1ni4 z0Zgo)@F4iNykfW2DP#mIQJbPmI^QH=;$RSjQuMz!B1O)o=84aoMpc<*MAFC+>h| zP_V5E2(ZG4V?ju2z0#HlN4f;^1`td#2Gx?s%vODutm*XpsfnS%c?GY-4p-v!J!<`j7rGleREsD=}`Pu`#H#VQ!&;YTd3pT^0~Z9j;zG#JJTX%ybU7kMy$~}rwN6L zd-364U&_{{b-9E*ar1rXM=oB2RrKMSrFk$@Q8NSbqP@Nly@7p(I#=^}EG`ds&P%#F z7KzhPTOFvQ@;Qi!pu773hrp68m-}kI`26q+=;o$r*4PaIEwX;^K_DRJw}sXLhX;0l zCC~z;O&vRJdac?~dM!rakkjbT5P_a?ej;xUjT0vO#?#WBs_y3#Dr~PLysm+-^DM&d z3%m^q{Khsa2xm3+DSP`H-WAG$tG2z~npA@bLI6x6>V|&;irwfNR3J90rRIBrX69Fi z`aMpizQd3+VdS_7HdC5<|z_HaAEtg-0} z&Le=(&9qkU%gATe@3uamiZs^Jnx@*exgZXL5;yyS5OY#17Gvt+d1Nv8@vHBLI!ebYSp6hfO*PHZU;e(M@3nsM0wn3SkXR-BMcyI{YZlZ-ebdrgWoH_4UsI`jes1o!69j?T_7r^7W^ zH;+^ND3I_>%gSm$k1K_29#assegHSE@{l648Qxj(c{=_yokqzvR79{*C14AypPLqZ zqxyb2D&=~#)G3e#O$XwW1SO#U-vu-Vpwn1uL)p40GOpE>?Cdp71=5NQhmw5O;ki$< zE$#z*9SX!syE-H37J>A66d2tXf7Vt?(v+tSOkcpsDUkPmQPowFQ=I)Im&T6uTfwbp z-{|6LBn=>i1=hyz@l)4Ro0ow7HC1!vEaFxy#{Fj^dKI5coLl_yslGOWgFr|;@povW zw97)BH- zV{w$_=bsx5G^cf9d>uF+PTpANr+x(28Z~IMvPX9swvx@2( zIr-q#Uc8^5;lZPVE|JKg3iLzIS9b0K()m(;dD5j%rXoC~l>@}E!unI=gJ__@nF*xO zuW8)H?t_ej5{sEEHU;r*uXygluid0CCQQ=LT(E|?>3eQcinyYUN_((X%m}L(awE;P zyOF;6q>ciL7rV;i0kGY--WkQ;UfjC%(=vk{dO-cua!!DV?d|J>6n;$3A+%UkRn=rV z;pEE7Q8XA35Vkt_27>>u@q!|j0U$b!C0A+4G~ta2=-%J){}?T<3?W6?CkQD0@aNZw(NJvl`4tg>FtD2vHv6im(?`-qVlYiV^zyY%jL zqORrhd)&tSjW@ss-qvaongB& zk}iS4yWQ01C>p?{*343xeonY%saNx%2oo7=NBkP|Ak)h=-oFed8J2G&P+itv>HF0G zJc#p11(5+=FI#n51ife}pedcb^3=Tk1X=|IaWqJk)Fy%*Wcx4B>=Zv-*BH=^eGMLF z@COfzX#~ieCjdsG|2Y9ynnM;a*%vo%R*xdYJ0AmeP=}!dg!mMqQbqW|t9ub`1B9)6 zpiSfFe=mXs8On7YACw+3tI-b+5#blYL-^kI1SxX|;C>)B zTaS>J3Kb@Ce9UN8Gtds$g=m3BVqmN@5Jc@#RMuSdKN+pU2GtP|i3=aFFOnjoIq1{R 
zhG@xtalwMmfO9)9Y=&0JE72?ygP!Gaur z#1fzV$)|dIWW>~gmPm@L(XDYXOvOs$+LjyM#aNTaM>}?Tai%Q7`k>Iwu#F4Zm+;aB zhXCm*{rt|dps^xYSFDZ1*S~LHMJf6UwYg!hY>huR&=pkj0o%5ec1W6*`icKTCoqJm zk_Vd;${7+coHWt3R|3gKgBg3^^7w;8G>TTqh*E9tZaO`;7K`9I~9^S>3*`0$KBw}Ak z`6YPddL`%DwQW5zZszkiAtO+Gpw%^J`qXX6$Ky5##0hKPe=Nr(3o<>%Is%5>4V?^n zN-50w%Z-Ie%>sS*Fp%hxjc)@Z$N=P?H3v4cL_b^|yI)a*Vv*~?0Hl{g9-$2yo}+3C{PfAS~2I_Jz51$N~@d(-L|{V6!bo$S;8A z?Z+bo&VYznN1$_qW@Z-@g~)*7i{e2#c#bNxC*H{0>|Uq~I9a>8<~1^gHLkn+C=4f* zjV`m%8mgSX@7zMEd)FYvY{SHSPIhz1Se#+g&Xh4kx4FnCT@ZY~Rd5>fi6duzW z_9Sw$8xI1)@ExVB(o3pI zKSS*0``_P74@H!{{r!*0FAaL)voyo0rK5n|qi*i0*{T-5dFLLMS&e#yIe1=ox1@;s zvo$JDP!8z$ipfn6ZVC=R*;tcK$(i^aRyD~RhzAe-!Gmp4fYZ=L0h*;Q5aS8ZrcMVK zEz7dQMCYy0de<|diAm6v{}`eQu5slNzwQ>oE`jTeb0H=A&{GdRuw#wdGR^R1B*M@1 z8~naKRKiiIvP#E(26BYWELUI_3*)VOURo9(gMMW=@MSN?m7*Cn14F7nrreh9dLNz8 zb2wkU3nZLaknq`If!iPb%(>BhSIAosPk#9aQHzTBENBO?cr78)-!jY7gHwH?B{{I2iu@4Q$Kv;` z5>hTYq9-rZ7tihPHY6o^>UD;Vd3=`O-j$`Xu z?>2{uXczJW!`q0h{S$9DjQa!VH3TM~05d3PmH_b8@h*EE%wv}TYuQPY8IO}cSfI&H zp=S=fJagICmQ~?oAYX0;Ko|kQG%aBJMqeW;vz_?-x^=Z^P zSPo4C%g7hRefi`5*wxI{rLq*;_xCGhh}I|HQV{!=O6sXb#59V+k70Bl}fjK*3nMrhh|0GLc{{Wq*kI5$>K@{F}1I4Pv ztkj?!tB1Xu$MJ}=?r9CE``&+qD0W#JPu`1DLq=5K5Yci9`Atez6dimj-8wGT)D&ex zbVO=%|Mydyw?U1dE&;X}_;sR*LrhNmCzwEs{g$pHFp}N2>FMc9hoDM2r7;4+wdK~p zFWHnH4xwt7q0=U`GQ$V2*oD0D3$PJ+9P{rREhA%KkWe%%z9*CVDR7*fV)uB*9ph=;3s;a73Oor+A_pyZ^J_5bHrDcHB=tqNrqh)26cMo`4 zg!0iOAZ0?V^dU+PE4Sy}CFE@HRS5A_D%26!Pd7(A=CdDU5$RK)&iD2x7ql2 zV`n}=KyO^gI#2Y{N-wLm<+a>j*1}|f{Wcftr)Jh&gnbGmFTVe~fvF5O5ByFfdp;2y z_!v5Xidw#-WWrJM=@I=8m5uliSSxtHPk-`$uR%1=IuD$r#or(O^VEe;tL|i@Qx^E4 zBCHvn-?!hbAiVd#rUT8IH0pl+%6^L;M0U8~%G%+){?aDZ@V*~W&(fk)`t9lxEeOj$ zS!J@mI{|Z|3;?c-N)gn&3crDr`75ued~Ls>Ikqe z52f;cmTgcTB_l)Ysb4P>)d4~Cszm}>>~}iUNCYk$2jchg zb$|g@)L})G3CYlqrW~uB68vH5B|W%LZ`W-`3nq%t5a~3F;j<|=4kan)(=qcIcph)t zCca(O)LFZzAIRO-%=9u09X__RC2%S%)L8d}Q4+~5(8QDqnLa|*E^TtwOq)-9S<<^T zW~H;Nrg$rsHt4cAM`75()o3iw&c}3+OgD8dGl+L{(#g8aAKCLQ&g=zCyqmrPNUe^G 
zQ-|n0{w^?q?^%_)#tfO-_Gk_y-6dz%m>;9P&9$2*s{Viik&R!H{dGF#czFo1Yzfod zM!pH`FJ9!$>Fu6Il&%~=(HI;a(~`R#JNun)`bEbkKA(|?aQ`R~8D3lulEmi| zUrf)f2)SN()GSdLOuoGy^8BS$cZV${dv401biMlHLVEfr(kN%0jFBV)d`QI<3;(MZ zT@fV2B`>h(N1QGuS#xEkPx4f@s@RmD7plwl#lNf&nO4y-ZN;U*ST_)v7b4o*UQ!(> zVOAHIx5VrhnnT}lgE_PwAiA|a;P@L?4$}Rq$VP$t`WgP{5wihMC=CbrNL5fW8x{4q zLCSuCBa{n5Psd1=pJl8fPY?eX>|z=3K62@uK-1!TE;DMdNq5NV3!})5QsHv@H$EHn zni&s=ScF+d)v=j$LwVgPRx?taRj3(+YH+lUl5j_RQPV)md~QsDOi?% zSa5|Fa&=ax&{N4O&2?2Q01eKe$hXgaBBk}Rjwhp=F>pM!mZjE>wL8Du(Xhs}>SR)F z;4Q??cKj{V8~#HShlYhMuzgKpVBhi*^)>!gEEE8LpwiIL;70~+#>fX2wX*;CZ3#RL z?8;q3x7WHx4Td{fIE^hr7>{z3#23P7PD*~%;H<80*4FN1Jv3VLgVC}Hb>c`bxV)AM zg`cIUkW{6V+uvN~ZaDucZq5E9(ZvxEZacQ%LhL82Q=SlELz1dy;6ZNksi?qaJ{w zZwE#g8t}S)$@gcTtM2$QNvq{1BC1g}@BZGRgvM-LUjuLPWhS0g{ZO|!ULvKmayZ*qG4Dl-?gaf>tuD?- z2T@l9ofH-Xommj;Mfkhggj75<6)|7{#EIXjf|BKHV39nr2mY=HUqgOF<`F}5lWF{g z52(v(BjjHH$pkdoAyWsdQU+_5R|UEkm>NJ&_D-o=225-c?`*&*qKTEot#Fbl%_ZOp z>TV~w3Qig|Qg&lh@`;rtDX!(EV429lGC7^Z_4AP>AeJe-g6Lo8`5UMhFD!|mfP_^* z=A%U%{g38o>U>3M5D$$4(DF|^IVwTc%f4SSQu%Ff=2Ki(GvP?bjSm+T1O$!-x?8s2 z?`LbX^HU`x(GkZvp{h_Nn~K5Vba+H8tlCh9Cs9n9LsPE}43H&KMA3eL7fEL_RRC7{ zA^7@LkMO71zbZ7%xIAzPj(MAn?;h9hh5$l&t$#dxu*TcBZ!Iik?k#l7s<-QIFLd}N zSf?)4mlUZWu0(Z3SD4W|~BGbyLNESdn zyx-IP8TO<+9fCB?G&pt^NR0fQ3havc70at%)iPs|Ys{rS$(8g}$OveJ2BJ>&PXRudn+p$~~J$fYo|pOC>iG<%5u2um2bR=X9ON)pualn~~pllb2C zUxn}-#D@C2RGtae7R@w(pk_O@NTnSF4dH(W4d9(>L#Tsm6PUKoGd&laX$(yAxpC#~ z<&Ua3r4y5s>64~aE4j0cQa#-o zvrUXqB_o}j!Nm1 zSF$MhkddW;czfYQmm^m{ctd`HJAW3sCkifDi7y^TBw!_oz)D1X%GCX}5~k$I%lE-_ zgbf~Aa{y0;`P2dsawPaati}?|*6PXxSkH2XbOr0K0w(Kdye@D1rLICcv0Mp^2VF5< z{xF1>IK$MW<9wypwRvH!yBcfqC`19A4=Vf@3E+H)KK6)#35?p@`1^c#)PY6WJp9-d zMGX2)EI>#2AB~I>@|C(4`8xnFO1hAt7jIR@gIt|Sh2E)V*S4}2sE;rC{xbR%)i*BX z0=mP2L36eyA@%d{Tf>Dh*#`QT3+OQZ?XKwHdd5&u!i7%Wd}LGXvTxX<`1irP`Lssf zK>^?`;9tVkk^jE%NN5h=)v7RXctO66tuQ}91;U0!QIl`DAp5kl$IAWLm@UWou@XnV z%4u1KO~GAPmlAXGV?tjCI&a3Zkf5F;wVAW3P2Ecy;i!#g@wE8^)75fFcdS3G;;1K5 zsYXwXS2mkoo2Fz)oQ~Z-t;-n%Di6; 
z_k|)k_k}9CcOpU6=dhTQwGPhqw6}1tc)o%HVFS<8G z`ufK=aQ%=@051;0OOc?8-2wLM!O_pp8x+bdVjg{!%Ax#F))nSdhRtGD8(p+Y3YOIB z1&F-f02fE3)XT#7>&)qpgFoOhG8R1Y1Y8LS)CCsAq@4~w+GxXC$(*)5Q#%QSzb-w0 zr`I1*sP2e$hLxUCk5I>UMI-wR=eLg1=S?@FB?WqV^r><$v&&273Ysh#h1*0gf5jib z<9nf#X+6I8w41;25#n+2E&qC)P>%+kbgeqsrd@Xw>P<5!JeRfyRRSJPJ;M;y=JPG; zLfz?us(A_-)Z8Dd0h1kgr>|RYT2Va@z>dS}>kStEZfkrdD?0jmqhu{TnzD3=Jy5ta zafy+E;oFtCk>Bx@^VIp^S;dL#7~$@E{0L&_F0s>02oHZ zqUW3C%L`z_DdZ=vi*TxrHNwPbHQO~2s9g}@@${JTIw3b}0VKt1@w8I=YKPC}rV5i&lvA;` zoWe$no2Th;Y38&0Z{5O#`Rw12BWg2oe^*ULg95Nw$2S|W^+6BP2@r`nn8X-0tIrjrzetYvMv0`dHf@MMx-sx9khgHk+ECjD? z!PpsLw3bsgxJqJsXy;E|D)C%PRIJv&g>Q_PQFh@zdqHxn&r9l9^o65+)-@83P~4K& z6K`ozJipvbZ^EQNa++*~#s+Z#t0MlsfD>qU_M73E(@Y^j&d{!dkG?SDW24mlg>K3g zmeWa%mlus2?fasNchQ*#pG)gXmmu;a)42oCm46%nHnT)z5cAjZr+Ku~@V%10eE z4QrGNsExB?V5(70dr5&|>*?L?;nVu7`d~On6SkC_EaShoPw9SWJhtcvoG9Di7pY*j zIW(eYh9QW9N#aTBgAtxWnIuANL2Wzi#0-Jt(r_9yOL>2{?u9a)1BCIq1tBZy_5GhWe0Nt_RCJliBSt9 z^Vba8HLqKzj|FM9dK=%;$&gHJjZ(k@+lN22iVL>yCSvvm(daF zFip}kXmELAOXjknQ=xv@sJKLfpatI^`5LQCl@c`byz3yE*_CYwTr8 z;j7kAr5N8cOBLuH+?SJII@+XDm5lSpBj{M`e<6OkoFw|a+BWR?c&221Ls{-Q$t$~n zMu5YiLBJXQ+VpLvhDlrqx1T>uJ@DQmRp%|5bdwR$%Zp>t-MJR1J@kM~a+`X}54N~j z59d64*pa0!>>te#9{FN=#Si8vR|Ng2=cKcAgfK}v;vOfBOjRXLzwWRAPj6?f&Z(NN zoTMFe&DQY-`FtW{h+i+h*t>3)H1U0jmpT09rV~XH{qX&u@~hKm9LGbT^mt}eykg#? 
zO`1nYTs};CBI7xEEcV8Ggo04xuf#jzFQ1g}wwx*Xu=obDBKGV>*cpg;(CEPFDRPRk z{Drf6jpou{mOHdN0ACtDw1b+D$*gHQeA0Vi@#a}z3MO}i7ZD`Q=8$%~vjne{R&RLE zP7nGMq^o)`CZ`IqOu@T6$*h1_C*-N`R%~33oAi0}d_)5yApAG)KJ<-IrBK_gSvh;e zpEk-4k++8~Ij}=aK*inmG-0{LUei^FcWHwnR*9jVS`9Ug}C*m zn(De{_|1Yo$OkQ^uvLEJE-q&>=(ttGYvBzPV;te)3NZ}C?aQVH zXVHoDHPSCzgZ{WEud5-wpaLR42!}2Yvn(%vdpU#qY@!TON8o-0Tlm<}J>j{U9)Bp; zHC*KoqUXrYR)*(VD>5tBWyc8R-;14d*kJCK8l*e;2UxTzCK8yNsy(O}`$w zF7<;0JHvv6w>5Zf2cZ#o1bNa-b!sIOpQDH^h{J1n`{Y*;4QOkczgs)mRLRTz?)&P+ zc`EY8@8Rj(1YKV0~ zH`@g+Xk%5V50n|f(HZ(I|c-A(R5C_sS=BQ-TC?A%&NUOjhS4Ube3iI8k zK=fF53r73e>F~gQ=@mZrNI;@@qK;}Z-;&d(A>Wwk=v%S+Ga~_UiEpzls;0R=`Wn@9 zMAy0x63ET8^}r8~s8T;Pwt}fekVFwks#;zHsV_zN=J zxw2#LmX!ErCsirJIVu%{^rE|dh{b0fyLPV3^Lwy?`BF8{YbYBOAOud5E<*bi0<+<$ zj2GnFn@=x=&2Kh!9-w@r$A{Q{h&uX?PF&XWfNU&*leu&QpXPGbwIhi`l%@7_Sao}f zmk;C4N)H@)3RYzm`FUqRS&mEw7a0K;jt59ZfCco#Z!@Xi@1bGPDB`hNcQRO93)*zFrA*jijkVB`T@4DK29qRkAaOravpaQ8D?rW!kX{O464=Sxu+j0td4j z!(7unaCT~H3#6NETxi8V}T6!MS5;rr!3{ChWHHJ z*iSP&bq$+Zm*`ljw19;azplrVs@4>{Jf(9ohk9nj9e~x0qPNu9xCe7WZhL4Qc(6Js z3`(ECPTCX3GkKG`kd+xb8!RsAIkT5=(#U(mrOw29lD{N|s7|n7qu2It4Q5?lglapf z9+Wjrw2AK4Y{x`6wM=gx@ZO|=+O0gPkr0AQBocld!3EWdq~GtKB2lBcR4d=k)*Qiw zSerPu&FjPV_e&Tb90iB0ZZ^?ZU*7N7d5_UQ^YI=HQT_xhuaq*eU3Ft~h_<5_vPuKYjc9eE;~~mNOC)u&O&hbFkb2Xv(4ksjql3Q3E``oC5$Al z?;_kP8y}KHt!@s(A1+-Klv7(7JE?BlQ|?b6H%l;e=h>S>>!WpERmf8txr}IL6Jgc) z{KTxi_@mFFH|tN?nB=gqS6W(97a0zD6V{)Tk4+~&myxC)C}ZxiFIPQbo+XPS#(!;0Z2dq#%+&92=7Gw*H0?0=sUtRke)3hAv?LNTP-= zH5?aIP_U1%GY#Uh9Irlx5n^JG<<*Nas5`B@3rz3NX=An?`ZJ!2P;YNcb_qxLB~0_* zPM^1koI4>E5tR*HaGW#Ps;7lcC;ZQN;r~0Z@NZD=Z@n4^mbz1FN?GEb6IN|OcklK^ z1O^j|wZ=RUREk805hdox=G@ARI*k)w4f!rNK90+II0+B^nzm3%DW{@9e`B44dB3#x zuia||2xIEycwNzj$J!F14kNYAt}Q^N;)RSF zhygp|v>!3yk4^HTeU7#p>ORI(%E_pZ*;a2*f82%|K=*3z)h8m#S--OB-@xay@xJaz zbatpIwR$WrFYH8~etdP9P02~~75AA!H~pcOb^~>(#O%)H8?9X}rf)W%tL<|?H~ZfA zKDkjcqOARr*)C9m(-TYpYxQvKOMuF57zgsq;X3vDQa#OBHQiuCc!0v`9fW{%MgQ4$OxqGt=&fzhGf?I0 z3G~S%=+re9Ypp%y^TB?m(EL=fHbLz)NjbCmjP}URp#1cRow_wB6ol^%q4!B$CNeAX 
z+%{Qp&+hjUX>pjSL}!PqNhrjPbT^P=K8p#k3F})Z)gSa|b+feeGqvmEE$RAf^q<~% zmx1asyo)1s7>{a(tN@;WiKbdV;RvR%HXXI0z)d1-){^ouAvM>H^u%;TjS}F|P*wri z&fnFCf_k+m>Hdf!eN1Wr``1D`^;R3vR~1#2c|wo36#GnVrX5S>4@dq}_px&v|8g(4 zA};3{xBJ&p=8t;C-#Z**mC9ZG(o*5=-I4BxTMX(p>r2g}%q)i}hLO{u{UUNKP?b3)H^`Z_IA^r?fOdJSyk^5?$~NqKLb|voz(d1P-8v$_{lV-J zqQv-|7tAjJvM)&+VS@izHvJ8(IU?3G_4+x_lNoR24;B=}Is-3kcr%FZ>!7Q=X+@nGa}rGz2&4JOW=R{dddbH=Ga}J}7+1 zR1L;{iMJa!vVt?W`TkP_<fP-p<#I-5=gE6G?N7J&=B zom9JoE&wgb`~_P2o!Q1uh-MY7y&B{mJ`lRMb}(#*RR`*v?f2y&>0p#S=hz=1#dH4ugc*{ly^Fc01j zHZ=rCgMgh{F9U3*^^^Z#GlAtnCAJCq?^vE5&SQ<2vmV#~e`R?Nwx-kpSrjb-PWrgf zD>%UoJyj88d9`AH!CQW3dQ$iOl}TlN|TORG+bao@^{;P5LU6+nh$y(Sj7+j ziB-&U=ua0X{O=rn#Hbnqr1@X@&UX%PKGNl|zW<-7i9fPm86ZJN@NfKAu4Bx854$uV zqW1+56XEC2z`F$`;Ia&YnEPeKUxL>5k2B564+V6hRm8V(?cL+js5&_}>F@CZKSZBm8P7f1E zs9Q6m8gCG4kb%hn90Vu_>mH!}(XjodXa&ZXJHV*z$eBLm^#L-IVkM=e>E=__dw_LKE-2>C0l0vEey=@?PI;;G{2QV z2|m0gLgixu5i8m3eK2y#8vLek6WB$we?Ml*$D6!|e9KGd$)s^N8E8vA0Yi=eyw*yg z@1t-?b{&&hWho6S_Nxn4jlAEtQJCy?+(phdWccz(FIu_vjL|AuR4Syks7=?i1tMoO^n;?qq^|A&2l z@8iLk{IBg^&V%jKnZ7#DjADeUB?S|fa z(196VWe)FzasY|^4A?0CXK}|!VH$u!kq*$HK@6ni zHXh3Wsyd_p2Y=Y=KzcxePsS(3ju=c|MSF94oo~e2V^A7P^Elz0iVtiRBg)O8b@kb% zM3xIdg++MCegMw_Gi>_&7wZfb8amQc-7_$Ct6HohC?XW0_0_O1*%NWups4pL9Z|s4 z=3@i_L6+}#9o5fiM?`NL{z zrTJ^j3zAsNTkL?@@c%`)HuS-x2As^PL*gj@znCq;_;~Df5d;={a7gdtbnaK8^qyeZ z)JPl`uJTLB>P)jS&2#q8+lC~lGp%o0g1Dw3%`-xG%c-eUADVU85hO?d-47ET29|6d!`Ow02-=oUkYaz>|Dxb*}UdT66Eg) zIGNTA3(H>+2LM<`I1n&#N;lpEcz-DXn)A*iBgW&TMnD)wx7HxC3KSL)lm{n1X{y0~ zSWGw{Wd<$E%qE_Xf4t1w=K=IvxKpf~ZLmgEP{h8!ew&anusn}D-0FExvdO4M?ecY< zGM`!E%0Ly{|Har_hDF)6Z@h{K0}?~Gbc=L{#E=qFA|VJ2gOo^zbVy3qkcvZ>lF|m9 z64Ig6&`1ji>^1N6KJWkA-ur`x2VXdbx!1kcwXW+tf9Gd)ZlOd89g;uT?hms$Z`c!N zvBlX#`hxo-S#h(&qM&L_;2MejwZQJ5j~3LA?T%hxj-V%H_zL)dIkKn2HV-<+034yc z^()aA02EPnNAt0}VBc>&(zBcU^bxEq4M|`|&bNh^CX~3{looutzwp{~b8MRyO`@Pb zOydhWw{r{$-_p8Y$uT`@c1gyZZRdFeU}rK${vSJY^gi}BX18sR z{{a1GWRpOOY2RxeWoydXYK@WU&2hf9WwOs?=kpC%175?OtGD;=e!;LIfa%FoWxx+m 
zt!#jYAcoW*7%YLsY5&qtf9#0+L_0@HnLiLD-l72dRJ3Bq!-WMpd=~WNO|t$yhu2T) z2AH|Uz^D0mi*#JpC70>L50QuT?CDTJzj8FXlecjxQy7lSZQb`L)$zsLfx%h204 zB_Fq(>ydV^tP41t%I2$DPSR{IR7VOurvjwMJiGm8UR1{$%rk%NXuM0f5AcZUp*Vza zfF`I0MB{#7cx>$@pvUkw$16;YfB}zk((oqg6Bh{0vYC;8m?BLX-DQ>j&VKx@*4X{;j^p zn)X-VWOy;In6)gU{k+1CBTb^Z>p4LQ$;~AIuKWbS`tQwAi2A>;kRN3IMJqrRPKZp@ zIT-nXqGWkYpyG+bGw{gM0UwCg)cw~(uU~qkO3wgIw)z{9G-!|9PXT~4Rmyu$o@x36 z*XAmS33mJ9=vaO;7|6ZUi5~oY>1G2smo`^_&qRwm+4tH0Ssq2U+xGqSPo6f~*U6j> z#JDcTcE?6Ge#~cCO-(0GS&o;>|7wHF;8M+rxUx5&wY+dzt)oIbo*lTI9-Clf9>{xs zkyS}oukb3-qExm!dsKB?g|GWq#_UB~fOl$}b@`TO&&GKMP*_zU=N-vsO~8i)Y0xQ2>KSK)oP zwNsu?0D$Y*| zX8`9NU|ci75CY~)Y-$Fl9=l;R@$w$Hp8-l3Pt#{@b%J1TZ~}b%U{wZeJ&olib$ueuH*=rs`TPQ-JmCwXX#AWk+Rw_j zh7x0NvYSi#lg=-mEVWc;l)PR5Oq;cLb%11`fH@mj-Ag6X)6+SDzb^+4XrLHI5(W)9 z=XW?*4QMGZUcSM9_aA~s6Y%53%LzGwvn_G03EX0? zkF9is?{|h6@p<>;X>QJ!%?Np!~kF}TQLN*BQ8NX>rv z6;E)UuD;VpvrF33<(|vR)c4T^vn8$_=bJZB%-~oMahFnh{g120G5SA8)1;Z(n?}v{ zczQ`!eZxPkO4z@h53&o0Z?Chw;|*~+o2P8L2mx6CKjL*)v1q>_e*LX}*y)Sp!2TFy zCQH43M0IU>W}#tB;fowuG~b!ybUMX{_1%Z_;Uxvt>rafgV-3a`Qh4Ok_~~YQC66}g zR(owNV$J0OcMnI%>v0);G{SRGnOLWPtck>l-JXx&dKvtVoqF*;gQnmAQEEoX*WAKa zml()wlQ9LcwC6S#@WH<)SvFXgJ|5Q}Oh>>=c4_Ae{s6Z?U$&%M?C)Zh;=&@Q`KCJ1 zRQ$GWX7Fnd++xI*FtNFG6vBoQKk5yS3~n9*j+J0Yet?+XD?nP<&+Sj1h1L!-fa+-`TGXATcnT%t=x9a{L6I(#^^Wz(ARXq-Ih+9*$W6 zSC;pK?Euz@nQHa{9F+~Wx~#84=L`@Mbfe;a5a7%dE(rH&>hIf|W~M!^8rkbpw$K-R ze1_q4bx?yGX$r=A|mdv#| zP-h=h1zz$tjY;~*UTj6(z8N@YRszxtwo1H*aQ_I^wQ0KViJhIMPOj08=;(K=bo7>X zSr|XL$4_KWJLTVW68Tu)(q+_1-0{1nN3(qYo%gG}sZ{q2pZl+kRJ-Lm@Z_q@M5Bi1 zr~Z)c9G1?+ugVo`42%o!iC&iM+K<=J8}`JefaBv!WN=fN9w5xSzHM^g(8`2+x|J4} zC=Vx%*Or^Or|07r^5|_2^+b_z_F<4Q3Ihi39*r&7k8CpyW{IZ*BxC<}#~jA)zEx&a zB_=KB(f#}^Uz)$JYG6RKrr#j^;UMjMyD^W5!nFK+7Ps9Z{E|$2Bd@5^r@r%;a&vnk zCmFN2cocU5vI@aj2+WyF{zWraNl^n z;r0)anx^$vge!M5$ZZpqRn!1+Au zS^zCIw;jMi$TzmF<zc+yvX~wJi)1cx@-Hdp{~Q`!LNU{0Pfc8l-D?|>7$Ku+23~jF@ZV2 zD`o&LJiW62ieo?HU_+a~`s@E(Tsb=h!9fWsZ)j;qzJ-4c3A$r`;LLh;JbeGI=&~f^ z&wg+3js-EZ<2@RGoL<~Jl(ZjG$G$CyFAJUx^LfGE8F{=Y++8rxt-V4V!#gObl!w2` 
zG*3neEgi)0NR+$2TD{%hXSIfMdB2hn+BCEE+x}h*Sk(6ro4c_2H-G*f4rjSBcr^S3 z?Rq=CeES*Wd8cAqgp#hhTI_Qpg;W8AW`(e1B=R*1C8WTF+GD?12JIU z)x(_sZX890aX;iR$2Y6zrWYuJiwl2F&Lc5Cj1B$aIlrB{$$_(M6A|{x=y#w(nCIAQ zc=ETox9ihe6U>G&`2W3Obm_OKz5WqEi5&pop>8iyc0+)(3fVvPi)@oNO^R&ZprnaOczghCAZL-!Az#|b~45+jlrmXIa z+U-0lFHrvzr z{8;Tn3SEEHen`|in;STg<`X5Xh(;!E9mi@@Jm^pFYnEW;MjU7M**e=_f|~Bd(7Ag_ zhb!ayt7$;>V#Pol?2L{Tcx03HywvPR=EHpluN%BLem*9joT+3Cot!g!83t2$9~sFM z63N6w^pillnCpw${h@CpXljB?x_mr+D2#sTMqCgzi*86%h!8cB+<@S1etu9SERl`^ z&-b+X&(>7YhTN-DkijwIAUo^>5QK)86{VFkTc+PIp+s$emhQM=I%3$Y9gCpXHt_wO z68m83r06P>@J57h3dlGNAsJ2h`1`JFG!;_aodcWWLTc~M$qO|#i1x%5;L~ulp8}cG zf~c7)IoCxy#imL1WN2_I!1$PS?H}~>_ctfh5bkkr+Ozz`E^HB}q55k606_Di=1~5> zaUW)3HPoL`!rJXJ8`S9>rX*@!XEYB8jxq`xMLt5rdz~2GBkIgW#_aGf`l~OFeVcHW zlR3JM4q}1wV`(6|V%kqCAzWDC=eH0x(wpqfq$O$JaSF}}3Hw?I6?PK?2yUs;qNqb} zylw?&L^QKAXrKIu_nEScK!U;ukLKrJNeEGR5++k8Y@EG7j9PK96hj2h=PtHZ?!ZH_ zkTAlYzuW;x7)}zJ-RG19Z46TG0-!lV7W(|7Rj+0~OoQU(${nF%a&DFOf}^6-2^iDB zslt;cnY9e-j`bm_O4Ei)nB%Xom!LXCVor3UaFFiwzI+P|@}06AS{CqRq;V{=??NYd zj{kxpfVFO8or=W<)Z1%4tno)rhjoWa^m;U7=cyZGQa>D-I$qb!tFEB5uKPStq3llg z^yrYi*YR>6(~av86=qwNSsSsnwXmW?-_g;TfKQ7mE(bM6D}}-p75;bgh2wX8KW(J3 zKFo1Sy;USIqj?|$Sw7=wCH&zmBFyBNJ-sZEtd%LEo{ginsLAjHb9)$tnsR$Ga-9sA z-4{u2D~hOCbp2Jl!-EwqF9G2~(h$YXM1ybsKMx>*8#`sYFGZ@shx&)J`+4Wj`1F7@ z@C}tjhOl`spg8R2K(G(1XhoBIFHM|V$qoixdWNANQ6xdFzNwhbf%sBIA=0PnBGieg zFoe8X+wp{{D}N%XbIbngxzL-vPY+|rea8E~A(jWV}E8)xLP9F>YsQm`dIEDeZ(gB2&!C9z*Hh%(SQ*d?ES= z)^O75i+|`3eSJSlj)jlaVN|_Eush0Ak?FJwggHd5Sp|^wcC@_Dode-@_YYGN09e$!odZFx7UsoAnoMhuCJ-p=RM~MJY9lt$i~) zVkgOXoSlgLyEF05E5jvTJ1MYv3vuQ5)0xnpiFi?b^n321^N_*EU0x>c5mCagegXcCqq+aF;k0=9%lq&} zxXv@@j**B+%M+X`*+|*+Lr!hk+TJ!n`|)C0wC&IUT;cbOLv~N%eo$+K+d3u{>-y?) 
zRdcs4unI&+oyZL3fxV?E+h$t4mkGn0>krVjUz0dFva(gmIOp>u@XZ5j5*87ZgdDM~ zkmdeoci_S*QIPMg94{^{4lWId38^T~V&WI@l^Cm{{P74Tep?jM0&>I@NfqzPb0l@` zfKxYhM6U{8?}p;%GhysoI7|fMQ2zvKziw(J1|F79CJCB5sGB2}(41O@r~LN13Mh&2 z^T*ULn0Nqy9Kj@D$N0+``c`bUoK8a=85JDeQ@h%qo&_l7h89DIiJKI9*x$&G+XO@s zS%~vp!AycQ#}~u`;VY=4XcAMjg_7%VqtyH4)EM+O6($Y_j=F`(a#ZH;qrI6eJ|~?N z4|cY&|u|Ji#bj`+U)t5Ys^1jJgO{2G;s{>_53ULPFvHG+7l$N&Gil} zKeOj23my#FTz+_L91)ubu@h(pMV`$Yg~Efo%!*ka9kLSE34Ju7FF$N z*25a;JBM^goA)aQH!f#_D4)zwkRRT~?^>GD&rrq{U< zC!V^7@x!p0i|mn1!}Pxc22ZO+=Zg;a-gsWWL++!yw;L_U^xyphc$iA$N6 z+K@>?un@z*OvOy@!yI4UVPhV73=6Vk`r1rSnviLffTXr-!4@LR4^63_SiYMSrlwaU zk07P#^TIv|H!+3M!QLr$Fu@Q>ipj+~;z&H*kY9Zay*Rp9zvhc|S)=Y}NxC84wB^7# z?w*6-2=kJKPl3 zQ>)0v#&%bC+#HT@+QW`*fN_vfD4si2vh##N9M|LyGg4|I7kxchhNN!eE|4T7m8%jo zLc&vY++5{OmR4X#NU{Mzl@U zOxa4Hs~bLeO&tnrJN{=W?OHBMVF;V_gObRaPm5PtWYV} zo0E$M1&QAlgd|+K$IG51j7O&hu@z9L&_&8U5W;}+^lTy4(5}GI$BGC=oP;ituivAa z5gc$15<_T$wsCtvYXa+Ifi>uD6%(Ua;RJdp*<&WYeHHYcxgDbQ*|$d>ZdQJZ_w1rL zFTq?vUkjo5`{(hH_Z*xftmx2WmiSnvNE2ywGVf)<>~VgwV`>f279t}h+vWpx2PMDx zJ(z(UUjCw=)5diuD|GFGiQp+I^2_& z?WOf}ljA7g^Z0*kV-ilBnGja7;H9>3iT)YTqZz3ZsfR7(1{y5YLxNJobYkVNzj`;k zd=N(jfpZMM(Z6N=OM1h@hf$$CL>%RCx;x+bbKoLuYd%C5S*&@^ul)79Gxz1}E9M){ zEoWXov~(HC@<*x`Rv5e@&yvOUhkQ@tbVgKZiK1bRsEuaUw zG{6Hr&Nbh5gy^?-#wqOSW0ME3LmvqVOO@Ag{%0kC1;Hc=A=mJ<6lnpaM~AnXKMzZ- z8pCA1a!gWQY9wF&gWqaep$U{pO%`?uipN>6ZPm}IOGs2p_0(cT)S#g^*l0JX1UsHs zG9*MNiX=jFP>$@Qy;rd}A+BNT_w7d#^V574$~{I0NPnprEQbQ8>;}ueQ1=I{k`V7i zB*>47N?<`*u=m{wrVoF_&OlwaRXm6rtWnGEab0W~{VS^dHnZ^l6ZCXs$Ja>Z6*uhPHNM)NWNZq{?n->jr@T>KE$nTBJ0zDm^Y?T3(uEvL-R0`itO zSPePY;<9cWXF4k!WxYPjtQUNJCI@+#4&z!%kZ+Paaaj|B-*v!M;??ay1+Ytnrc{`9 zo7uLr?g%^dYqWV`k?^5vtaAjeNhZV{sH}NJ?;Tdy77g8D7WTPoKcm-9dP^9E0Cgd| zeIZmRxrdgez{d;Psz@%r$EjySHXU*cYa}n0F1uO>q?d!Us z7_6RU4mIBsQCr!K4>NJiIZWt3ALQP%X|=m-y*0axcq5EreRUK4;6z7FQ?I$2(S(zF zC=u_%C0|v{Y2uQ{R9WIpL{WXrTf4ABwU^uPKD#6_K+YH#>I^C3}a(D_-Yq> z4cb2JrOc7PndA9mIabM8YC+P8{(Shwsp5GTSM)nFYrDpRjQz%LgUEpXOZwDNxea1L 
zm1j)gG!uk8>c$NvT9|*V)*WdXVNl&L=NeOvr}rcY(SN1+R4N}DWfnFd4d$Qh$2?us zk;;*>YPhjdFvV3?;SIWjmp?xTD<;YRR!B+en+7Wbh_sE;wojP2Dzxbh9((T(xCjwE3X>rn9a8WrneZGiejS>uMW z`-k9dxJPNQ=1djqzF%oP1j({%4iB#x>Hl+}ZHy+Rk9*n>?f(GD4KAPG-etRxbO>vW zI2&w23|d1NfV;h1_~G-lSDetxPk?%>j*Z{J6taAC1S!Gr=&(BB*fEhmY+!}qbLN-* zpt|9UsE>zoeX0mO3$ph@b`?<^ozDh88M{F6Q z@h)j%j|ML^rGc>Zy;N#NGqKma=5EbMUQwQ32#rj!w=MJOlQ_xEm%?i&rlI`1Lh~dO zQp6741qsMbKT3$h+}RACZyfQKu*T?77I9JS)dR~y!laX%f}@AQu?Df!fA_`M zOCj&M7D}mM`;Wu1nKz>&aUp~kU*bjFVcttL(TShp|=tlP(3dAY&Wq( z*zqF$^4F-$i%@e4Os0YvNi(?1+>E1tatvHB+5^d*+OZC<-d6KV5|U0_Jr=<~(7s8q zK37dfVM!*VZ*Xy}vWQ6E^OPJrq9CWCx8VZAO0^Wr?B@hGal2G!80AEi#uH|U3Dli> zBg7+xT6&|r-!qPS9-Lxkd1@s28gqJ)0s5krWHM6%y)j&O^$D@AxX3E%!JS-P#CIx0 zRc23SLDv#ifAL1=n67>;q>gIsLf(Uh;G*_v4*6y7=$2oN``PS|-#@^$CPrpSwkQ0z+f5l8cgeaS4T_e`YIh zzRK-K&fG0xMca~RBbh$V*`E6~tzIh}o2q5`4%S1shNg2VN*5isjPkTeUb4Q07Y^b5 zdj`pqOyLA8_S_#;viiU4Z-xccRzI5%&OMe!-+#gu_~6-fVV(A^Qwa@r z)o;0O-~o@EkLSv+zYRsT@uKy6nlaLz?D@zgbc>d~S2lrUX!0h#D(z5bwx~;G=y%pZ z5g6AB$H;j0w50ffKGB(WEh|YF6#2Lq86kK(^ri|KfdI;m?L5Kc zrMA+|4GO_DC`I_AEPN$=59RWggmyl~#8%-~4y?jaVIAI~&@XD^oM6#^`yDEn`CL5= zqA@y{NIM)e5N2Sb>0K`!eo{i{{dd2+KQC&aBM8Q$4mPEUo@HN_Z%k3ldQmtw1k`1p zV?_cWuPJ^rV@>dduN$Q+OF+WZ&BKKAXH0cN<;l6aCnGk+<+e8@2Tp@Gh0LF(HJ_r^ zPviBkl32L|jV?by!@wP~!EUkgja_YbJe&Pt<3db_BY+KnXH>r-&%X>cqREB!YF%Q=-1HIvg_Et0L`aGSx zhbo9D_K3a^j-Es=r2Q;*+Qt6iY<4KHbICJgH~DO5+2|5s+J5dH9YhDteGl#b_Cdja zpT!{Lik^uGN+n#9B<(M^=#(JWssgl3)DI&a+44uY-_VJRVOaTAX|z8_gK1xD?(VTBf@ z##Nq|EPQ*PF1~C^A?vSa^0``jhG*Py5+&D8W;G+|)w9^}q=+S~mj_)OWtjX89!J@b4TBCECV^7G@ zB6hrgo*1@&5gd~y4;BI2C?d12$lB=NQmiiwQ;LlwqEy4-tg==>daBWx& zctv;%ha?&1hKbBs1I0L$^aTJWDY}vtsGl3?O@b(tEYgqaIa-@eOB6)LsySJJe<&Zz zgZjL*fN3hR%D02-e1dO!ZSbjNNew1?y9Q)1M-ddP5uT!1g)h%pT}NX8#-^^!`-Q$z z4>tS(7}G0=h8)^7ZX|g0hYd`un0tPf+7l=R`*z*Qe`|kB^@1AN4PQ$M_NV_ zJO3Q0OL(_#`3pb~0?GA~T_m^2raw~TFA8Z@L_4JU>cTGm*=Q4R_$VHz{BYi%3U{&1 z*t_+^hMHFW1)cgRziw3v(}uSON<|$Gd5{6P#OKNjDYZUyJl}3BpcUkVlHIFQJcH;_SJK)p z*=Wf;Dh-6hVgwxeFG1-eo@Pft|I 
z<*@uzk?+;aA^W#D3St4Utnj3#pre~CQkviaeU7RKF`ESl`Oap#j|)@dufhf{00WvW zhgTF48HB3Bgv!mULeyHC?G7>d7)RRRAfO)qK;%m9XxG zK(#;|m{zD?z6A==1$hC*=m|<>lQCi0$^_vF5AB1Knqpk^ZzZ=&s~ZTgM^n6~U=v!or<;w+%#rb_ zhGFdjHKCq(N{aOhZiDtF5e-!)&4c`-krn56dIVUY`;c+gs}qpKm=OC^m-}3+bmprc zBT6A1CYrdLeW=hFE}Io8bTU>OMWwAi&R+sY4_Uy~NE}u(DkHM2-R#WNCAY{ccfRlb zbh_G|Agtr75E-ib^`hHH7Q3J#-B?Xlb9XftjTjKRe$sTjU|2Q@;buP-cjigVFt>Ah zpRF{m(@itD@z(Z&Sc>={5t*&dEFg)i$f3vsu;`bHFDVrfp4(Hp`9jR65|eIs*$>9{ z=FCjN-RUe(blW4BzuRXf2|45@v*A60yUO2ob?$IB;PQ=7d_03xLVRYJYaDheHAW>A z5wh8kAjW%~DKM$qDA*C4I^6Hq)t`wyeHs+rwS1y!lt&4vbx{! zjLXx?WcTWzF3HJU9;uR`mJz%+9OkK z|Hpc>5;-I{BUu+g3bB1crWA3tESW+3uMKgntzvSankjz{8I*%Fcc1@a9Bfk8VCF{v zc`zJP(+eHFegEj+9f>p?G@GLIRM$gwFJBXtIEPv$QR&7?@r2$;9EY*as@dytGP?FK zCk?$Z=!b0lHuwH@J4%K_@8Y`>aq1#auGs(n%&9CaS_wirtTE!wLom!yN4bG85;h9a zyNfX1WDRF9$T|@9*feN70da0g;{yO%UUri=krCCe6e+?8Wd5*hXu+3D3;*L(jEuHc z$6aE%PV_~Vm=n)3>*XAn5VKk3DaA*oL$+#en4+%5y|;xmReBn)IICM`)vM}QWi5p1kU=j=vQHMWl`iE5xB6{j zODubo(0IOC*69*Nek3DZGG(j-KC1A!=7^-wM5On3(7K7%p$4&-r`|`QYi926(pz0g;30-sBJ0Zx+ke^J8}M)k`&9oz z=c`;V@humSpsS}6Zc==CahfjMy|vwId++4VklWlW$jegi9Wd`~6Se$`&CRDckt=&L zrIeq&HCg4%p`FcDpxbgnoF8_1!r$)C2yZit-phnhzt?Y1tzt9~>kkKcA1I?t{v(ZAD` zZGlhnR5ko9YLW|U8q8J(w%7IYV4#WlpI#<}5KLngLxmCvO9I~O-`NO0OL&vWL`a&d z5F{DN98T_z5w)RU9CcZYNcHn(a1@%2@+I${0qGQpUa(0sD3?e_LquMVcZ8GsFGX<< zfb#hs_N|Q)_11`+(eBeK&JrE^ z|15YId3d@QXfc|a+(jSCw%9sBS27hk;1&DP9L~W#)H*?9{lSgVn%6Fe`435K?%X$% z4@;_u>VVdnZb-lIhl!U$Z%;(HH%cD9S-$n;8B^Cw%}Ymu`WDV7%|Jz7Civ#1-%s;w zk|$Y5s^_mN4_SnLoFyke)jVC$F?VS8Ib8E`aaH-S`JW{s=Q)ApK=F<(gk6;RbA|lZ zGWeaFPtm_Nh08rUkjZWwL<$n$6Qxq6YPM}aK|$&|y0?wTeb&cY)gznWig6oA?Ctz>$3V=MK#R0pnH!{+0dt3rcUWI3 zg_4DZE*Q1t%`fEO9FsuV+n?Y|yF{iqmb<)zoZR~R>h~91Bi?y+gwWmdzn%O6%yBx| zlj--2^}b_|co;x6eG^#{Xuy4AAeQG?;lJ%|^5EZ?DT&OzD-ty`&3}LmBk4Q6ugAVwxl|%SbuXzteN2`o4-ksC}kF1$O zAc{;6OZfhPX=+pX*aLNL=l=7zcrRqiPlMCSP(*Rld8W4aS&MsB!`nj! 
zrE*5(M%@qy^;@zRN}ln9J^hZO8(SU@1gzuV^_<{^Mi=F7EFYs@C`A5T2Q^l(2z-!K zE9&P@`D^NgsFYuKy|6{L64wo9(-^2X4*vFNqG`SAw;Jl>rFSKUPZP;F9|#G588eP} ztb(0kByssfSEwfTS@(GDZtp5?TZTu6aq_SRT;QjsxYw>3^Y)a^RI}XWk^W*0Cyc{4 zT%dyn4Md&fu0&tBiN897CFZJ9i`HlPO9lGxNZ7y6`GMoq2d5L!1!g#e`4+x5+2g4O z%m^?C8MgnMKHK=R2ZW3Zp&p$F#}Y~q$%5}vl>Sf4P`mb<=3dPcUm+#|xuL#D^FIE1|#4BM@h%e|BMx&2fq{UqnfJi5Q0n!pjAlujg$@gn(3-*m|6i! zpp40fgoyG#Pe?%4r2pJL#ZCU zEnbzX%|mKqo3VYLBcol8fFvtXR2G;*?1dN^NZ5PkQ@9=+9r2EPxj86m54ANaunj*eBs_&hoY5eaO@0Ax~W5yW5TRmkX*dvK<$j z9`}E$o4=T=*@TD+c~yM*TTt zU5c1c!;TBQ-OahDGDLjY8&f(e@pt*)YZ#pk+MvCL^YoWyhulEhm+6#ZmF**bj z1fA}-Smb|tmhS%h@f^HR79Glu_MoS@^Z#>G|KAgKgYv($x53;{*3b{G8&M%BbEx^6 zcbxS~_sf*{fv^E4!^vC3GE9@nlmv=5m13#z6ptoAHDI#C?Ht^_u7oe3XK5AtE>Q2W z-mS>-WY{ak+=L<0TLUQdDE)p%sFZ*g;{<`6;3`Kyjsz>wC@ioA<@Kq|C@tr}T8 z3fK(LXxf^6qAv-97m$z?TO6a<+VW*TP$JBmJ7h&ZdN4Y< z1mChb-w>}ZY5yGT`iy@u0V24v4+3c~Dd3yK@2lSkDLVTS6z#CO0~Y;0W(S z3sXI89z4)K;8mYJFaP&OBOeSg5Sl)ovW{1GshSA(e~_8E_BNC#>G;+c zh`O;UoCAi_!HU9lQb>97K9(J_L6<6h2kz zC^u`=oA9VX3=UwlG|PoJYan5o&1t5XukBFo^9LoV`#r`Grcqxlkbk}T2PhfMmykLP zpItQ!&iT;7&u2|Zz+yONDq-mNgIB%VWp#^gT&-IO88~%rJ5$NNA>clGb$-0nIfdu4 zt~!cQyF)I(P!(^3xTN=Tz-!{B+hX*h@6w+6P7RJ=u@wRE*?g?+e1(CXDL$(nCJ9z# z!xxpJDasSf5d4q&|24C)AjhHhFE72@di}8%dms_L#;P;ggjLwC-@z$VNiJLMPVD}a zPJ80uC1HOo(nmM$d$!$xTq9RzC@yu)EOVlVMSa|rugU$}^7ru%A9(hKzjV@4GE-ub zoaDdS)p@!rv=;QRPmF$6z=514+2A|I7#3;IThLZBJH-2soNpNpQS0@ zTQi5aP`Q5Ph8Z#VuD`R|Mz!JT+H&UkRr^VV|9K=-1BD-Feu6&wW@6T(I#vaQMcMDy zl|5)~_w64iLN13&gb=;!6-*-SpED>Wt@!Vt5Ki{~LaMGNiibyMT_Oq zu;$gRX2g;Nh}G)wKJJxon9K^NDI(Uk`r`)(qJ5rBqUoI50+QYWS)^wFb#hzWXoxj! 
zcgYsl8B<0O zE@9di<19@qF2~f}di|}yWvrJ)6pBwm3yp#~K}i%hW%CtV&cE_rVeUud39uyZ2t%U2-S3R zoh0hZt7YP@fj(tB6G^61VGCns(FcLu8Rr)Pr+$HQJXDG2OB0L3Pi+PQcS*KjydHNG zXl{@m09{*|oUnck(v63tAEv!i-G4u6jq(dg`~Y6|$Hw?|gr45gdUxG;>#>x+w8GLh zVXrDSHTTabA_rg`h1t$JLbAs$PWPOFZPnC&3%tqpN6PR?OmAysHTxGQP_HFAhzTXH zzhqhe6mQZ}C(F4ZZ9ACBZns|IqlV(9VUGOksgW?u3~Q18=u#MU-+eTtbW!o5@9 zvY^&vqNCz7fg^@MRXHvZx*XJ(NFS#Cm z%e!PX5_gLh+(Wlbw800M+xz_ItLu1ymQ(n0rM23f5jCQR4o)6e86bE!`O#X{4fu@k z-l^kz=n1=}sYs~0z2;CZNTy$DiB)>Lh@3goOhAJ2KS^o~TJ*OCF-SWGZbXt<$%xBL zhe|<=6C%=}NMd*`8%@KQce{AD+h0OJ=>E`;6vwv#m z`k;rEolw-)gP!^FoPWhF(yc||$i9ltu)XuhIO-J4izb>j#b?id6zOBnf!DgkP4VAr z-y`gL1*ZE#WWz3-9@#!?NKyv%L84e1Zw>1X`QqeIRnf$_gm{a9vx2`lQl8WzLvk06 z%!-0$&sIK91*?iF`tr042o`!Dhkxu!Le_ydL5kb%Y&Rv5RaN7tpaN-oeF{WsS+b{d z`k>V58DM{y7U{x`AN8&n73K|vV7FrX6fi+b%y?( zPpaTvY!x2bv$3ftZG28x^~DuBJ_+xXiqVOkfAP9TkT>t@MeKv;aLqj8hN~-&%G|Z7 zoQ5mEXMxsI^LH<5QlyBp*%;%zvyt5K@~vj{f0^zBVH9^YnY3Y^HfQ;`=Z&P&>E5@M+~swu;|sALUO$A6<6g zw0f39IzoWb@#E*p93acZ=%BfluBvs?_ zvj(aJMB2LEsQs_-HZW$Q)0;8*0#Ulu(tD}Q?6N5yjUJPWR+T5j>aw^kDc#0&C?4iqc~R9h@~eq7il9{Ca&f);w^@hE!Nrs z|4h;wH@;irnn4elIn@^$Q5efjWKdnb+8SaEWbF$txJT#o3FNX$Fb`|6VFv zr|d(fb0#Xuyr!`+*UG2$j>Z+X_R=qVG>2MHh04Hi%Vzv=J+xSDv}VC;(SH^$_{-!W zk?U>*fj_9=*a>$*BP}c}d|(a)-gX-A_j2ZuE4l-3aHXJm6*{bTbuVq8uA@9_08XGF z7Xi+(3sUaqHwr4>he(aYSSiUL)6pHxd}M*%!D!O4nmtSZov^Xd1xxhYkL5xIehvB> zkpw)^^L~GRMUcM3yQ$W}`Wm7(SoyR$v)*Y|V<&s2M{~4S%x)C6`5aU_ZHh@rnWX@P zmFqt;49*4~j{qPKU!JXEMB*_=kwl1?DMb)bgx=}O)7O?%4nRvU>?M?s6Xw5$9)}4` z{oRibOu5KUkT|02bhBeDV0~1Ab2DEoQjPoZ)!A@hCMMD;zs>j*)!YnBWrN1HIqX^- z?;hV@E;hKNESqFoENZ%^C9Q687&XBUdNd-JYFcHq_PV>S34+b_VucFLb4IVM+ZNx7 zbe_#?t*jR{9({B`Qy=Qy_qew>zf#cZm6zK^FFKLYbZlRLGCAhDVVfsZXuMc)trG;^ zB2d6G?}5!mMp8##F<4Cl?VqB~WRh%hVs5*ccXd=Gny^Ig2WEs>!MAYn1*aO_Y$b=4 zRH~^0K+6FqLZQ+vW+$cw2!Ac^!PkIVp98&ukrQTLISy*to4Y>d^sPxi%K*=7IIJL- zb7u~y_eEKRW6tQUx=l4(HRqGxn?5V3WIa`jLM(vk@`@Y#Xp@J-*_ubRa(P zueI@>uEx~)8`tfbbnqf#tAeK3&x{R*|CI}Cpj_DgN_~^&hI-VG~a!Wk<87pruo3A6Fmq;4#1qmwz*LtS1No)nCp}NgFoRC=AU5Bv`Ys=%#U(UHx 
z#@)prU+_LQ9tL+gwp;J=QVnkWHJ{vXxK4jHe032sH`#u5?G-TO?|(czXUVH-c5u8g zTCpg;dX=U_dpC_2-=|LS!Z*9YG1|DxwJRS6vpJ9`7z$NXlRKAOiez0ey9Cxp{OF+z zATZ>;SjOm1SMny2GkCJ1lGFNNC}X)0aN%SZQ_c;6b0_9W5c)>#+))g#IRZnH&VAzQ zy3$gOK8b|z+e@j!vNoLB(mNu|YEFZi?GOVw;sh1SYA~1}rHmcsR(KF0@$sFA$|G*W zc7=>aaDhbC$FcRWfSqofV3S0yWx#KrPH*)L?g^`&hruiyZZtgj4 zL+Y(}F}@P&9tf;CW6@1il3HG`|W@zM(*-l{~oZ3)>Ef zV8TdZyQ3JL^k%78KO`ZMX7@kJ86?!&2t&y7zr6<<=J6U}#<9 zH5TDO%GGuI9bUu3w>QSURWVRDQV*Is`Efp|XqHc+5Hb$;5|tML0)mx`m+?aQCL7qsB@vP#PWj;uwJUeMsaPZD>2ZmX{F z&;S3VA?pg`W&tFJp4}K@ki$xIx&S{FbaDUjp;uso)$c6kXW5Ax6?FY?0{zrqN07{#~IE{1JtQ zWsj{gnEk16G-7$FjlrCn^SB-371_uUWbru;|I$6t*QvHL$=|_+lLz2MKfFa^1-!=} z{CPG6*@fzvB!|2JUN@`fQoq8;Q1w+5`8CY)%%=HC;K=w*9iLi%FSfx%~ycIShLrB5%x# zt3DI!Rklo86U+O1#$|mn+?q!QgA1(~4Ba9wH>xt{BT_>7vbXYTtLXMGUY&?Xy=5)C z^L1g+dnu)SU0j>s&W7~8Y{@LwN>c!@*?yYfu^lRL{4?wnn7kj33&NMfsm!^!P1VI? zmWfN!r!AsJI|Sn*RrELyJI}tia`Zq~>QrmI$j;QS$CEpm<7@7BaK@=_pKdq8;)L!- zs%tR+{LtYw)OZvx%;XS!nbBh>gl8-?W~G5|n`;A1_y_>Ba*Zo|fYQnmQyeA5a~l4h zP@6V@lDnVy9nO7|EH`f-+S*6m^9=Tyg`V(BGGyC|{oWfT`eJ|KB&5{loL8dYs^GYtGlSJ?cFqqnfPVsmawtuTSl>V{+f_3%o`Rd)}Zq! z!}gKq0{60+Ihc>8^ruXFX%E)l$7?)&q9#qh`%>>4cRZJxy;_^dVH{ng`ug-hq3AdC=)2yG9 z*B&@|E22G7J4Mfve8?H=wLw}~S1ZpqC#F0VFOn}mO=~-GCr8Tg5G`e;1&EyIcT%1X zme3Txau=ymXrGX=zG4YVe)8=uLo#%EbQ3B1YyU10d9)3sDRYjHdM|oWe>}U?ME~_1 zJ7ga_>fVNZcka*khuhyAPG;eX9a!ab-!#3DH*%7Zv@ow>MagS*i)O_&+5c|p8VHNt zxX{3Z+pUbt#vk;ftNp-kRistbXkG&Y!v8E$~!{tPXoy zyr_0cy2$+eaE?eGr7_P=IhTkPx|MM60PUrFY}gG&-My0jg5RR9Yb%gx=m$DU|IgRZ zS(+C!%-J0O>65x|l_a;*LPR7R9*aI|USLX?AUYTqL_Op4Spmb86jb)*Yzr#&Yleze zRny#!GM$EQY#rcEK{6hzW}oT|$@q}9v#&C-`?P?JZ$E%JadhlUb_}RKo_)UVo4i>_ zE#pWJh8_P4+LE2AC~t6D%R}n+V?ZPt?3I;L!dl*Nt_c}$!UJTE9zF4y6qck>r~(E! zdNZW7jl$;;!;Sb5^C&-5rZ%Jga+*?F1aInr4> z1KdmQC!2~NeLUDv@U4>I?Vp>w&op`XcV*R8RhbNLRR8$27P-=#I_&FqW?MnBH?TS4 z{p{FybWg{fw<9H={`q_Fg_2n_?O4C1{a~zAK#0d*X6TK`Z@E{;? 
z{Ro|TkUOBGPFQpR#SER9%uU%N(|T>G2)v37@S7(th&>d!!!pheW2C3jt&ms$Y0M1T zm8(6UF!IPhH#Eu52e|s8%8HmlpqKOaU@aCkls`Z3=He2(hDDpX=C`bs7ro5ehOiy9 z+%nhppw~Stz3N<}juz7s;)TcSl0u!0_BiY4X4h3Us&Kobflk*T9M8x8b}>{$K=ay$ zSL;JelDY|IN2 zI(`57$zCz*<01R9APtbX+(Jv&5MhC8E60X4RQ>Sp&p@{;!_3iT5M_`{veC zXRZ|+2Nrkn6n2eqHVXSV;Jd7ZjG83d^oGd1L2U~1_$M2(%9gwx;^=KD{MH2k65%K> z@q#3+oaGcqowLg&BKC^t>~uP{Z}tGIcB$?W$%5k9@a>!oC)e-bM`uD@hb`fiOJJTZ zG9YL;9lLB3K7Cu&WlB(J-Cdf7MR|-&cy+mpKV65lX3QQudsHN!&Qk2w*wkmCb5%QP zP6y&Ieu_1nUQdQi2fYREm!j^ZkKjnNJb4GyPL37ji2P|MKY;Xg5ITJ@Z^6KQ1Jie| zRcr0wd}O!=Qp9Ns?{8J6SzTmXrR)p^B@<1&i@Os3?XZ*4Os-Mbjh4ZwZjg99{qx=1 zX=T>9`+HQe0;8akv!Rz&FPV+4Wj*<+u~<9yCSdcX={{gS>i*9J{_op-7G%JfIy^FA zD;48+#L4eJ4I;>=r>|@24TiuFO7vZWXU)Z&OMJ_sU-1TYW|OY&ha>H8vw=!eJf!5z zMteDrXZ|A+YI)E=RngGXyl{Zw$!-9%l@OyP=Tro4va1T z=?`+YtMWHrSurT`=}UsEJHB8lY)1}gosOj^DIZ5t0?{^Vy|?J z!e3E9l8UA{cw69HT*k9C1oX2tJ5@z%ql==3j%;@7u+-g{wv2l>;{N0xZ5L!KUeO6k zxuNCJZBMm7GM*Uy zwxh6>yF#%e9Oy7^jC0@f*Et4`(MczR1}ie?jU`N^-wdFwXcFmlkb`R;iwn#4cmoO5 zfvv<``hMgV`Qo07=>vK2QYb|tDrV0A*NsOx^zPgvp@k6DkN#Tet^SnC!8y?QQQWC~ z>;>drrOlEm9iJO&P8H3&aioPFY*Ab3m#jO!7cT&!=QddW#J!07=}I)AtLByG+M0~EQOZe5bToa&{~^9FJ!<*e%Ro9?z=OICE2(}i#yF)li~OYS)h-zL1Q zUs@v?Q>0+?Y*2k?QAhf7(i6BiFC*R(-S#>G@Z2*DgX+T@&M0xav~I-= zL>@5DSowK`Z5V(DW9e7Q9e4S{XqZkms5(2xu?D-{BNxg`ZM^MZZRQVItZwzFRK!KUTh`Dy-rPbm7@iY65mnE@ziP%@355g_S3AJ5|!JosHo_Q7(MUb zw6UOWff@lWhlCwo^l^%=5HB7&b9;L>@2W!w^q_jc>~xaB7Zl^v$Nm-E#EqtF_q_nS z<8-wkr7)U81D;)Nz-SGFUUwI?hMhRK!yB0^)c3o?!^Rd>dQNjDOhHycfH%wMKFp6B zh-=+@R@Y#^myPh^{T#0}4UV52kLs7X(6Zx0L?PpOJeO6O#32vY?~MR-hkadK2#ak2 zph9oil()cC)0VIBS41ia;qd(j4k=gr(S}Q$s_}p(=jM zn%>sK{pBVUc_$dSB#C&cPtos7az$_tRLd2oixV_(SFIEqVGvfCP~aiQkW_7Q72BcA zZG2@_>-Eu<)F0)bl*NtO1p1d;N^^2dCN`bxm5)pYM_Sve9m`l%CC`u>13gRN>#XxVnI26Rr>?IUV3}B3-_`HIEV&eueYwBf6@^e|cNA zsA978*xwQn07Z@UNOPC}h@b0npQp1fFH*Hpp|N`gC-UMVe0`+bMVD!fU;2ZEzla>nnxHrdp0p6q)nmed05R=?==SEsUp zmtONgdR5I5af2(MF1UWaWMN;nfXxQHz-k73+$+k@_LtV1Dhxf|2V>up9b#JkG&3fq&t v#5^zWd6W$^cub+6czeJ1JVre3qQd8xso|{8n*x=WxWFIQ(gDphKOO&nmb&?S 
literal 0 HcmV?d00001 From 5a568035a9feb2fd79daf172f13b886af218230c Mon Sep 17 00:00:00 2001 From: Henk Berendsen <61596108+hb140502@users.noreply.github.com> Date: Tue, 6 Jun 2023 12:59:38 +0200 Subject: [PATCH 30/32] Finishing touches (#71) * Disable all logging output when running tests * Add additional logging messages * Reduce testing account request interval * Remove redundant member account tags * Remove redundant logging message and variable * Add function retrieving the AWS policy tag from admin panel * Change global account counter attributes to local * Make base OU ID configurable in admin panel * Fixed spacing between GitHub and AWS sync buttons * Replace hard-coded security tags to retrieved from admin panel * Incorporate security flaw tag into create_move_account tests --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> --- website/projects/admin.py | 7 +- website/projects/aws/awsapitalker.py | 3 + website/projects/aws/awssync.py | 110 ++++++------ website/projects/aws/awssync_checks.py | 15 -- website/projects/aws/awssync_structs.py | 11 +- .../migrations/0016_awspolicy_base_ou_id.py | 18 ++ website/projects/models.py | 3 +- .../templates/admin/projects/change_list.html | 2 + .../tests/tests_aws/test_awsapitalker.py | 2 +- .../projects/tests/tests_aws/test_awssync.py | 162 +++++++++++------- .../tests/tests_aws/test_awssync_checks.py | 31 ++-- .../tests/tests_aws/test_awssync_structs.py | 72 ++------ 12 files changed, 214 insertions(+), 222 deletions(-) create mode 100644 website/projects/migrations/0016_awspolicy_base_ou_id.py diff --git a/website/projects/admin.py b/website/projects/admin.py index 74ea5802..1c59e216 100644 --- a/website/projects/admin.py +++ b/website/projects/admin.py @@ -203,5 +203,8 @@ class ClientAdmin(admin.ModelAdmin): class AWSPolicyAdmin(admin.ModelAdmin): """Custom admin for AWS Policies.""" - list_display = ["policy_id", "tags_key", "tags_value", "is_current_policy"] - search_fields = 
("policy_id",) + list_display = ["base_ou_id", "policy_id", "tags_key", "tags_value", "is_current_policy"] + search_fields = ( + "base_ou_id", + "policy_id", + ) diff --git a/website/projects/aws/awsapitalker.py b/website/projects/aws/awsapitalker.py index 32d6ad21..56a404d8 100644 --- a/website/projects/aws/awsapitalker.py +++ b/website/projects/aws/awsapitalker.py @@ -18,6 +18,7 @@ def __init__(self): self.sts_client = boto3.client("sts") self.max_results = 20 + self.conditional_tag = {"Key": "AutoCreated", "Value": ""} def create_organization(self, feature_set: str) -> dict: """ @@ -37,6 +38,7 @@ def create_organizational_unit(self, parent_id: str, ou_name: str, tags: list[di :param tags: tags (list of dictionaries containing the keys 'Key' and 'Value') to be attached to the account. :return: dictionary containing information about the organizational unit. """ + tags.append(self.conditional_tag) return self.org_client.create_organizational_unit(ParentId=parent_id, Name=ou_name, Tags=tags) def attach_policy(self, target_id: str, policy_id: str): @@ -79,6 +81,7 @@ def create_account(self, email: str, account_name: str, tags: list[dict] = []) - :param tags: tags (list of dictionaries containing the keys 'Key' and 'Value') to be attached to the account. :return: dictionary containing information about the account creation status. 
""" + tags.append(self.conditional_tag) return self.org_client.create_account( Email=email, AccountName=account_name, IamUserAccessToBilling="DENY", Tags=tags ) diff --git a/website/projects/aws/awssync.py b/website/projects/aws/awssync.py index b6252fdd..c93d312a 100644 --- a/website/projects/aws/awssync.py +++ b/website/projects/aws/awssync.py @@ -13,7 +13,6 @@ from projects.aws.awsapitalker import AWSAPITalker from projects.aws.awssync_checks import Checks -from projects.aws.awssync_checks_permissions import api_permissions from projects.aws.awssync_structs import AWSTree, Iteration, SyncData from projects.models import AWSPolicy, Project @@ -31,10 +30,6 @@ def __init__(self): self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 5 self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3 - self.accounts_created = 0 - self.accounts_moved = 0 - self.accounts_to_create = 0 - self.SUCCESS_MSG = "Successfully synchronized all projects to AWS." self.FAIL_MSG = "Not all accounts were created and moved successfully. Check the console for more information." self.API_ERROR_MSG = "An error occurred while calling the AWS API. Check the console for more information." @@ -44,23 +39,20 @@ def __init__(self): def get_syncdata_from_giphouse(self) -> list[SyncData]: """ - Create a list of SyncData struct containing email, slug and semester. - - Slug and semester combined are together an uniqueness constraint. + Create a list of SyncData struct containing email, slug. 
- :return: list of SyncData structs with email, slug and semester + :return: list of SyncData structs with email, slug """ sync_data_list = [] current_semester = Semester.objects.get_or_create_current_semester() for project in Project.objects.filter(mailinglist__isnull=False, semester=current_semester).values( - "slug", "semester", "mailinglist" + "slug", "mailinglist" ): project_slug = project["slug"] - project_semester = str(Semester.objects.get(pk=project["semester"])) project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address - sync_data = SyncData(project_email, project_slug, project_semester) + sync_data = SyncData(project_email, project_slug) sync_data_list.append(sync_data) return sync_data_list @@ -72,13 +64,6 @@ def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[S """ return [project for project in giphouse_data if project not in aws_data] - def get_tag_value(self, tags: list[dict[str, str]], key: str) -> str: - """Return the value of the tag with the given key, or None if no such tag exists.""" - for tag in tags: - if tag["Key"] == key: - return tag["Value"] - return None - def extract_aws_setup(self, parent_ou_id: str) -> AWSTree: """ Give a list of all the children of the parent OU. @@ -86,7 +71,6 @@ def extract_aws_setup(self, parent_ou_id: str) -> AWSTree: :param parent_ou_id: The ID of the parent OU. :return: A AWSTree object containing all the children of the parent OU. 
""" - member_accounts = [] aws_tree = AWSTree( "root", parent_ou_id, @@ -94,27 +78,15 @@ def extract_aws_setup(self, parent_ou_id: str) -> AWSTree: Iteration( ou["Name"], ou["Id"], - member_accounts := [ - SyncData( - account["Email"], - self.get_tag_value(tags, "project_slug"), - self.get_tag_value(tags, "project_semester"), - ) + [ + SyncData(account["Email"], account["Name"]) for account in self.api_talker.list_accounts_for_parent(parent_id=ou["Id"]) - for tags in [self.api_talker.list_tags_for_resource(resource_id=account["Id"])] ], ) for ou in self.api_talker.list_organizational_units_for_parent(parent_id=parent_ou_id) ], ) - incomplete_accounts = [ - account for account in member_accounts if not (account.project_slug and account.project_semester) - ] - - if incomplete_accounts: - raise Exception(f"Found incomplete accounts in AWS: {incomplete_accounts}.") - return aws_tree def get_or_create_course_ou(self, tree: AWSTree) -> str: @@ -126,6 +98,9 @@ def get_or_create_course_ou(self, tree: AWSTree) -> str: if not course_ou_id: course_ou = self.api_talker.create_organizational_unit(root_id, course_ou_name) course_ou_id = course_ou["OrganizationalUnit"]["Id"] + self.logger.info(f"Created semester OU '{course_ou_name}' with ID '/{root_id}/{course_ou_id}'.") + else: + self.logger.info(f"Semester OU '{course_ou_name}' exists with ID '/{root_id}/{course_ou_id}'.") return course_ou_id @@ -133,17 +108,35 @@ def attach_policy(self, target_id: str, policy_id: str) -> None: """Attach policy to target resource.""" try: self.api_talker.attach_policy(target_id, policy_id) + self.logger.info(f"Attached policy with ID '{policy_id}' to target ID '{target_id}'.") except ClientError as error: if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException": raise + self.logger.info(f"Policy with ID '{policy_id}' is already attached to target ID '{target_id}'.") + + def get_current_base_ou_id(self) -> str: + """Get the manually configured current base OU ID set in the 
Django admin panel.""" + for policy in AWSPolicy.objects.all(): + if policy.is_current_policy: + return policy.base_ou_id + raise Exception("No current base OU ID found") def get_current_policy_id(self) -> str: - """Get the currrent policy stored on the GiPHouse website.""" + """Get the manually configured current policy ID set in the Django admin panel.""" for policy in AWSPolicy.objects.all(): if policy.is_current_policy: return policy.policy_id raise Exception("No current policy found") + def get_current_policy_tag(self) -> dict: + """Get the manually configured current policy tag set in the Django admin panel.""" + for policy in AWSPolicy.objects.all(): + if policy.is_current_policy: + tag = {"Key": policy.tags_key} + tag["Value"] = policy.tags_value if policy.tags_value else "" + return tag + raise Exception("No current policy tag found") + def create_and_move_accounts( self, new_member_accounts: list[SyncData], root_id: str, destination_ou_id: str ) -> bool: @@ -155,18 +148,13 @@ def create_and_move_accounts( :param destination_ou_id: The organization's destination OU ID. :returns: True iff **all** new member accounts were created and moved successfully. """ + accounts_created = 0 + accounts_moved = 0 + for new_member in new_member_accounts: - # Create member account response = self.api_talker.create_account( - new_member.project_email, - new_member.project_slug, - [ - {"Key": "project_slug", "Value": new_member.project_slug}, - {"Key": "project_semester", "Value": new_member.project_semester}, - {"Key": "course_iteration_tag", "Value": "no-rights"}, - ], + new_member.project_email, new_member.project_slug, [self.get_current_policy_tag()] ) - # Repeatedly check status of new member account request. 
request_id = response["CreateAccountStatus"]["Id"] for _ in range(self.ACCOUNT_REQUEST_MAX_ATTEMPTS): @@ -183,14 +171,16 @@ def create_and_move_accounts( if request_state == "SUCCEEDED": account_id = response_status["CreateAccountStatus"]["AccountId"] + self.logger.info(f"Created member account '{new_member.project_email}' with ID '{account_id}'.") + accounts_created += 1 - self.accounts_created += 1 try: self.api_talker.move_account(account_id, root_id, destination_ou_id) - self.accounts_moved += 1 - self.api_talker.untag_resource(account_id, ["course_iteration_tag"]) + accounts_moved += 1 + self.logger.info(f"Moved new member account '{new_member.project_email}'.") + self.api_talker.untag_resource(account_id, [self.get_current_policy_tag()["Key"]]) except ClientError as error: - self.logger.debug(f"Failed to move account with e-mail: {new_member.project_email}.") + self.logger.debug(f"Failed to move new member account '{new_member.project_email}'.") self.logger.debug(error) break @@ -202,10 +192,10 @@ def create_and_move_accounts( ) break - self.accounts_to_create = len(new_member_accounts) - self.logger.debug(f"Accounts created: {self.accounts_created}/{self.accounts_to_create}") - self.logger.debug(f"Accounts moved: {self.accounts_moved}/{self.accounts_to_create}") - success = self.accounts_to_create == self.accounts_created == self.accounts_moved + accounts_to_create = len(new_member_accounts) + self.logger.info(f"Accounts created: {accounts_created}/{accounts_to_create}") + self.logger.info(f"Accounts moved: {accounts_moved}/{accounts_to_create}") + success = accounts_to_create == accounts_created == accounts_moved return success @@ -215,23 +205,21 @@ def pipeline(self) -> bool: :return: True iff all pipeline stages successfully executed. 
""" - self.checker.pipeline_preconditions(api_permissions) - + base_ou_id = self.get_current_base_ou_id() + policy_id = self.get_current_policy_id() root_id = self.api_talker.list_roots()[0]["Id"] - aws_tree = self.extract_aws_setup(root_id) - self.checker.check_members_in_correct_iteration(aws_tree) + + aws_tree = self.extract_aws_setup(base_ou_id) self.checker.check_double_iteration_names(aws_tree) aws_sync_data = aws_tree.awstree_to_syncdata_list() giphouse_sync_data = self.get_syncdata_from_giphouse() merged_sync_data = self.generate_aws_sync_list(giphouse_sync_data, aws_sync_data) - ou_id = self.get_or_create_course_ou(aws_tree) - - policy_id = self.get_current_policy_id() - self.attach_policy(ou_id, policy_id) + course_ou_id = self.get_or_create_course_ou(aws_tree) + self.attach_policy(course_ou_id, policy_id) - return self.create_and_move_accounts(merged_sync_data, root_id, ou_id) + return self.create_and_move_accounts(merged_sync_data, root_id, course_ou_id) def synchronise(self, request): """ diff --git a/website/projects/aws/awssync_checks.py b/website/projects/aws/awssync_checks.py index 3c7c0e4b..9cb64b44 100644 --- a/website/projects/aws/awssync_checks.py +++ b/website/projects/aws/awssync_checks.py @@ -14,21 +14,6 @@ def __init__(self): self.api_talker = AWSAPITalker() self.logger = logging.getLogger("django.aws") - def check_members_in_correct_iteration(self, AWSdata: AWSTree) -> None: - """Check if the data from the member tag matches the semester OU it is in.""" - emails_inconsistent_accounts = [ - member.project_email - for iteration in AWSdata.iterations - for member in iteration.members - if member.project_semester != iteration.name - ] - - if emails_inconsistent_accounts: - raise Exception( - f"There are members in a course iteration OU with an inconsistent course iteration tag.\ - Inconsistent names are {emails_inconsistent_accounts}" - ) - def check_double_iteration_names(self, AWSdata: AWSTree) -> None: """Check if there are multiple OU's 
with the same name in AWS.""" names = [iteration.name for iteration in AWSdata.iterations] diff --git a/website/projects/aws/awssync_structs.py b/website/projects/aws/awssync_structs.py index e0d8b934..7b094d8f 100644 --- a/website/projects/aws/awssync_structs.py +++ b/website/projects/aws/awssync_structs.py @@ -4,25 +4,20 @@ class SyncData: """Structure for AWS giphouse sync data.""" - def __init__(self, project_email: str, project_slug: str, project_semester: str) -> None: + def __init__(self, project_email: str, project_slug: str) -> None: """Create SyncData instance.""" self.project_email = project_email self.project_slug = project_slug - self.project_semester = project_semester def __eq__(self, other: SyncData) -> bool: """Overload equals for SyncData type.""" if not isinstance(other, SyncData): raise TypeError("Must compare to object of type SyncData") - return ( - self.project_email == other.project_email - and self.project_slug == other.project_slug - and self.project_semester == other.project_semester - ) + return self.project_email == other.project_email and self.project_slug == other.project_slug def __repr__(self) -> str: """Overload to repr function for SyncData type.""" - return f"SyncData('{self.project_email}', '{self.project_slug}', '{self.project_semester}')" + return f"SyncData('{self.project_email}', '{self.project_slug}')" class Iteration: diff --git a/website/projects/migrations/0016_awspolicy_base_ou_id.py b/website/projects/migrations/0016_awspolicy_base_ou_id.py new file mode 100644 index 00000000..e3d5eb8b --- /dev/null +++ b/website/projects/migrations/0016_awspolicy_base_ou_id.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.3 on 2023-06-04 15:12 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0015_alter_awspolicy_tags_key"), + ] + + operations = [ + migrations.AddField( + model_name="awspolicy", + name="base_ou_id", + field=models.CharField(default="", 
max_length=50), + ), + ] diff --git a/website/projects/models.py b/website/projects/models.py index 62645b57..0c904bc5 100644 --- a/website/projects/models.py +++ b/website/projects/models.py @@ -9,7 +9,7 @@ class AWSPolicy(models.Model): - """AWS global policy id and tags submission fields.""" + """AWS global base OU id, policy id and tags submission fields.""" class Meta: """Meta class for AWSPolicy model.""" @@ -17,6 +17,7 @@ class Meta: verbose_name = "AWS Policy" verbose_name_plural = "AWS Policies" + base_ou_id = models.CharField(max_length=50, unique=False, default="", null=False, blank=False) policy_id = models.CharField(max_length=50, unique=False, null=False, blank=False) tags_key = models.CharField(max_length=50, unique=False, default="", null=False, blank=False) tags_value = models.CharField(max_length=50, unique=False, default="", null=False, blank=True) diff --git a/website/projects/templates/admin/projects/change_list.html b/website/projects/templates/admin/projects/change_list.html index 9e2f1890..a19bafbd 100644 --- a/website/projects/templates/admin/projects/change_list.html +++ b/website/projects/templates/admin/projects/change_list.html @@ -4,6 +4,8 @@ {% block object-tools-items %}

  • Synchronize projects of the current semester to GitHub +
  • +
  • Synchronize projects of the current semester to AWS
  • {{ block.super }} diff --git a/website/projects/tests/tests_aws/test_awsapitalker.py b/website/projects/tests/tests_aws/test_awsapitalker.py index 907d1efb..ee08ad5a 100644 --- a/website/projects/tests/tests_aws/test_awsapitalker.py +++ b/website/projects/tests/tests_aws/test_awsapitalker.py @@ -210,4 +210,4 @@ def test_untag_resource(self): self.api_talker.untag_resource(account_id, [tag_key]) received_tags = self.api_talker.org_client.list_tags_for_resource(ResourceId=account_id)["Tags"] - self.assertEqual(received_tags, []) + self.assertEqual(received_tags, [self.api_talker.conditional_tag]) diff --git a/website/projects/tests/tests_aws/test_awssync.py b/website/projects/tests/tests_aws/test_awssync.py index a33dd9fa..e0cc0974 100644 --- a/website/projects/tests/tests_aws/test_awssync.py +++ b/website/projects/tests/tests_aws/test_awssync.py @@ -24,9 +24,16 @@ User: Employee = get_user_model() +class QuietAWSSync(AWSSync): + def __init__(self): + super().__init__() + self.logger = MagicMock() + + @mock_organizations @mock_sts @mock_iam +@patch("projects.admin.AWSSync", new=QuietAWSSync) class AWSSyncTest(TestCase): def setUp(self): """Set up testing environment.""" @@ -41,6 +48,8 @@ def setUp(self): self.sync.logger = self.logger self.sync.checker.logger = self.logger + self.sync.ACCOUNT_REQUEST_INTERVAL_SECONDS = 0.1 + def setup_policy(self): policy_name = "DenyAll" policy_description = "Deny all access." 
@@ -59,6 +68,12 @@ def setup_policy(self): tags_value="true", ) + def get_tags_for_account(self, account_email): + accounts = self.sync.api_talker.org_client.list_accounts()["Accounts"] + account_id = [account["Id"] for account in accounts if account["Email"] == account_email][0] + tags = self.api_talker.org_client.list_tags_for_resource(ResourceId=account_id)["Tags"] + return tags + def test_get_syncdata_from_giphouse_normal(self): """Test get_emails_with_teamids function in optimal conditions.""" self.semester = Semester.objects.create(year=2023, season=Semester.SPRING) @@ -74,9 +89,9 @@ def test_get_syncdata_from_giphouse_normal(self): self.assertIsInstance(email_id, list) self.assertIsInstance(email_id[0], SyncData) expected_result = [ - SyncData("test0@giphouse.nl", "test0", "Spring 2023"), - SyncData("test1@giphouse.nl", "test1", "Spring 2023"), - SyncData("test2@giphouse.nl", "test2", "Spring 2023"), + SyncData("test0@giphouse.nl", "test0"), + SyncData("test1@giphouse.nl", "test1"), + SyncData("test2@giphouse.nl", "test2"), ] self.assertEqual(email_id, expected_result) @@ -113,75 +128,46 @@ def test_AWS_sync_list_both_empty(self): self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) def test_AWS_sync_list_empty_AWS(self): - test1 = SyncData("test1@test1.test1", "test1", "test1") - test2 = SyncData("test2@test2.test2", "test2", "test2") + test1 = SyncData("test1@test1.test1", "test1") + test2 = SyncData("test2@test2.test2", "test2") gip_list = [test1, test2] aws_list = [] self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list) def test_AWS_sync_list_empty_GiP(self): - test1 = SyncData("test1@test1.test1", "test1", "test1") - test2 = SyncData("test2@test2.test2", "test2", "test2") + test1 = SyncData("test1@test1.test1", "test1") + test2 = SyncData("test2@test2.test2", "test2") gip_list = [] aws_list = [test1, test2] self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), []) def 
test_AWS_sync_list_both_full(self): - test1 = SyncData("test1@test1.test1", "test1", "test1") - test2 = SyncData("test2@test2.test2", "test2", "test2") - test3 = SyncData("test3@test3.test3", "test3", "test3") + test1 = SyncData("test1@test1.test1", "test1") + test2 = SyncData("test2@test2.test2", "test2") + test3 = SyncData("test3@test3.test3", "test3") gip_list = [test1, test2] aws_list = [test2, test3] self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [test1]) - def test_get_tag_value(self): - tags = [{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}] - self.assertEquals(self.sync.get_tag_value(tags, "project_semester"), "2021") - self.assertEquals(self.sync.get_tag_value(tags, "project_slug"), "test1") - self.assertEquals(self.sync.get_tag_value(tags, "project_name"), None) - def test_extract_aws_setup(self): self.sync.api_talker.create_organization(feature_set="ALL") root_id = self.api_talker.list_roots()[0]["Id"] - ou_response = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1") + ou_response = self.api_talker.create_organizational_unit(root_id, "OU_1") ou_id = ou_response["OrganizationalUnit"]["Id"] - account_response = self.api_talker.create_account( - email="account_1@gmail.com", - account_name="account_1", - tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}], - ) + account_response = self.api_talker.create_account("account_1@gmail.com", "account_1") account_id = account_response["CreateAccountStatus"]["AccountId"] self.api_talker.move_account(account_id=account_id, source_parent_id=root_id, dest_parent_id=ou_id) aws_tree = self.sync.extract_aws_setup(root_id) - expected_sync_data = [SyncData("account_1@gmail.com", "test1", "2021")] + expected_sync_data = [SyncData("account_1@gmail.com", "account_1")] expected_iteration = Iteration("OU_1", ou_id, expected_sync_data) expected_tree = AWSTree("root", root_id, [expected_iteration]) 
self.assertEqual(aws_tree, expected_tree) - def test_extract_aws_setup_no_slugs(self): - self.sync.api_talker.create_organization(feature_set="ALL") - root_id = self.api_talker.list_roots()[0]["Id"] - - response_OU_1 = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1") - OU_1_id = response_OU_1["OrganizationalUnit"]["Id"] - response_account_1 = self.api_talker.create_account( - email="account_1@gmail.com", - account_name="account_1", - tags=[], - ) - account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"] - - self.api_talker.move_account(account_id=account_id_1, source_parent_id=root_id, dest_parent_id=OU_1_id) - - with self.assertRaises(Exception) as context: - self.sync.extract_aws_setup(root_id) - self.assertIn("Found incomplete accounts in AWS", str(context.exception)) - def test_get_or_create_course_ou__new(self): self.sync.api_talker.create_organization(feature_set="ALL") root_id = self.sync.api_talker.list_roots()[0]["Id"] @@ -202,8 +188,8 @@ def test_get_or_create_course_ou__already_exists(self): "root", "r-123", [ - Iteration("Spring 2023", "ou-456", [SyncData("alice@giphouse.nl", "alices-project", "Spring 2023")]), - Iteration("Fall 2023", "ou-789", [SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023")]), + Iteration("Spring 2023", "ou-456", [SyncData("alice@giphouse.nl", "alices-project")]), + Iteration("Fall 2023", "ou-789", [SyncData("bob@giphouse.nl", "bobs-project")]), ], ) @@ -245,6 +231,12 @@ def test_attach_policy__caught_exception(self): def test_attach_policy__reraised_exception(self): self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123") + def test_get_current_base_ou_id(self): + test_base_ou_id = "o-123456" + self.aws_policy = AWSPolicy.objects.create(base_ou_id=test_base_ou_id, is_current_policy=True) + current_base_ou_id = self.sync.get_current_base_ou_id() + self.assertEqual(current_base_ou_id, test_base_ou_id) + def test_get_current_policy_id(self): self.policy_id1 = 
AWSPolicy.objects.create( policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False @@ -256,11 +248,28 @@ def test_get_current_policy_id(self): self.assertIsInstance(current_policy_id, str) self.assertEqual(current_policy_id, self.policy_id2.policy_id) - def test_get_current_policy__no_current_policy_id(self): + def test_get_current_policy__no_current_policy(self): self.policy_id1 = AWSPolicy.objects.create( policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False ) + self.assertRaises(Exception, self.sync.get_current_base_ou_id) self.assertRaises(Exception, self.sync.get_current_policy_id) + self.assertRaises(Exception, self.sync.get_current_policy_tag) + + def test_get_current_policy_tag__has_key_and_value(self): + test_key = "not-moved" + test_val = "pending-move" + self.aws_policy = AWSPolicy.objects.create( + policy_id="p-123456", tags_key=test_key, tags_value=test_val, is_current_policy=True + ) + current_policy_tag = self.sync.get_current_policy_tag() + self.assertEqual(current_policy_tag, {"Key": test_key, "Value": test_val}) + + def test_get_current_policy_tag__has_key_only(self): + test_key = "not-moved" + self.aws_policy = AWSPolicy.objects.create(policy_id="p-123456", tags_key=test_key, is_current_policy=True) + current_policy_tag = self.sync.get_current_policy_tag() + self.assertEqual(current_policy_tag, {"Key": test_key, "Value": ""}) def test_create_move_account(self): self.sync.api_talker.create_organization(feature_set="ALL") @@ -269,40 +278,53 @@ def test_create_move_account(self): dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + SyncData("alice@giphouse.nl", "alices-project"), + SyncData("bob@giphouse.nl", "bobs-project"), ] + self.setup_policy() + success = 
self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + tags_alice = self.get_tags_for_account("alice@giphouse.nl") + tags_bob = self.get_tags_for_account("bob@giphouse.nl") + self.assertTrue(success) + self.assertNotIn({"Key": "no_permissions", "Value": "true"}, tags_alice + tags_bob) - def test_create_move_account__exception_failure(self): + def test_create_move_account__exception_move(self): self.sync.api_talker.create_organization(feature_set="ALL") root_id = self.sync.api_talker.list_roots()[0]["Id"] dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + SyncData("alice@giphouse.nl", "alices-project"), + SyncData("bob@giphouse.nl", "bobs-project"), ] + self.setup_policy() with patch.object(self.sync.api_talker, "move_account", side_effect=ClientError({}, "move_account")): success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + tags_alice = self.get_tags_for_account("alice@giphouse.nl") + tags_bob = self.get_tags_for_account("bob@giphouse.nl") + self.assertFalse(success) + self.assertIn({"Key": "no_permissions", "Value": "true"}, tags_alice) + self.assertIn({"Key": "no_permissions", "Value": "true"}, tags_bob) - def test_create_move_account__no_move(self): + def test_create_move_account__exception_describe(self): self.sync.api_talker.create_organization(feature_set="ALL") root_id = self.sync.api_talker.list_roots()[0]["Id"] dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + SyncData("alice@giphouse.nl", "alices-project"), + SyncData("bob@giphouse.nl", "bobs-project"), ] + self.setup_policy() with 
patch.object( self.sync.api_talker, "describe_create_account_status", @@ -310,7 +332,12 @@ def test_create_move_account__no_move(self): ): success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + tags_alice = self.get_tags_for_account("alice@giphouse.nl") + tags_bob = self.get_tags_for_account("bob@giphouse.nl") + self.assertFalse(success) + self.assertIn({"Key": "no_permissions", "Value": "true"}, tags_alice) + self.assertIn({"Key": "no_permissions", "Value": "true"}, tags_bob) def test_create_move_account__failed(self): self.sync.api_talker.create_organization(feature_set="ALL") @@ -319,16 +346,18 @@ def test_create_move_account__failed(self): dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("alice@giphouse.nl", "bobs-project", "Fall 2023"), + SyncData("alice@giphouse.nl", "alices-project"), + SyncData("alice@giphouse.nl", "bobs-project"), ] + self.setup_policy() with patch.object( self.sync.api_talker.org_client, "describe_create_account_status", return_value={"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}}, ): success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + self.assertFalse(success) def test_create_move_account__in_progress(self): @@ -338,10 +367,11 @@ def test_create_move_account__in_progress(self): dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou") dest_ou_id = dest_ou["OrganizationalUnit"]["Id"] members = [ - SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"), - SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"), + SyncData("alice@giphouse.nl", "alices-project"), + SyncData("bob@giphouse.nl", "bobs-project"), ] + self.setup_policy() with patch.object( self.sync.api_talker.org_client, "describe_create_account_status", @@ -349,7 +379,12 @@ def 
test_create_move_account__in_progress(self): ): success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id) + tags_alice = self.get_tags_for_account("alice@giphouse.nl") + tags_bob = self.get_tags_for_account("bob@giphouse.nl") + self.assertFalse(success) + self.assertIn({"Key": "no_permissions", "Value": "true"}, tags_alice) + self.assertIn({"Key": "no_permissions", "Value": "true"}, tags_bob) def test_pipeline__no_accounts_no_ou(self): self.sync.checker.api_talker.simulate_principal_policy = MagicMock( @@ -357,9 +392,11 @@ def test_pipeline__no_accounts_no_ou(self): ) self.sync.api_talker.create_organization(feature_set="ALL") self.setup_policy() - pipeline_success = self.sync.pipeline() root_id = self.sync.api_talker.list_roots()[0]["Id"] + with patch("projects.aws.awssync.AWSSync.get_current_base_ou_id", return_value=root_id): + pipeline_success = self.sync.pipeline() + root_ous = self.sync.api_talker.list_organizational_units_for_parent(root_id) root_ou_names = [ou["Name"] for ou in root_ous] @@ -388,12 +425,14 @@ def test_pipeline__new_accounts_existing_ou(self): self.sync.get_syncdata_from_giphouse = MagicMock( return_value=[ - SyncData("alice@giphouse.nl", "alices-project", current_semester), - SyncData("bob@giphouse.nl", "bobs-project", current_semester), + SyncData("alice@giphouse.nl", "alices-project"), + SyncData("bob@giphouse.nl", "bobs-project"), ] ) - pipeline_success = self.sync.pipeline() + with patch("projects.aws.awssync.AWSSync.get_current_base_ou_id", return_value=root_id): + pipeline_success = self.sync.pipeline() + course_accounts = self.sync.api_talker.list_accounts_for_parent(course_ou_id) course_account_emails = [account["Email"] for account in course_accounts] @@ -424,7 +463,6 @@ def test_synchronise__api_error(self): def test_synchronise__sync_error(self): sync_error = Exception("Synchronization Error") - self.sync.api_talker.create_organization(feature_set="ALL") with patch("projects.aws.awssync.AWSSync.pipeline", 
side_effect=sync_error): response = self.client.get(reverse("admin:synchronise_to_aws"), follow=True) diff --git a/website/projects/tests/tests_aws/test_awssync_checks.py b/website/projects/tests/tests_aws/test_awssync_checks.py index 9989201b..3af187d4 100644 --- a/website/projects/tests/tests_aws/test_awssync_checks.py +++ b/website/projects/tests/tests_aws/test_awssync_checks.py @@ -26,16 +26,16 @@ def setUp(self): "Fall 2020", "54321", [ - SyncData("email1@example.com", "project1", "Fall 2020"), - SyncData("email2@example.com", "project2", "Fall 2020"), + SyncData("email1@example.com", "project1"), + SyncData("email2@example.com", "project2"), ], ), Iteration( "Spring 2021", "98765", [ - SyncData("email3@example.com", "project3", "Spring 2021"), - SyncData("email4@example.com", "project4", "Spring 2021"), + SyncData("email3@example.com", "project3"), + SyncData("email4@example.com", "project4"), ], ), ], @@ -49,16 +49,16 @@ def setUp(self): "Fall 2020", "54321", [ - SyncData("email1@example.com", "project1", "Fall 2020"), - SyncData("email2@example.com", "project2", "Fall 2020"), + SyncData("email1@example.com", "project1"), + SyncData("email2@example.com", "project2"), ], ), Iteration( "Spring 2021", "98765", [ - SyncData("email3@example.com", "project3", "Fall 2021"), - SyncData("email4@example.com", "project4", "Spring 2021"), + SyncData("email3@example.com", "project3"), + SyncData("email4@example.com", "project4"), ], ), ], @@ -72,16 +72,16 @@ def setUp(self): "Fall 2020", "54321", [ - SyncData("email1@example.com", "project1", "Fall 2020"), - SyncData("email2@example.com", "project2", "Fall 2020"), + SyncData("email1@example.com", "project1"), + SyncData("email2@example.com", "project2"), ], ), Iteration( "Fall 2020", "98765", [ - SyncData("email3@example.com", "project3", "Fall 2021"), - SyncData("email4@example.com", "project4", "Spring 2021"), + SyncData("email3@example.com", "project3"), + SyncData("email4@example.com", "project4"), ], ), ], @@ -90,13 
+90,6 @@ def setUp(self): self.logger = MagicMock() self.checks.logger = self.logger - def test_check_members_in_correct_iteration(self): - # Test when correct - self.assertIsNone(self.checks.check_members_in_correct_iteration(self.aws_tree1)) - - # Test when incorrect - self.assertRaises(Exception, self.checks.check_members_in_correct_iteration, self.aws_tree2) - def test_check_double_iteration_names(self): # Test when correct self.assertIsNone(self.checks.check_double_iteration_names(self.aws_tree1)) diff --git a/website/projects/tests/tests_aws/test_awssync_structs.py b/website/projects/tests/tests_aws/test_awssync_structs.py index 3915bed0..b81e6f82 100644 --- a/website/projects/tests/tests_aws/test_awssync_structs.py +++ b/website/projects/tests/tests_aws/test_awssync_structs.py @@ -14,7 +14,7 @@ def setUp(self): def test_throw_type_error_SyncData_class(self): """Test Type Error when equals is called on wrong type.""" - self.assertRaises(TypeError, self.sync("a", "b", "c").__eq__, 123) + self.assertRaises(TypeError, self.sync("a", "b").__eq__, 123) class AWSSyncListTest(TestCase): @@ -24,9 +24,9 @@ def setUp(self): self.sync = awssync.AWSSync() self.syncData = awssync.SyncData - self.test1 = self.syncData("test1@test1.test1", "test1", "test1") - self.test2 = self.syncData("test2@test2.test2", "test2", "test2") - self.test3 = self.syncData("test3@test3.test3", "test3", "test3") + self.test1 = self.syncData("test1@test1.test1", "test1") + self.test2 = self.syncData("test2@test2.test2", "test2") + self.test3 = self.syncData("test3@test3.test3", "test3") def test_AWS_sync_list_both_empty(self): gip_list = [] @@ -56,24 +56,13 @@ def setUp(self): self.sync = awssync.AWSSync() self.awstree = awssync.AWSTree("Name", "1234", []) self.iteration = awssync.Iteration("Name", "1234", []) - self.sync_data = awssync.SyncData("email@example.com", "Project X", "Spring 2020") - - self.sync_list = [ - awssync.SyncData("email1@example.com", "Spring 2022", "Project A"), - 
awssync.SyncData("email2@example.com", "Fall 2022", "Project B"), - awssync.SyncData("email3@example.com", "Spring 2022", "Project C"), - ] - self.aws_list = [ - awssync.SyncData("email4@example.com", "Fall 2021", "Project D"), - awssync.SyncData("email5@example.com", "Spring 2022", "Project E"), - awssync.SyncData("email6@example.com", "Fall 2022", "Project F"), - ] + self.sync_data = awssync.SyncData("email@example.com", "Project X") self.treelist = [ - awssync.SyncData("email1@example.com", "project1", "Fall 2020"), - awssync.SyncData("email2@example.com", "project2", "Fall 2020"), - awssync.SyncData("email3@example.com", "project3", "Spring 2021"), - awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + awssync.SyncData("email1@example.com", "project1"), + awssync.SyncData("email2@example.com", "project2"), + awssync.SyncData("email3@example.com", "project3"), + awssync.SyncData("email4@example.com", "project4"), ] self.aws_tree1 = awssync.AWSTree( @@ -84,16 +73,16 @@ def setUp(self): "Fall 2020", "54321", [ - awssync.SyncData("email1@example.com", "project1", "Fall 2020"), - awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + awssync.SyncData("email1@example.com", "project1"), + awssync.SyncData("email2@example.com", "project2"), ], ), awssync.Iteration( "Spring 2021", "98765", [ - awssync.SyncData("email3@example.com", "project3", "Spring 2021"), - awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + awssync.SyncData("email3@example.com", "project3"), + awssync.SyncData("email4@example.com", "project4"), ], ), ], @@ -107,39 +96,16 @@ def setUp(self): "Fall 2020", "54321", [ - awssync.SyncData("email1@example.com", "project1", "Fall 2020"), - awssync.SyncData("email2@example.com", "project2", "Fall 2020"), - ], - ), - awssync.Iteration( - "Spring 2021", - "98765", - [ - awssync.SyncData("email3@example.com", "project3", "Fall 2021"), - awssync.SyncData("email4@example.com", "project4", "Spring 2021"), - ], - ), - 
], - ) - - self.aws_tree3 = awssync.AWSTree( - "AWS Tree", - "12345", - [ - awssync.Iteration( - "Fall 2020", - "54321", - [ - awssync.SyncData("email1@example.com", "project1", "Fall 2020"), - awssync.SyncData("email2@example.com", "project2", "Fall 2020"), + awssync.SyncData("email1@example.com", "project1"), + awssync.SyncData("email2@example.com", "project2"), ], ), awssync.Iteration( - "Fall 2020", + "Spring 2020", "98765", [ - awssync.SyncData("email3@example.com", "project3", "Fall 2021"), - awssync.SyncData("email4@example.com", "project4", "Spring 2021"), + awssync.SyncData("email3@example.com", "project3"), + awssync.SyncData("email4@example.com", "project4"), ], ), ], @@ -152,7 +118,7 @@ def test_repr_Iteration(self): self.assertEquals(repr(self.iteration), "Iteration('Name', '1234', [])") def test_repr_SyncData(self): - self.assertEquals(repr(self.sync_data), "SyncData('email@example.com', 'Project X', 'Spring 2020')") + self.assertEquals(repr(self.sync_data), "SyncData('email@example.com', 'Project X')") def test_awstree_to_syncdata_list(self): self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist) From aaa7c8b0d0b47bf9fa978f87e974bce065ac746a Mon Sep 17 00:00:00 2001 From: mitchellboes <49476235+mitchellboes@users.noreply.github.com> Date: Fri, 9 Jun 2023 11:35:32 +0200 Subject: [PATCH 31/32] Documentation AWS configuration (#72) * added AWS deployment instructions to README * corrected typo * added part about base OU id * Add info configurable base * Change configuration instructions to be concise and complete * Replace old incorrect reference * Fix spelling mistake * Add info about new account request interval and attempts --------- Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com> --- README.md | 63 ++++++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 51 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 7cb75dee..88d0c5eb 100644 --- a/README.md +++ b/README.md @@ -6,6 
+6,7 @@ This is the code for the website of [GiPHouse](http://giphouse.nl/) powered by [ ## Table of Contents - [GiPHouse website](#giphouse-website) + - [Table of Contents](#table-of-contents) - [Features](#features) - [Authentication and Users](#authentication-and-users) - [GitHub OAuth](#github-oauth) @@ -21,11 +22,13 @@ This is the code for the website of [GiPHouse](http://giphouse.nl/) powered by [ - [AWS Synchronization](#aws-synchronization) - [Mailing Lists](#mailing-lists) - [Tasks](#tasks) + - [Styling](#styling) - [Development and Contributing](#development-and-contributing) - [Getting Started](#getting-started) - [Logging into the Backend](#logging-into-the-backend) - [Registering a GitHub App for repository synchronisation](#registering-a-github-app-for-repository-synchronisation) - [Registering a G Suite service account for mailing list synchronisation](#registering-a-g-suite-service-account-for-mailing-list-synchronisation) + - [Registering an AWS environment for synchronisation](#registering-an-aws-environment-for-synchronisation) - [Dependency Management](#dependency-management) - [Fixtures](#fixtures) - [Tests](#tests) @@ -45,7 +48,7 @@ This is the code for the website of [GiPHouse](http://giphouse.nl/) powered by [ - [`build-docker` job](#build-docker-job) - [`deploy` job](#deploy-job) - [Secrets](#secrets) - - [Server](#server) + - [Server Configuration](#server-configuration) - [Keeping Everything Up to Date](#keeping-everything-up-to-date) ## Features @@ -158,7 +161,7 @@ Each project in the current semester with a team mailing list gets its own AWS m Since all AWS member accounts have isolated environments, each team is able to configure their own AWS environment as desired. 
The AWS member accounts are restricted in their abilities using a pre-configured [SCP policy](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_scps.html) that is applied to the course semester Organizational Unit (OU) where all team member accounts reside. For example, the SCP policy can be set such that only (certain types of) [EC2](https://aws.amazon.com/ec2/) instances may be launched. -Such specific configuration details can be found under the [Deployment](#deployment) section. +Such specific configuration details can be found under the [Getting Started](#registering-an-aws-environment-for-synchronisation) section.  The entire AWS synchronization process, also referred to as the pipeline, can be initiated in the Django admin interface under Projects by pressing the large `SYNCHRONIZE PROJECTS OF THE CURRENT SEMESTER TO AWS` at the top-right and roughly goes through the following stages:  @@ -180,12 +183,12 @@ The entire AWS synchronization process, also referred to as the pipeline, can be  ![pipeline-flowchart](resources/pipeline-flowchart.drawio.png)  -After the synchronization process has finished, success or failure is indicated by a green or red response box respectively. -Verbose details for each synchronization run is logged using the `logging` module and can be accessed in the backend, for example to inspect causes of failed runs. +After the synchronization process has finished, a response box is returned indicating success (green), soft-fail (orange) or hard-fail (red). +Verbose details for each synchronization run is logged using the `logging` module and can be accessed in the backend, for example to inspect causes of failed runs. 
-An example of a possible AWS environment in the form a tree is the following: +An example of a possible AWS Organizations environment in the form a tree is the following: ``` -root +base (root/OU) │ ├── Fall 2022 (OU) │ ├── team-alice@giphouse.nl (member account) @@ -198,17 +201,23 @@ root └── admin@giphouse.nl (management account) ``` +The "base" (either root or OU), under which all relevant resources are created and operated on as part of the synchronization process, offers flexibility by being configurable in the Django admin panel. + When an AWS member account has been created for a team mailing list as part of an AWS Organization, an e-mail is sent by AWS. This process might take some time and is under AWS' control. It is important to be aware that gaining initial access to the member account is only possible by formally resetting the password; there is no other way. Also note well that each project team member will receive such mails because the team mailing list works as a one-to-many mail forwarder. -By default, all newly created member accounts under an AWS organization are placed under root with no possible alternative. -Once the member accounts have been created, they are moved to the current course semester OU. -Unfortunately, AWS does not specify how long it at most takes to finalize the status of a new member account request. -This introduces the possibility of there being a time period between having a newly created member account under root and moving it to its corresponding OU that is restricted with an attached SCP policy, possibly giving the member account excessive permissions. -To mitigate this risk, every newly created account comes with a pre-defined [tag](https://docs.aws.amazon.com/tag-editor/latest/userguide/tagging.html) and the SCP policy attached to root should deny all permissions for accounts under root with the specific tag (see [Deployment](#deployment) section for more details on SCP policy configuration). 
-The tag gets removed after the account has been moved to its destination OU. +By default, all newly created member accounts under an AWS organization are placed under root. +Once the member accounts have been created under root, they are automatically moved to the current course semester OU. +Note that: (1) it is not possible to create a new member account that gets placed in a specific OU and (2) new requested member accounts can not be moved unless the account creation has been finalized to `SUCCESS` and AWS does not specify an upper bound for the time it takes for a new member account creation to finalize. + +Due to point (2), the code contains the variables `ACCOUNT_REQUEST_MAX_ATTEMPTS` for the number of times to check the status of a new member account request, and `ACCOUNT_REQUEST_INTERVAL_SECONDS` for the time to wait in between attempts. +These values are currently hard-coded and can be tweaked, should they cause problems with the synchronization process. + +Points (1) and (2) pose the possibility of there being a time period between having a newly created member account under root and moving it to its corresponding OU that is restricted with an attached SCP policy, possibly giving the member account excessive permissions. +To mitigate this risk, every newly created account comes with a pre-defined [tag](https://docs.aws.amazon.com/tag-editor/latest/userguide/tagging.html) and the SCP policy attached to root should deny all permissions for accounts under root with the specific tag (see [Getting Started](#registering-an-aws-environment-for-synchronisation) section for more details on SCP policy and tag configuration). +The tag then automatically gets removed after the account has been moved to its destination course semester OU. ### Mailing Lists Admin users can create mailing lists using the Django admin interface. A mailing list can be connected to projects, users and 'extra' email addresses that are not tied to a user. 
Relating a mailing list to a project implicitly makes the members of that project a member of the mailing list. Removing a mailing list in the Django admin will result in the corresponding mailing list to be archived or deleted in G suite during the next synchronization, respecting the 'archive instead of delete' property of the deleted mailing list. To sync a mailing list with G Suite, one can run the management command: `./manage.py sync_mailing_list` or use the button in the model admin. This will sync all mailing lists and the automatic lists into G Suite at the specified domain. @@ -275,6 +284,35 @@ To enable the synchronisation feature of mailing lists to G Suite, a project and The credentials and admin user can then be setup in Github secrets. The email of the G Suite user used to manage to the G Suite domain has to be stored in the Github secret `DJANGO_GSUITE_ADMIN_USER`. The credentials json file has to be `base64` encoded and stored in the Github secret `DJANGO_GSUITE_ADMIN_CREDENTIALS_BASE64` (you can use the linux command `base64` for encoding the json file). +#### Registering an AWS environment for synchronisation +To enable the AWS synchronisation feature, the following points need to be configured only once in advance: + +- Create AWS Organizations with all features enabled. + - Ensure Service Control Policies (SCPs) feature is enabled. + - Enable AWS CloudTrail for logging account activity (optional, recommended). +- Increase AWS Organizations quota for maximum number of member accounts to expected amount. + - Default quota is set to 10. + - Expected amount should be at least the number of unique projects in the current semester. +- Set AWS API credentials for `boto3` as environment variables. + - `AWS_ACCESS_KEY_ID`: access key for AWS account. + - `AWS_SECRET_ACCESS_KEY`: secret key for AWS account. + - **(!)** Currently not automated using GitHub secrets. +- AWS API caller has sufficient permissions for all synchronization actions. 
+ - AWS API caller is IAM user acting on behalf of the management account of the AWS Organizations. + - Ensure you are logged in as the management account when creating the IAM user for API access. + - Pre-defined IAM policies `AWSOrganizationsFullAccess` and `IAMFullAccess` are more than sufficient. + - **(!)** Restrictive custom IAM policy adhering to the principle of least privilege is recommended. +- Create SCP policies under AWS Organizations. + - SCP policy restricting member accounts under root with a custom key-value tag. + - Manually attach policy to root. + - SCP policy for course semester OUs (e.g. to only allow EC2 resources of a specific type). + - Automatically attached to course semester OUs. +- Configure a current AWS Policy under `Projects/AWS Policies` in the Django admin panel. + - Set the base ID (root or OU) value. + - Set the SCP policy ID value for course semester OUs. + - Set the restricting custom key-value tag specified in the root SCP policy. + - Mark the `Is current policy` checkbox to make the configuration active. + ### Dependency Management The Python dependencies are managed using a tool called [Poetry](https://python-poetry.org/), which automatically creates virtual environments that ease development and makes it easy to manage the dependencies. See the [Poetry documentation](https://python-poetry.org/docs/) for more information. @@ -373,6 +411,7 @@ This repository is public and the GitHub Actions CI runner logs are also public, The current server is an Amazon Web Services Elastic Cloud Computing (AWS EC2) instance that runs Ubuntu 18.04. EC2 instances have a default `ubuntu` user, that is allowed to execute `sudo` without password. The `docker-compose.yaml` file includes all services that are necessary to run the website in a production environment. That is why Docker is the only dependency on the host. These steps are the necessary setup for a production server. + 1. 
Add the SSH public keys of engineers to the `authorized_keys` of the `ubuntu` user. 2. Disable SSH password login. 3. Install `docker` and `docker-compose`. From 858bc3e076ef7e9bc0c485c4a7f2bd35a33ea74e Mon Sep 17 00:00:00 2001 From: 1058274 <70607431+1058274@users.noreply.github.com> Date: Fri, 9 Jun 2023 17:51:59 +0200 Subject: [PATCH 32/32] Merge upstream main --- poetry.lock | 1844 +++++++++-------- pyproject.toml | 2 + website/giphousewebsite/settings/base.py | 43 + website/giphousewebsite/urls.py | 1 + .../0007_alter_project_description.py | 19 + .../migrations/0017_merge_20230609_1748.py | 13 + website/projects/models.py | 4 +- .../projects/templates/projects/index.html | 7 +- 8 files changed, 1064 insertions(+), 869 deletions(-) create mode 100644 website/projects/migrations/0007_alter_project_description.py create mode 100644 website/projects/migrations/0017_merge_20230609_1748.py diff --git a/poetry.lock b/poetry.lock index 9e1c0056..c3a44377 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,5 @@ +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. + [[package]] name = "absl-py" version = "1.3.0" @@ -5,6 +7,10 @@ description = "Abseil Python Common Libraries, see https://github.com/abseil/abs category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "absl-py-1.3.0.tar.gz", hash = "sha256:463c38a08d2e4cef6c498b76ba5bd4858e4c6ef51da1a5a1f27139a022e20248"}, + {file = "absl_py-1.3.0-py3-none-any.whl", hash = "sha256:34995df9bd7a09b3b8749e230408f5a2a2dd7a68a0d33c12a3d0cb15a041a507"}, +] [[package]] name = "admin-totals" @@ -13,6 +19,11 @@ description = "Django Admin Totals, add totals to your columns in Django admin. 
category = "main" optional = false python-versions = "*" +files = [ + {file = "admin-totals-1.0.1.tar.gz", hash = "sha256:ba46e0307d35f3e6b2d10db8f23bb593e8024d59a074946d5a292b8de98f2509"}, + {file = "admin_totals-1.0.1-py2-none-any.whl", hash = "sha256:609017540245373afe78dca105760331a9c2aa6fbafca1b5b5f1d12c7e6b3895"}, + {file = "admin_totals-1.0.1-py3-none-any.whl", hash = "sha256:59c190e478f8d31d10117f75d4ff0a4dc73db8975ef9d6438515ff326019638d"}, +] [package.dependencies] Django = "*" @@ -24,6 +35,10 @@ description = "ASGI specs, helper code, and adapters" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, + {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, +] [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] @@ -35,6 +50,29 @@ description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, + {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, + {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, + {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, + {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, + {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, + {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, + {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, + {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, + {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, + {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, + {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, + {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, + {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, + {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, + {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, + {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, + {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, + {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, + {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, + {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, +] [package.dependencies] click = ">=8.0.0" @@ -49,16 +87,41 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "bleach" +version = "5.0.1" +description = "An easy safelist-based HTML-sanitizing tool." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, + {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, +] + +[package.dependencies] +six = ">=1.9.0" +tinycss2 = {version = ">=1.1.0,<1.2", optional = true, markers = "extra == \"css\""} +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.2)"] +dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"] + [[package]] name = "boto3" -version = "1.26.78" +version = "1.26.150" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" +files = [ + {file = "boto3-1.26.150-py3-none-any.whl", hash = "sha256:0ab83f1b8f997527a513152bc64fd1873536b1d92bdc98cb40f927aca6af6325"}, + {file = "boto3-1.26.150.tar.gz", hash = "sha256:be4e27d48744651fbd0898a6b51faaddd71936651167ba3c2e19855083ce137e"}, +] [package.dependencies] -botocore = ">=1.29.78,<1.30.0" +botocore = ">=1.29.150,<1.30.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -67,11 +130,15 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.29.78" +version = "1.29.150" description = "Low-level, data-driven core of boto 3." 
category = "main" optional = false python-versions = ">= 3.7" +files = [ + {file = "botocore-1.29.150-py3-none-any.whl", hash = "sha256:9af58faa67c99d860eabba4cd030b5ee5f4e7e1c301edd6a9174419f75b39334"}, + {file = "botocore-1.29.150.tar.gz", hash = "sha256:0e8c8f0dab008418e4e136ecf2a450fa01bae5b725b7b43ff7cc13beebbf33aa"}, +] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" @@ -88,6 +155,10 @@ description = "Extensible memoizing collections and decorators" category = "main" optional = false python-versions = "~=3.7" +files = [ + {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, + {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, +] [[package]] name = "certifi" @@ -96,6 +167,10 @@ description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, + {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, +] [[package]] name = "cffi" @@ -104,6 +179,72 @@ description = "Foreign Function Interface for Python calling C code." 
category = "main" optional = false python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = 
"cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] [package.dependencies] pycparser = "*" @@ -115,9 +256,13 @@ description 
= "The Real First Universal Charset Detector. Open, modern and activ category = "main" optional = false python-versions = ">=3.6.0" +files = [ + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, +] [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "click" @@ -126,6 +271,10 @@ description = "Composable command line interface toolkit" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -137,6 +286,10 @@ description = "Cross-platform colored terminal text." 
category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] name = "coverage" @@ -145,6 +298,58 @@ description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = 
"sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = 
"coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = 
"coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + 
{file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, +] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} @@ -159,6 +364,34 @@ description = "cryptography is a package which provides cryptographic recipes an category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320"}, + {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2"}, + {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c"}, + {file = 
"cryptography-38.0.3-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0"}, + {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748"}, + {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146"}, + {file = "cryptography-38.0.3-cp36-abi3-win32.whl", hash = "sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0"}, + {file = "cryptography-38.0.3-cp36-abi3-win_amd64.whl", hash = "sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220"}, + {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd"}, + {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55"}, + {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249"}, + {file = "cryptography-38.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548"}, + {file = "cryptography-38.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a"}, + {file = "cryptography-38.0.3.tar.gz", hash = "sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd"}, +] [package.dependencies] cffi = ">=1.12" @@ -178,12 +411,16 @@ description = "Python @deprecated decorator to deprecate old python classes, fun category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, + {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, +] [package.dependencies] wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest (<5)", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "pytest", "pytest-cov", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] +dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", 
"sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] [[package]] name = "django" @@ -192,6 +429,10 @@ description = "A high-level Python web framework that encourages rapid developme category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "Django-4.1.3-py3-none-any.whl", hash = "sha256:6b1de6886cae14c7c44d188f580f8ba8da05750f544c80ae5ad43375ab293cd5"}, + {file = "Django-4.1.3.tar.gz", hash = "sha256:678bbfc8604eb246ed54e2063f0765f13b321a50526bdc8cb1f943eda7fa31f1"}, +] [package.dependencies] asgiref = ">=3.5.2,<4" @@ -209,6 +450,7 @@ description = "A simple Django app to render list filters in django admin using category = "main" optional = false python-versions = "*" +files = [] develop = false [package.dependencies] @@ -227,10 +469,30 @@ description = "A helper class for handling configuration defaults of packaged ap category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "django-appconf-1.0.5.tar.gz", hash = "sha256:be3db0be6c81fa84742000b89a81c016d70ae66a7ccb620cdef592b1f1a6aaa4"}, + {file = "django_appconf-1.0.5-py3-none-any.whl", hash = "sha256:ae9f864ee1958c815a965ed63b3fba4874eec13de10236ba063a788f9a17389d"}, +] [package.dependencies] django = "*" +[[package]] +name = "django-bleach" +version = "3.0.1" +description = "Easily use bleach with Django models and templates" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "django-bleach-3.0.1.tar.gz", hash = "sha256:0e5f145bca1a52e822097853403832d92e957e4a1d47bb90233de9d07219247b"}, + {file = "django_bleach-3.0.1-py2.py3-none-any.whl", hash = "sha256:093973e02e02f88bbe38f6bbc903d28ef762ba9f3f84aafb3823f27922e819ed"}, +] + +[package.dependencies] +bleach = {version = ">=5,<6", extras = ["css"]} +Django = ">=3.2" + [[package]] name = "django-bootstrap5" version = "22.1" @@ -238,6 +500,10 @@ description = "Bootstrap 5 for Django" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = 
"django-bootstrap5-22.1.tar.gz", hash = "sha256:70b51f020ef95a64780a0b5d5fdb1fade6e7b5e26c53355cc4f3648eca239cab"}, + {file = "django_bootstrap5-22.1-py3-none-any.whl", hash = "sha256:cf3f257abf750f19e47eddc106066ddb182576185494965d1408eddcb3a7380b"}, +] [package.dependencies] Django = ">=3.2" @@ -249,6 +515,10 @@ description = "Compresses linked and inline JavaScript or CSS into single cached category = "main" optional = false python-versions = "*" +files = [ + {file = "django_compressor-4.1-py2.py3-none-any.whl", hash = "sha256:61f313852b4c8d4ef2534cda3d2366f45ca3e399b3cbe10590e516cc6b45542d"}, + {file = "django_compressor-4.1.tar.gz", hash = "sha256:8ece621d2a98f6c6635480cb8b3701db890a99f793f95ca20cb00abc194d331d"}, +] [package.dependencies] django-appconf = ">=1.0.3" @@ -262,6 +532,10 @@ description = "Django app to easily add actions to an object's admin change form category = "main" optional = false python-versions = ">=3" +files = [ + {file = "django-easy-admin-object-actions-1.1.0.tar.gz", hash = "sha256:a60fd5164c1299a6ed3c174a041c30e9613d9f7c28fe4789068d19cb7aa7cdf2"}, + {file = "django_easy_admin_object_actions-1.1.0-py3-none-any.whl", hash = "sha256:4bb371e0938af4ab5863b78d6100dbb07f960bd533f9dd9448c944d82fd18394"}, +] [package.dependencies] django = ">=3" @@ -273,10 +547,26 @@ description = "SASS processor to compile SCSS files into *.css, while rendering, category = "main" optional = false python-versions = "*" +files = [ + {file = "django-sass-processor-1.2.2.tar.gz", hash = "sha256:f6098c181cc95a21593df6bb502791e32015615222803de216fdcc8bb42c0f77"}, + {file = "django_sass_processor-1.2.2-py3-none-any.whl", hash = "sha256:d5e2970228ec9648da83d083a2b468fa682bef80357d0bab8e3f6c6df301681e"}, +] [package.extras] management-command = ["django-compressor (>=2.4)"] +[[package]] +name = "django-tinymce" +version = "3.5.0" +description = "A Django application that contains a widget to render a form field as a TinyMCE editor." 
+category = "main" +optional = false +python-versions = "*" +files = [ + {file = "django-tinymce-3.5.0.tar.gz", hash = "sha256:ebe7e39e080415a0d4ca339f0f159754a7cc1dc7cd5276f32f9d3db3220134f8"}, + {file = "django_tinymce-3.5.0-py3-none-any.whl", hash = "sha256:f9d3758670ad55912cbabdd41a14e0b6cfda47868f9c6d92a4480ca320356d53"}, +] + [[package]] name = "faker" version = "8.16.0" @@ -284,6 +574,10 @@ description = "Faker is a Python package that generates fake data for you." category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "Faker-8.16.0-py3-none-any.whl", hash = "sha256:bb10913b9d3ac2aa37180f816c82040e81f9e0c32cb08445533f293cec8930bf"}, + {file = "Faker-8.16.0.tar.gz", hash = "sha256:d70b375d0af0e4c3abd594003691a1055a96281a414884e623d27bccc7d781da"}, +] [package.dependencies] python-dateutil = ">=2.4" @@ -296,6 +590,10 @@ description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false python-versions = ">=3.6.1" +files = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" @@ -309,9 +607,14 @@ description = "Flake8 and pylama plugin that checks the ordering of import state category = "dev" optional = false python-versions = "*" +files = [ + {file = "flake8-import-order-0.18.1.tar.gz", hash = "sha256:a28dc39545ea4606c1ac3c24e9d05c849c6e5444a50fb7e9cdd430fc94de6e92"}, + {file = "flake8_import_order-0.18.1-py2.py3-none-any.whl", hash = "sha256:90a80e46886259b9c396b578d75c749801a41ee969a235e163cfe1be7afd2543"}, +] [package.dependencies] pycodestyle = "*" +setuptools = "*" [[package]] name = "freezegun" @@ -320,6 +623,10 @@ description = "Let your Python tests travel through time" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = 
"freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, + {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, +] [package.dependencies] python-dateutil = ">=2.7" @@ -331,6 +638,10 @@ description = "Google API client core library" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.10.2.tar.gz", hash = "sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320"}, + {file = "google_api_core-2.10.2-py3-none-any.whl", hash = "sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e"}, +] [package.dependencies] google-auth = ">=1.25.0,<3.0dev" @@ -350,7 +661,11 @@ description = "Google API Client Library for Python" category = "main" optional = false python-versions = ">=3.7" - +files = [ + {file = "google-api-python-client-2.65.0.tar.gz", hash = "sha256:b8a0ca8454ad57bc65199044717d3d214197ae1e2d666426bbcd4021b36762e0"}, + {file = "google_api_python_client-2.65.0-py2.py3-none-any.whl", hash = "sha256:2c6611530308b3f931dcf1360713aa3a20cf465d0bf2bac65f2ec99e8c9860de"}, +] + [package.dependencies] google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" google-auth = ">=1.19.0,<3.0.0dev" @@ -365,6 +680,10 @@ description = "Google Authentication Library" category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" +files = [ + {file = "google-auth-2.14.0.tar.gz", hash = "sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d"}, + {file = "google_auth-2.14.0-py2.py3-none-any.whl", hash = "sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700"}, +] [package.dependencies] cachetools = ">=2.0.0,<6.0" @@ -374,7 +693,7 @@ six = ">=1.9.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] -enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] 
+enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] @@ -385,6 +704,10 @@ description = "Google Authentication Library: httplib2 transport" category = "main" optional = false python-versions = "*" +files = [ + {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, + {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, +] [package.dependencies] google-auth = "*" @@ -398,6 +721,10 @@ description = "Google Authentication Library" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "google-auth-oauthlib-0.7.1.tar.gz", hash = "sha256:9940f543f77d1447432a93781d7c931fb53e418023351ad4bf9e92837a1154ec"}, + {file = "google_auth_oauthlib-0.7.1-py2.py3-none-any.whl", hash = "sha256:860e54c4b58b2664116c9cb44325bc0ec92bcd93e8211698ceea911b1b873b86"}, +] [package.dependencies] google-auth = ">=2.14.0" @@ -413,6 +740,10 @@ description = "Common protobufs used in Google APIs" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.56.4.tar.gz", hash = "sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417"}, + {file = "googleapis_common_protos-1.56.4-py2.py3-none-any.whl", hash = "sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394"}, +] [package.dependencies] protobuf = ">=3.15.0,<5.0.0dev" @@ -427,6 +758,10 @@ description = "A comprehensive HTTP client library." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "httplib2-0.21.0-py3-none-any.whl", hash = "sha256:987c8bb3eb82d3fa60c68699510a692aa2ad9c4bd4f123e51dfb1488c14cdd01"}, + {file = "httplib2-0.21.0.tar.gz", hash = "sha256:fc144f091c7286b82bec71bdbd9b27323ba709cc612568d3000893bfd9cb4b34"}, +] [package.dependencies] pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} @@ -438,6 +773,10 @@ description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] [[package]] name = "jinja2" @@ -446,6 +785,10 @@ description = "A very fast and expressive template engine." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] [package.dependencies] MarkupSafe = ">=2.0" @@ -460,6 +803,10 @@ description = "JSON Matching Expressions" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] [[package]] name = "libsass" @@ -468,17 +815,81 @@ description = "Sass for Python: A straightforward binding of libsass for Python. 
category = "main" optional = false python-versions = "*" +files = [ + {file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"}, + {file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"}, + {file = "libsass-0.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:6b984510ed94993708c0d697b4fef2d118929bbfffc3b90037be0f5ccadf55e7"}, + {file = "libsass-0.21.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e25dd9047a9392d3c59a0b869e0404f2b325a03871ee45285ee33b3664f5613"}, + {file = "libsass-0.21.0-cp36-abi3-macosx_10_14_x86_64.whl", hash = "sha256:12f39712de38689a8b785b7db41d3ba2ea1d46f9379d81ea4595802d91fa6529"}, + {file = "libsass-0.21.0-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e2b1a7d093f2e76dc694c17c0c285e846d0b0deb0e8b21dc852ba1a3a4e2f1d6"}, + {file = "libsass-0.21.0-cp36-abi3-win32.whl", hash = "sha256:abc29357ee540849faf1383e1746d40d69ed5cb6d4c346df276b258f5aa8977a"}, + {file = "libsass-0.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:659ae41af8708681fa3ec73f47b9735a6725e71c3b66ff570bfce78952f2314e"}, + {file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"}, + {file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"}, +] [package.dependencies] six = "*" [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = 
"MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = 
"MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] [[package]] name = "mccabe" @@ -487,14 +898,22 @@ description = 
"McCabe checker, plugin for flake8" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] [[package]] name = "moto" -version = "4.1.3" +version = "4.1.11" description = "" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "moto-4.1.11-py2.py3-none-any.whl", hash = "sha256:5003126c46ce70fe351ff1cb67dc8d9a5983f403fae13b7628b0fb503d19039e"}, + {file = "moto-4.1.11.tar.gz", hash = "sha256:f3e966ba1460751e19eab5356545813b29c05478b47eb0da445d688949339be2"}, +] [package.dependencies] boto3 = ">=1.9.201" @@ -508,17 +927,17 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.3)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.2.8)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] apigatewayv2 = ["PyYAML (>=5.1)"] appsync = ["graphql-core"] -awslambda = ["docker (>=2.5.1)"] -batch = ["docker (>=2.5.1)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", 
"pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.3)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] ds = ["sshpubkeys (>=3.1.0)"] -dynamodb = ["docker (>=2.5.1)"] -dynamodbstreams = ["docker (>=2.5.1)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.3)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.3)"] ebs = ["sshpubkeys (>=3.1.0)"] ec2 = ["sshpubkeys (>=3.1.0)"] efs = ["sshpubkeys (>=3.1.0)"] @@ -526,8 +945,8 @@ eks = ["sshpubkeys (>=3.1.0)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] route53resolver = ["sshpubkeys (>=3.1.0)"] -s3 = ["PyYAML (>=5.1)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.3.3)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.3)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] ssm = ["PyYAML (>=5.1)"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] @@ -538,6 +957,10 @@ description = 
"Experimental type system extensions for programs checked with the category = "dev" optional = false python-versions = "*" +files = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] [[package]] name = "numpy" @@ -546,6 +969,36 @@ description = "NumPy is the fundamental package for array computing with Python. category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "numpy-1.23.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d79ada05005f6f4f337d3bb9de8a7774f259341c70bc88047a1f7b96a4bcb2"}, + {file = "numpy-1.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:926db372bc4ac1edf81cfb6c59e2a881606b409ddc0d0920b988174b2e2a767f"}, + {file = "numpy-1.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c237129f0e732885c9a6076a537e974160482eab8f10db6292e92154d4c67d71"}, + {file = "numpy-1.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8365b942f9c1a7d0f0dc974747d99dd0a0cdfc5949a33119caf05cb314682d3"}, + {file = "numpy-1.23.4-cp310-cp310-win32.whl", hash = "sha256:2341f4ab6dba0834b685cce16dad5f9b6606ea8a00e6da154f5dbded70fdc4dd"}, + {file = "numpy-1.23.4-cp310-cp310-win_amd64.whl", hash = "sha256:d331afac87c92373826af83d2b2b435f57b17a5c74e6268b79355b970626e329"}, + {file = "numpy-1.23.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:488a66cb667359534bc70028d653ba1cf307bae88eab5929cd707c761ff037db"}, + {file = "numpy-1.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce03305dd694c4873b9429274fd41fc7eb4e0e4dea07e0af97a933b079a5814f"}, + {file = "numpy-1.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8981d9b5619569899666170c7c9748920f4a5005bf79c72c07d08c8a035757b0"}, + {file = 
"numpy-1.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a70a7d3ce4c0e9284e92285cba91a4a3f5214d87ee0e95928f3614a256a1488"}, + {file = "numpy-1.23.4-cp311-cp311-win32.whl", hash = "sha256:5e13030f8793e9ee42f9c7d5777465a560eb78fa7e11b1c053427f2ccab90c79"}, + {file = "numpy-1.23.4-cp311-cp311-win_amd64.whl", hash = "sha256:7607b598217745cc40f751da38ffd03512d33ec06f3523fb0b5f82e09f6f676d"}, + {file = "numpy-1.23.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ab46e4e7ec63c8a5e6dbf5c1b9e1c92ba23a7ebecc86c336cb7bf3bd2fb10e5"}, + {file = "numpy-1.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8aae2fb3180940011b4862b2dd3756616841c53db9734b27bb93813cd79fce6"}, + {file = "numpy-1.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c053d7557a8f022ec823196d242464b6955a7e7e5015b719e76003f63f82d0f"}, + {file = "numpy-1.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0882323e0ca4245eb0a3d0a74f88ce581cc33aedcfa396e415e5bba7bf05f68"}, + {file = "numpy-1.23.4-cp38-cp38-win32.whl", hash = "sha256:dada341ebb79619fe00a291185bba370c9803b1e1d7051610e01ed809ef3a4ba"}, + {file = "numpy-1.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:0fe563fc8ed9dc4474cbf70742673fc4391d70f4363f917599a7fa99f042d5a8"}, + {file = "numpy-1.23.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c67b833dbccefe97cdd3f52798d430b9d3430396af7cdb2a0c32954c3ef73894"}, + {file = "numpy-1.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f76025acc8e2114bb664294a07ede0727aa75d63a06d2fae96bf29a81747e4a7"}, + {file = "numpy-1.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12ac457b63ec8ded85d85c1e17d85efd3c2b0967ca39560b307a35a6703a4735"}, + {file = "numpy-1.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95de7dc7dc47a312f6feddd3da2500826defdccbc41608d0031276a24181a2c0"}, + {file = "numpy-1.23.4-cp39-cp39-win32.whl", hash = 
"sha256:f2f390aa4da44454db40a1f0201401f9036e8d578a25f01a6e237cea238337ef"}, + {file = "numpy-1.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:f260da502d7441a45695199b4e7fd8ca87db659ba1c78f2bbf31f934fe76ae0e"}, + {file = "numpy-1.23.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61be02e3bf810b60ab74e81d6d0d36246dbfb644a462458bb53b595791251911"}, + {file = "numpy-1.23.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296d17aed51161dbad3c67ed6d164e51fcd18dbcd5dd4f9d0a9c6055dce30810"}, + {file = "numpy-1.23.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4d52914c88b4930dafb6c48ba5115a96cbab40f45740239d9f4159c4ba779962"}, + {file = "numpy-1.23.4.tar.gz", hash = "sha256:ed2cc92af0efad20198638c69bb0fc2870a58dabfba6eb722c933b48556c686c"}, +] [[package]] name = "oauthlib" @@ -554,6 +1007,10 @@ description = "A generic, spec-compliant, thorough implementation of the OAuth r category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] [package.extras] rsa = ["cryptography (>=3.0.0)"] @@ -567,6 +1024,31 @@ description = "Google OR-Tools python libraries and modules" category = "main" optional = false python-versions = ">= 3.6" +files = [ + {file = "ortools-9.4.1874-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c7452ef873959c5b778ab0bca9d07960bd678a02ce1f99c2900374483642958d"}, + {file = "ortools-9.4.1874-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1d5764186a04168777c89c27a752854e873c02d5f2f0f774ea0f4e98cfd9294"}, + {file = "ortools-9.4.1874-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:faae0765f83f0ab934f7429f4d266b13c1b804fcdac78862f766ef57606388ef"}, + {file = 
"ortools-9.4.1874-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a840bdcdc04b646953355d3a04c8b2ac08409edf103bce323bbe7caec28b2aa"}, + {file = "ortools-9.4.1874-cp310-cp310-win_amd64.whl", hash = "sha256:809474eed0b1b05489c0396a0e905dad15d66cd2672ed90e6a0382c1340123c3"}, + {file = "ortools-9.4.1874-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:cf01ca0fc6ce02460b485fa429cbc836585f685cf5269cd3c1f8af6995809659"}, + {file = "ortools-9.4.1874-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c13ed1d8297f996e8be778a88dda504bceb8b8de28a26a8a489f27faadd8ca8"}, + {file = "ortools-9.4.1874-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4e1e38911344ce265cb3b58ddc5fa14d9136a171a07a4ce6f668c14263d479"}, + {file = "ortools-9.4.1874-cp36-cp36m-win_amd64.whl", hash = "sha256:b7e8a08901f3e8b1e2b685fa338705c33b6bc13494b695587609eef7cec36d75"}, + {file = "ortools-9.4.1874-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:70863800eaa90eb72bf7057783e2c33e26c45a120c4b92fc2432efddfb5b7ea1"}, + {file = "ortools-9.4.1874-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b5e4131041e763c035747eaa942fe6f39a5ece7c4987c31927c42e0c2406126"}, + {file = "ortools-9.4.1874-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f8b8002068ff8ceed80512488fb03ce26fed46c7e8924820ea6816a30edabfc"}, + {file = "ortools-9.4.1874-cp37-cp37m-win_amd64.whl", hash = "sha256:c61b09d951027fef534065ea5752928a2a5519fb64c8f078434e33c110742813"}, + {file = "ortools-9.4.1874-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:6985ca897d9c412d85912b2fb8256e0e5b58dcbb921f4fe06f36df34a59be4ab"}, + {file = "ortools-9.4.1874-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eb7aeeadc2d17ca7170bda9dac5beedfb4d8359f6e6530be68d219525f30cc27"}, + {file = "ortools-9.4.1874-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:52e49d97adc8684dce2185dbab9b772f5d65a1823a7e71959997aa14f34df2c4"}, + {file = "ortools-9.4.1874-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edff7689201afd24e63b5a1d5375ccbc75c6de3efe7e5ecc7fcbef22054c3a"}, + {file = "ortools-9.4.1874-cp38-cp38-win_amd64.whl", hash = "sha256:160b833363b5acbf53e998df3f031673ed98a879962bcafd59388cf8adc2fb47"}, + {file = "ortools-9.4.1874-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:98295c0f7ba0f363292775aab958aae5f6384e4b0e3f23565bf98a38215e9f41"}, + {file = "ortools-9.4.1874-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d6cd9ef6758377fc892026db51e73f5233727c5054deead7965ef57aa6812b91"}, + {file = "ortools-9.4.1874-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38d053961d0f804a8018bf6fa907f51ddb4259777e9d5994b78aed5dd01587f"}, + {file = "ortools-9.4.1874-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:283aa430551b0b6a0553ee5564f51cbb641ba6e534ca9822c72628a3cbcbb425"}, + {file = "ortools-9.4.1874-cp39-cp39-win_amd64.whl", hash = "sha256:78d7f69726a96af990e943b0df13e4af81faa8c8c2dc4ee9bc629a4fcc2bb066"}, +] [package.dependencies] absl-py = ">=0.13" @@ -580,6 +1062,10 @@ description = "Utility library for gitignore style pattern matching of file path category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, + {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, +] [[package]] name = "pillow" @@ -588,6 +1074,49 @@ description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "Pillow-8.4.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:81f8d5c81e483a9442d72d182e1fb6dcb9723f289a57e8030811bac9ea3fef8d"}, + {file = 
"Pillow-8.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f97cfb1e5a392d75dd8b9fd274d205404729923840ca94ca45a0af57e13dbe6"}, + {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb9fc393f3c61f9054e1ed26e6fe912c7321af2f41ff49d3f83d05bacf22cc78"}, + {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d82cdb63100ef5eedb8391732375e6d05993b765f72cb34311fab92103314649"}, + {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62cc1afda735a8d109007164714e73771b499768b9bb5afcbbee9d0ff374b43f"}, + {file = "Pillow-8.4.0-cp310-cp310-win32.whl", hash = "sha256:e3dacecfbeec9a33e932f00c6cd7996e62f53ad46fbe677577394aaa90ee419a"}, + {file = "Pillow-8.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:620582db2a85b2df5f8a82ddeb52116560d7e5e6b055095f04ad828d1b0baa39"}, + {file = "Pillow-8.4.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:1bc723b434fbc4ab50bb68e11e93ce5fb69866ad621e3c2c9bdb0cd70e345f55"}, + {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72cbcfd54df6caf85cc35264c77ede902452d6df41166010262374155947460c"}, + {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70ad9e5c6cb9b8487280a02c0ad8a51581dcbbe8484ce058477692a27c151c0a"}, + {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25a49dc2e2f74e65efaa32b153527fc5ac98508d502fa46e74fa4fd678ed6645"}, + {file = "Pillow-8.4.0-cp36-cp36m-win32.whl", hash = "sha256:93ce9e955cc95959df98505e4608ad98281fff037350d8c2671c9aa86bcf10a9"}, + {file = "Pillow-8.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2e4440b8f00f504ee4b53fe30f4e381aae30b0568193be305256b1462216feff"}, + {file = "Pillow-8.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8c803ac3c28bbc53763e6825746f05cc407b20e4a69d0122e526a582e3b5e153"}, + {file = 
"Pillow-8.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8a17b5d948f4ceeceb66384727dde11b240736fddeda54ca740b9b8b1556b29"}, + {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1394a6ad5abc838c5cd8a92c5a07535648cdf6d09e8e2d6df916dfa9ea86ead8"}, + {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:792e5c12376594bfcb986ebf3855aa4b7c225754e9a9521298e460e92fb4a488"}, + {file = "Pillow-8.4.0-cp37-cp37m-win32.whl", hash = "sha256:d99ec152570e4196772e7a8e4ba5320d2d27bf22fdf11743dd882936ed64305b"}, + {file = "Pillow-8.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7b7017b61bbcdd7f6363aeceb881e23c46583739cb69a3ab39cb384f6ec82e5b"}, + {file = "Pillow-8.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:d89363f02658e253dbd171f7c3716a5d340a24ee82d38aab9183f7fdf0cdca49"}, + {file = "Pillow-8.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a0956fdc5defc34462bb1c765ee88d933239f9a94bc37d132004775241a7585"}, + {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b7bb9de00197fb4261825c15551adf7605cf14a80badf1761d61e59da347779"}, + {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72b9e656e340447f827885b8d7a15fc8c4e68d410dc2297ef6787eec0f0ea409"}, + {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5a4532a12314149d8b4e4ad8ff09dde7427731fcfa5917ff16d0291f13609df"}, + {file = "Pillow-8.4.0-cp38-cp38-win32.whl", hash = "sha256:82aafa8d5eb68c8463b6e9baeb4f19043bb31fefc03eb7b216b51e6a9981ae09"}, + {file = "Pillow-8.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:066f3999cb3b070a95c3652712cffa1a748cd02d60ad7b4e485c3748a04d9d76"}, + {file = "Pillow-8.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:5503c86916d27c2e101b7f71c2ae2cddba01a2cf55b8395b0255fd33fa4d1f1a"}, + {file = "Pillow-8.4.0-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:4acc0985ddf39d1bc969a9220b51d94ed51695d455c228d8ac29fcdb25810e6e"}, + {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b052a619a8bfcf26bd8b3f48f45283f9e977890263e4571f2393ed8898d331b"}, + {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:493cb4e415f44cd601fcec11c99836f707bb714ab03f5ed46ac25713baf0ff20"}, + {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8831cb7332eda5dc89b21a7bce7ef6ad305548820595033a4b03cf3091235ed"}, + {file = "Pillow-8.4.0-cp39-cp39-win32.whl", hash = "sha256:5e9ac5f66616b87d4da618a20ab0a38324dbe88d8a39b55be8964eb520021e02"}, + {file = "Pillow-8.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:3eb1ce5f65908556c2d8685a8f0a6e989d887ec4057326f6c22b24e8a172c66b"}, + {file = "Pillow-8.4.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ddc4d832a0f0b4c52fff973a0d44b6c99839a9d016fe4e6a1cb8f3eea96479c2"}, + {file = "Pillow-8.4.0-pp36-pypy36_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3e5ddc44c14042f0844b8cf7d2cd455f6cc80fd7f5eefbe657292cf601d9ad"}, + {file = "Pillow-8.4.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c70e94281588ef053ae8998039610dbd71bc509e4acbc77ab59d7d2937b10698"}, + {file = "Pillow-8.4.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:3862b7256046fcd950618ed22d1d60b842e3a40a48236a5498746f21189afbbc"}, + {file = "Pillow-8.4.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4901622493f88b1a29bd30ec1a2f683782e57c3c16a2dbc7f2595ba01f639df"}, + {file = "Pillow-8.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c471a734240653a0ec91dec0996696eea227eafe72a33bd06c92697728046b"}, + {file = "Pillow-8.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:244cf3b97802c34c41905d22810846802a3329ddcb93ccc432870243211c79fc"}, + {file = "Pillow-8.4.0.tar.gz", 
hash = "sha256:b8e2f83c56e141920c39464b852de3719dfbfb6e3c99a2d8da0edf4fb33176ed"}, +] [[package]] name = "platformdirs" @@ -596,6 +1125,10 @@ description = "A small Python package for determining appropriate platform-speci category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "platformdirs-2.5.3-py3-none-any.whl", hash = "sha256:0cb405749187a194f444c25c82ef7225232f11564721eabffc6ec70df83b11cb"}, + {file = "platformdirs-2.5.3.tar.gz", hash = "sha256:6e52c21afff35cb659c6e52d8b4d61b9bd544557180440538f255d9382c8cbe0"}, +] [package.extras] docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] @@ -608,6 +1141,22 @@ description = "" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "protobuf-4.21.9-cp310-abi3-win32.whl", hash = "sha256:6e0be9f09bf9b6cf497b27425487706fa48c6d1632ddd94dab1a5fe11a422392"}, + {file = "protobuf-4.21.9-cp310-abi3-win_amd64.whl", hash = "sha256:a7d0ea43949d45b836234f4ebb5ba0b22e7432d065394b532cdca8f98415e3cf"}, + {file = "protobuf-4.21.9-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b5ab0b8918c136345ff045d4b3d5f719b505b7c8af45092d7f45e304f55e50a1"}, + {file = "protobuf-4.21.9-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:2c9c2ed7466ad565f18668aa4731c535511c5d9a40c6da39524bccf43e441719"}, + {file = "protobuf-4.21.9-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:e575c57dc8b5b2b2caa436c16d44ef6981f2235eb7179bfc847557886376d740"}, + {file = "protobuf-4.21.9-cp37-cp37m-win32.whl", hash = "sha256:9227c14010acd9ae7702d6467b4625b6fe853175a6b150e539b21d2b2f2b409c"}, + {file = "protobuf-4.21.9-cp37-cp37m-win_amd64.whl", hash = "sha256:a419cc95fca8694804709b8c4f2326266d29659b126a93befe210f5bbc772536"}, + {file = "protobuf-4.21.9-cp38-cp38-win32.whl", hash = "sha256:5b0834e61fb38f34ba8840d7dcb2e5a2f03de0c714e0293b3963b79db26de8ce"}, + {file = "protobuf-4.21.9-cp38-cp38-win_amd64.whl", hash = 
"sha256:84ea107016244dfc1eecae7684f7ce13c788b9a644cd3fca5b77871366556444"}, + {file = "protobuf-4.21.9-cp39-cp39-win32.whl", hash = "sha256:f9eae277dd240ae19bb06ff4e2346e771252b0e619421965504bd1b1bba7c5fa"}, + {file = "protobuf-4.21.9-cp39-cp39-win_amd64.whl", hash = "sha256:6e312e280fbe3c74ea9e080d9e6080b636798b5e3939242298b591064470b06b"}, + {file = "protobuf-4.21.9-py2.py3-none-any.whl", hash = "sha256:7eb8f2cc41a34e9c956c256e3ac766cf4e1a4c9c925dc757a41a01be3e852965"}, + {file = "protobuf-4.21.9-py3-none-any.whl", hash = "sha256:48e2cd6b88c6ed3d5877a3ea40df79d08374088e89bedc32557348848dff250b"}, + {file = "protobuf-4.21.9.tar.gz", hash = "sha256:61f21493d96d2a77f9ca84fefa105872550ab5ef71d21c458eb80edcf4885a99"}, +] [[package]] name = "psycopg2-binary" @@ -616,6 +1165,79 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = true python-versions = ">=3.6" +files = [ + {file = "psycopg2-binary-2.9.5.tar.gz", hash = "sha256:33e632d0885b95a8b97165899006c40e9ecdc634a529dca7b991eb7de4ece41c"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:0775d6252ccb22b15da3b5d7adbbf8cfe284916b14b6dc0ff503a23edb01ee85"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec46ed947801652c9643e0b1dc334cfb2781232e375ba97312c2fc256597632"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3520d7af1ebc838cc6084a3281145d5cd5bdd43fdef139e6db5af01b92596cb7"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cbc554ba47ecca8cd3396ddaca85e1ecfe3e48dd57dc5e415e59551affe568e"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:5d28ecdf191db558d0c07d0f16524ee9d67896edf2b7990eea800abeb23ebd61"}, + {file = 
"psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:b9c33d4aef08dfecbd1736ceab8b7b3c4358bf10a0121483e5cd60d3d308cc64"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:05b3d479425e047c848b9782cd7aac9c6727ce23181eb9647baf64ffdfc3da41"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1e491e6489a6cb1d079df8eaa15957c277fdedb102b6a68cfbf40c4994412fd0"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:9e32cedc389bcb76d9f24ea8a012b3cb8385ee362ea437e1d012ffaed106c17d"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:46850a640df62ae940e34a163f72e26aca1f88e2da79148e1862faaac985c302"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-win32.whl", hash = "sha256:3d790f84201c3698d1bfb404c917f36e40531577a6dda02e45ba29b64d539867"}, + {file = "psycopg2_binary-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:1764546ffeaed4f9428707be61d68972eb5ede81239b46a45843e0071104d0dd"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-macosx_10_9_universal2.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:426c2ae999135d64e6a18849a7d1ad0e1bd007277e4a8f4752eaa40a96b550ff"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cf1d44e710ca3a9ce952bda2855830fe9f9017ed6259e01fcd71ea6287565f5"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024030b13bdcbd53d8a93891a2cf07719715724fc9fee40243f3bd78b4264b8f"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcda1c84a1c533c528356da5490d464a139b6e84eb77cc0b432e38c5c6dd7882"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:2ef892cabdccefe577088a79580301f09f2a713eb239f4f9f62b2b29cafb0577"}, + {file = 
"psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_24_ppc64le.whl", hash = "sha256:af0516e1711995cb08dc19bbd05bec7dbdebf4185f68870595156718d237df3e"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e72c91bda9880f097c8aa3601a2c0de6c708763ba8128006151f496ca9065935"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e67b3c26e9b6d37b370c83aa790bbc121775c57bfb096c2e77eacca25fd0233b"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5fc447058d083b8c6ac076fc26b446d44f0145308465d745fba93a28c14c9e32"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d892bfa1d023c3781a3cab8dd5af76b626c483484d782e8bd047c180db590e4c"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-win32.whl", hash = "sha256:2abccab84d057723d2ca8f99ff7b619285d40da6814d50366f61f0fc385c3903"}, + {file = "psycopg2_binary-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:bef7e3f9dc6f0c13afdd671008534be5744e0e682fb851584c8c3a025ec09720"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:6e63814ec71db9bdb42905c925639f319c80e7909fb76c3b84edc79dadef8d60"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:212757ffcecb3e1a5338d4e6761bf9c04f750e7d027117e74aa3cd8a75bb6fbd"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8a9bcab7b6db2e3dbf65b214dfc795b4c6b3bb3af922901b6a67f7cb47d5f8"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:56b2957a145f816726b109ee3d4e6822c23f919a7d91af5a94593723ed667835"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:f95b8aca2703d6a30249f83f4fe6a9abf2e627aa892a5caaab2267d56be7ab69"}, + {file = 
"psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:70831e03bd53702c941da1a1ad36c17d825a24fbb26857b40913d58df82ec18b"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:dbc332beaf8492b5731229a881807cd7b91b50dbbbaf7fe2faf46942eda64a24"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:2d964eb24c8b021623df1c93c626671420c6efadbdb8655cb2bd5e0c6fa422ba"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:95076399ec3b27a8f7fa1cc9a83417b1c920d55cf7a97f718a94efbb96c7f503"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:3fc33295cfccad697a97a76dec3f1e94ad848b7b163c3228c1636977966b51e2"}, + {file = "psycopg2_binary-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:02551647542f2bf89073d129c73c05a25c372fc0a49aa50e0de65c3c143d8bd0"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:63e318dbe52709ed10d516a356f22a635e07a2e34c68145484ed96a19b0c4c68"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7e518a0911c50f60313cb9e74a169a65b5d293770db4770ebf004245f24b5c5"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9d38a4656e4e715d637abdf7296e98d6267df0cc0a8e9a016f8ba07e4aa3eeb"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:68d81a2fe184030aa0c5c11e518292e15d342a667184d91e30644c9d533e53e1"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:7ee3095d02d6f38bd7d9a5358fcc9ea78fcdb7176921528dd709cc63f40184f5"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:46512486be6fbceef51d7660dec017394ba3e170299d1dc30928cbedebbf103a"}, + {file = 
"psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b911dfb727e247340d36ae20c4b9259e4a64013ab9888ccb3cbba69b77fd9636"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:422e3d43b47ac20141bc84b3d342eead8d8099a62881a501e97d15f6addabfe9"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c5682a45df7d9642eff590abc73157c887a68f016df0a8ad722dcc0f888f56d7"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:b8104f709590fff72af801e916817560dbe1698028cd0afe5a52d75ceb1fce5f"}, + {file = "psycopg2_binary-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:7b3751857da3e224f5629400736a7b11e940b5da5f95fa631d86219a1beaafec"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:043a9fd45a03858ff72364b4b75090679bd875ee44df9c0613dc862ca6b98460"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9ffdc51001136b699f9563b1c74cc1f8c07f66ef7219beb6417a4c8aaa896c28"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c15ba5982c177bc4b23a7940c7e4394197e2d6a424a2d282e7c236b66da6d896"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc85b3777068ed30aff8242be2813038a929f2084f69e43ef869daddae50f6ee"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:215d6bf7e66732a514f47614f828d8c0aaac9a648c46a831955cb103473c7147"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:7d07f552d1e412f4b4e64ce386d4c777a41da3b33f7098b6219012ba534fb2c2"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a0adef094c49f242122bb145c3c8af442070dc0e4312db17e49058c1702606d4"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:00475004e5ed3e3bf5e056d66e5dcdf41a0dc62efcd57997acd9135c40a08a50"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7d88db096fa19d94f433420eaaf9f3c45382da2dd014b93e4bf3215639047c16"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:902844f9c4fb19b17dfa84d9e2ca053d4a4ba265723d62ea5c9c26b38e0aa1e6"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-win32.whl", hash = "sha256:4e7904d1920c0c89105c0517dc7e3f5c20fb4e56ba9cdef13048db76947f1d79"}, + {file = "psycopg2_binary-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:a36a0e791805aa136e9cbd0ffa040d09adec8610453ee8a753f23481a0057af5"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:25382c7d174c679ce6927c16b6fbb68b10e56ee44b1acb40671e02d29f2fce7c"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9c38d3869238e9d3409239bc05bc27d6b7c99c2a460ea337d2814b35fb4fea1b"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c6527c8efa5226a9e787507652dd5ba97b62d29b53c371a85cd13f957fe4d42"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e59137cdb970249ae60be2a49774c6dfb015bd0403f05af1fe61862e9626642d"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:d4c7b3a31502184e856df1f7bbb2c3735a05a8ce0ade34c5277e1577738a5c91"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:b9a794cef1d9c1772b94a72eec6da144c18e18041d294a9ab47669bc77a80c1d"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5254cbd4f4855e11cebf678c1a848a3042d455a22a4ce61349c36aafd4c2267"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:c5e65c6ac0ae4bf5bef1667029f81010b6017795dcb817ba5c7b8a8d61fab76f"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:74eddec4537ab1f701a1647214734bc52cee2794df748f6ae5908e00771f180a"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:01ad49d68dd8c5362e4bfb4158f2896dc6e0c02e87b8a3770fc003459f1a4425"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-win32.whl", hash = "sha256:937880290775033a743f4836aa253087b85e62784b63fd099ee725d567a48aa1"}, + {file = "psycopg2_binary-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:484405b883630f3e74ed32041a87456c5e0e63a8e3429aa93e8714c366d62bd1"}, +] [[package]] name = "pyasn1" @@ -624,6 +1246,10 @@ description = "ASN.1 types and codecs" category = "main" optional = false python-versions = "*" +files = [ + {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, +] [[package]] name = "pyasn1-modules" @@ -632,6 +1258,10 @@ description = "A collection of ASN.1-based protocols modules." 
category = "main" optional = false python-versions = "*" +files = [ + {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, + {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, +] [package.dependencies] pyasn1 = ">=0.4.6,<0.5.0" @@ -643,6 +1273,10 @@ description = "Python style guide checker" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, +] [[package]] name = "pycparser" @@ -651,6 +1285,10 @@ description = "C parser in Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] [[package]] name = "pydocstyle" @@ -659,6 +1297,10 @@ description = "Python docstring style checker" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, + {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, +] [package.dependencies] snowballstemmer = "*" @@ -673,14 +1315,22 @@ description = "passive checker of Python programs" category = "dev" optional = false python-versions = ">=3.6" - -[[package]] +files = [ + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = 
"pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] + +[[package]] name = "pygithub" version = "1.57" description = "Use the full Github API v3" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "PyGithub-1.57-py3-none-any.whl", hash = "sha256:5822febeac2391f1306c55a99af2bc8f86c8bf82ded000030cd02c18f31b731f"}, + {file = "PyGithub-1.57.tar.gz", hash = "sha256:c273f252b278fb81f1769505cc6921bdb6791e1cebd6ac850cc97dad13c31ff3"}, +] [package.dependencies] deprecated = "*" @@ -698,6 +1348,10 @@ description = "JSON Web Token implementation in Python" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, + {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, +] [package.extras] crypto = ["cryptography (>=3.4.0)"] @@ -712,6 +1366,18 @@ description = "Python binding to the Networking and Cryptography (NaCl) library" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] [package.dependencies] cffi = ">=1.4.1" @@ -727,6 +1393,10 @@ description = "pyparsing module - Classes and methods to define and execute pars category = "main" optional = false python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] [package.extras] diagrams = ["jinja2", "railroad-diagrams"] @@ -738,10 +1408,64 @@ description = "Extensions to the standard Python datetime module" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] [package.dependencies] six = ">=1.5" +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = 
"PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = 
"PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] + [[package]] name = "rcssmin" version = "1.1.0" @@ -749,6 +1473,28 @@ description = "CSS Minifier" category = "main" optional = false python-versions = "*" +files = [ + {file = "rcssmin-1.1.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2211a5c91ea14a5937b57904c9121f8bfef20987825e55368143da7d25446e3b"}, + {file = "rcssmin-1.1.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:7085d1b51dd2556f3aae03947380f6e9e1da29fb1eeadfa6766b7f105c54c9ff"}, + {file = "rcssmin-1.1.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:1512223b6a687bb747e4e531187bd49a56ed71287e7ead9529cbaa1ca4718a0a"}, + {file = "rcssmin-1.1.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:6158d0d86cd611c5304d738dc3d6cfeb23864dd78ad0d83a633f443696ac5d77"}, + {file = "rcssmin-1.1.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:0a6aae7e119509445bf7aa6da6ca0f285cc198273c20f470ad999ff83bbadcf9"}, + {file = "rcssmin-1.1.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:506e33ab4c47051f7deae35b6d8dbb4a5c025f016e90a830929a1ecc7daa1682"}, + {file = "rcssmin-1.1.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:352dd3a78eb914bb1cb269ac2b66b3154f2490a52ab605558c681de3fb5194d2"}, + {file = "rcssmin-1.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:30f5522285065cae0164d20068377d84b5d10b414156115f8729b034d0ea5e8b"}, + {file = "rcssmin-1.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:49807735f26f59404194f1e6f93254b6d5b6f7748c2a954f4470a86a40ff4c13"}, + {file = "rcssmin-1.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f1a37bbd36b050813673e62ae6464467548628690bf4d48a938170e121e8616e"}, + {file = "rcssmin-1.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ddff3a41611664c7f1d9e3d8a9c1669e0e155ac0458e586ffa834dc5953e7d9f"}, + {file = "rcssmin-1.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8b659a88850e772c84cfac4520ec223de6807875e173d8ef3248ab7f90876066"}, + 
{file = "rcssmin-1.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:1d7c2719d014e4e4df4e33b75ae8067c7e246cf470eaec8585e06e2efac7586c"}, + {file = "rcssmin-1.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:37f1242e34ca273ed2c26cf778854e18dd11b31c6bfca60e23fce146c84667c1"}, + {file = "rcssmin-1.1.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f31c82d06ba2dbf33c20db9550157e80bb0c4cbd24575c098f0831d1d2e3c5df"}, + {file = "rcssmin-1.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7da63fee37edf204bbd86785edb4d7491642adbfd1d36fd230b7ccbbd8db1a6f"}, + {file = "rcssmin-1.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c28b9eb20982b45ebe6adef8bd2547e5ed314dafddfff4eba806b0f8c166cfd1"}, + {file = "rcssmin-1.1.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:32ccaebbbd4d56eab08cf26aed36f5d33389b9d1d3ca1fecf53eb6ab77760ddf"}, + {file = "rcssmin-1.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:7c44002b79f3656348196005b9522ec5e04f182b466f66d72b16be0bd03c13d8"}, + {file = "rcssmin-1.1.0.tar.gz", hash = "sha256:27fc400627fd3d328b7fe95af2a01f5d0af6b5af39731af5d071826a1f08e362"}, +] [[package]] name = "requests" @@ -757,6 +1503,10 @@ description = "Python HTTP for Humans." category = "main" optional = false python-versions = ">=3.7, <4" +files = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] [package.dependencies] certifi = ">=2017.4.17" @@ -766,7 +1516,7 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" @@ -775,6 +1525,10 @@ description = "OAuthlib authentication support for Requests." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] [package.dependencies] oauthlib = ">=3.0.0" @@ -785,20 +1539,24 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "responses" -version = "0.22.0" +version = "0.23.1" description = "A utility library for mocking out the `requests` Python library." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "responses-0.23.1-py3-none-any.whl", hash = "sha256:8a3a5915713483bf353b6f4079ba8b2a29029d1d1090a503c70b0dc5d9d0c7bd"}, + {file = "responses-0.23.1.tar.gz", hash = "sha256:c4d9aa9fc888188f0c673eff79a8dadbe2e75b7fe879dc80a221a06e0a68138f"}, +] [package.dependencies] +pyyaml = "*" requests = ">=2.22.0,<3.0" -toml = "*" -types-toml = "*" +types-PyYAML = "*" urllib3 = ">=1.25.10" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-requests"] [[package]] name = "rjsmin" @@ -807,6 +1565,28 @@ description = "Javascript Minifier" category = "main" optional = false python-versions = "*" +files = [ + {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e18fe1a610fb105273bb369f61c2b0bd9e66a3f0792e27e4cac44e42ace1968b"}, + {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:6c395ffc130332cca744f081ed5efd5699038dcb7a5d30c3ff4bc6adb5b30a62"}, + {file = "rjsmin-1.2.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3b14f4c2933ec194eb816b71a0854ce461b6419a3d852bf360344731ab28c0a6"}, + 
{file = "rjsmin-1.2.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:54fc30519365841b27556ccc1cb94c5b4413c384ff6d467442fddba66e2e325a"}, + {file = "rjsmin-1.2.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:40e7211a25d9a11ac9ff50446e41268c978555676828af86fa1866615823bfff"}, + {file = "rjsmin-1.2.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:99e5597a812b60058baa1457387dc79cca7d273b2a700dc98bfd20d43d60711d"}, + {file = "rjsmin-1.2.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:993935654c1311280e69665367d7e6ff694ac9e1609168cf51cae8c0307df0db"}, + {file = "rjsmin-1.2.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c81229ffe5b0a0d5b3b5d5e6d0431f182572de9e9a077e85dbae5757db0ab75c"}, + {file = "rjsmin-1.2.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:1c93b29fd725e61718299ffe57de93ff32d71b313eaabbfcc7bd32ddb82831d5"}, + {file = "rjsmin-1.2.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:38a4474ed52e1575fb9da983ec8657faecd8ab3738508d36e04f87769411fd3d"}, + {file = "rjsmin-1.2.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1622fbb6c6a8daaf77da13cc83356539bfe79c1440f9664b02c7f7b150b9a18e"}, + {file = "rjsmin-1.2.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4387a00777faddf853eebdece9f2e56ebaf243c3f24676a9de6a20c5d4f3d731"}, + {file = "rjsmin-1.2.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:86c4da7285ddafe6888cb262da563570f28e4a31146b5164a7a6947b1222196b"}, + {file = "rjsmin-1.2.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d63e193a2f932a786ae82068aa76d1d126fcdff8582094caff9e5e66c4dcc124"}, + {file = "rjsmin-1.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:05efa485dfddb6418e3b86d8862463aa15641a61f6ae05e7e6de8f116ee77c69"}, + {file = "rjsmin-1.2.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:b6a7c8c8d19e154334f640954e43e57283e87bb4a2f6e23295db14eea8e9fc1d"}, + {file = "rjsmin-1.2.0-cp39-cp39-manylinux1_i686.whl", hash = 
"sha256:2ed83aca637186bafdc894b4b7fc3657e2d74014ccca7d3d69122c1e82675216"}, + {file = "rjsmin-1.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:41c7c3910f7b8816e37366b293e576ddecf696c5f2197d53cf2c1526ac336646"}, + {file = "rjsmin-1.2.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8944a8a55ac825b8e5ec29f341ecb7574697691ef416506885898d2f780fb4ca"}, + {file = "rjsmin-1.2.0.tar.gz", hash = "sha256:6c529feb6c400984452494c52dd9fdf59185afeacca2afc5174a28ab37751a1b"}, +] [[package]] name = "rsa" @@ -815,17 +1595,25 @@ description = "Pure-Python RSA implementation" category = "main" optional = false python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] [package.dependencies] pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.6.0" +version = "0.6.1" description = "An Amazon S3 Transfer Manager" category = "main" optional = false python-versions = ">= 3.7" +files = [ + {file = "s3transfer-0.6.1-py3-none-any.whl", hash = "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346"}, + {file = "s3transfer-0.6.1.tar.gz", hash = "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9"}, +] [package.dependencies] botocore = ">=1.12.36,<2.0a.0" @@ -833,6 +1621,23 @@ botocore = ">=1.12.36,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +[[package]] +name = "setuptools" +version = "65.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, + {file = "setuptools-65.5.1.tar.gz", hash = 
"sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -840,6 +1645,10 @@ description = "Python 2 and 3 compatibility utilities" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] [[package]] name = "snowballstemmer" @@ -848,6 +1657,10 @@ description = "This package provides 29 stemmers for 28 languages generated from category = "dev" optional = false python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] [[package]] 
name = "sqlparse" @@ -856,6 +1669,10 @@ description = "A non-validating SQL parser." category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "sqlparse-0.4.3-py3-none-any.whl", hash = "sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34"}, + {file = "sqlparse-0.4.3.tar.gz", hash = "sha256:69ca804846bb114d2ec380e4360a8a340db83f0ccf3afceeb1404df028f57268"}, +] [[package]] name = "text-unidecode" @@ -864,14 +1681,29 @@ description = "The most basic Text::Unidecode port" category = "dev" optional = false python-versions = "*" +files = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] [[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" +name = "tinycss2" +version = "1.1.1" +description = "A tiny CSS parser" category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" +files = [ + {file = "tinycss2-1.1.1-py3-none-any.whl", hash = "sha256:fe794ceaadfe3cf3e686b22155d0da5780dd0e273471a51846d0a02bc204fec8"}, + {file = "tinycss2-1.1.1.tar.gz", hash = "sha256:b2e44dd8883c360c35dd0d1b5aad0b610e5156c2cb3b33434634e539ead9d8bf"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["coverage[toml]", "pytest", "pytest-cov", "pytest-flake8", "pytest-isort"] [[package]] name = "tomli" @@ -880,14 +1712,22 @@ description = "A lil' TOML parser" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] 
[[package]] -name = "types-toml" -version = "0.10.8.5" -description = "Typing stubs for toml" +name = "types-pyyaml" +version = "6.0.12.10" +description = "Typing stubs for PyYAML" category = "main" optional = false python-versions = "*" +files = [ + {file = "types-PyYAML-6.0.12.10.tar.gz", hash = "sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97"}, + {file = "types_PyYAML-6.0.12.10-py3-none-any.whl", hash = "sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f"}, +] [[package]] name = "tzdata" @@ -896,6 +1736,10 @@ description = "Provider of IANA time zone data" category = "main" optional = false python-versions = ">=2" +files = [ + {file = "tzdata-2022.6-py2.py3-none-any.whl", hash = "sha256:04a680bdc5b15750c39c12a448885a51134a27ec9af83667663f0b3a1bf3f342"}, + {file = "tzdata-2022.6.tar.gz", hash = "sha256:91f11db4503385928c15598c98573e3af07e7229181bee5375bd30f1695ddcae"}, +] [[package]] name = "uritemplate" @@ -904,6 +1748,10 @@ description = "Implementation of RFC 6570 URI Templates" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, + {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, +] [[package]] name = "urllib3" @@ -912,6 +1760,10 @@ description = "HTTP library with thread-safe connection pooling, file post, and category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +files = [ + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, +] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] @@ -925,20 
+1777,39 @@ description = "The uWSGI server" category = "main" optional = true python-versions = "*" +files = [ + {file = "uwsgi-2.0.21.tar.gz", hash = "sha256:35a30d83791329429bc04fe44183ce4ab512fcf6968070a7bfba42fc5a0552a9"}, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] [[package]] name = "werkzeug" -version = "2.2.3" +version = "2.3.6" description = "The comprehensive WSGI web application library." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "Werkzeug-2.3.6-py3-none-any.whl", hash = "sha256:935539fa1413afbb9195b24880778422ed620c0fc09670945185cce4d91a8890"}, + {file = "Werkzeug-2.3.6.tar.gz", hash = "sha256:98c774df2f91b05550078891dee5f0eb0cb797a522c757a2452b9cee5b202330"}, +] [package.dependencies] MarkupSafe = ">=2.1.1" [package.extras] -watchdog = ["watchdog"] +watchdog = ["watchdog (>=2.3)"] [[package]] name = "wrapt" @@ -947,6 +1818,72 @@ description = "Module for decorators, wrappers and monkey patching." 
category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = 
"wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = 
"wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = 
"wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, +] [[package]] name = "xmltodict" @@ -955,838 +1892,15 @@ description = "Makes 
working with XML feel like you are working with JSON" category = "main" optional = false python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] [extras] -production = ["uwsgi", "psycopg2-binary"] +production = ["psycopg2-binary", "uWSGI", "uwsgi"] [metadata] -lock-version = "1.1" +lock-version = "2.0" python-versions = "^3.10" -content-hash = "d812c41bd73a271e800f7a4969553f2b8b5a748e8d2f435c8ef5b1d953451f72" - -[metadata.files] -absl-py = [ - {file = "absl-py-1.3.0.tar.gz", hash = "sha256:463c38a08d2e4cef6c498b76ba5bd4858e4c6ef51da1a5a1f27139a022e20248"}, - {file = "absl_py-1.3.0-py3-none-any.whl", hash = "sha256:34995df9bd7a09b3b8749e230408f5a2a2dd7a68a0d33c12a3d0cb15a041a507"}, -] -admin-totals = [ - {file = "admin-totals-1.0.1.tar.gz", hash = "sha256:ba46e0307d35f3e6b2d10db8f23bb593e8024d59a074946d5a292b8de98f2509"}, - {file = "admin_totals-1.0.1-py2-none-any.whl", hash = "sha256:609017540245373afe78dca105760331a9c2aa6fbafca1b5b5f1d12c7e6b3895"}, - {file = "admin_totals-1.0.1-py3-none-any.whl", hash = "sha256:59c190e478f8d31d10117f75d4ff0a4dc73db8975ef9d6438515ff326019638d"}, -] -asgiref = [ - {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, - {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, -] -black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash 
= "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, -] -boto3 = [ - {file = "boto3-1.26.78-py3-none-any.whl", hash = "sha256:0c593017fa49dbc34dcdbd5659208f2daf293a499d5f4d7e61978cd6b5d72a97"}, - {file = "boto3-1.26.78.tar.gz", hash = "sha256:488bf63d65864ab7fcdf9337c5aa4d825d444e253738a60f80789916bacc47dc"}, -] -botocore = [ - {file = "botocore-1.29.78-py3-none-any.whl", hash = "sha256:656ac8822a1b6c887a8efe1172bcefa9c9c450face26dc39998a249e8c340a23"}, - {file = "botocore-1.29.78.tar.gz", hash = "sha256:2bee6ed037590ef1e4884d944486232871513915f12a8590c63e3bb6046479bf"}, -] -cachetools = [ - {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, - {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, -] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, -] -cffi = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = 
"cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = 
"cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = 
"cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = 
"sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = 
"sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = 
"coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = 
"coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - 
{file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] -cryptography = [ - {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320"}, - {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0"}, - {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_aarch64.whl", hash = 
"sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748"}, - {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146"}, - {file = "cryptography-38.0.3-cp36-abi3-win32.whl", hash = "sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0"}, - {file = "cryptography-38.0.3-cp36-abi3-win_amd64.whl", hash = "sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220"}, - {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd"}, - {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55"}, - {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50"}, - {file = "cryptography-38.0.3-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0"}, - {file = 
"cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8"}, - {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436"}, - {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548"}, - {file = "cryptography-38.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a"}, - {file = "cryptography-38.0.3.tar.gz", hash = "sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd"}, -] -deprecated = [ - {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, - {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, -] -django = [ - {file = "Django-4.1.3-py3-none-any.whl", hash = "sha256:6b1de6886cae14c7c44d188f580f8ba8da05750f544c80ae5ad43375ab293cd5"}, - {file = "Django-4.1.3.tar.gz", hash = "sha256:678bbfc8604eb246ed54e2063f0765f13b321a50526bdc8cb1f943eda7fa31f1"}, -] -django-admin-autocomplete-filter = [] -django-appconf = [ - {file = "django-appconf-1.0.5.tar.gz", hash = "sha256:be3db0be6c81fa84742000b89a81c016d70ae66a7ccb620cdef592b1f1a6aaa4"}, - {file = "django_appconf-1.0.5-py3-none-any.whl", hash = "sha256:ae9f864ee1958c815a965ed63b3fba4874eec13de10236ba063a788f9a17389d"}, -] -django-bootstrap5 = [ - {file = "django-bootstrap5-22.1.tar.gz", hash = "sha256:70b51f020ef95a64780a0b5d5fdb1fade6e7b5e26c53355cc4f3648eca239cab"}, - {file = "django_bootstrap5-22.1-py3-none-any.whl", hash = "sha256:cf3f257abf750f19e47eddc106066ddb182576185494965d1408eddcb3a7380b"}, -] -django-compressor = [ - {file = "django_compressor-4.1-py2.py3-none-any.whl", hash = 
"sha256:61f313852b4c8d4ef2534cda3d2366f45ca3e399b3cbe10590e516cc6b45542d"}, - {file = "django_compressor-4.1.tar.gz", hash = "sha256:8ece621d2a98f6c6635480cb8b3701db890a99f793f95ca20cb00abc194d331d"}, -] -django-easy-admin-object-actions = [ - {file = "django-easy-admin-object-actions-1.1.0.tar.gz", hash = "sha256:a60fd5164c1299a6ed3c174a041c30e9613d9f7c28fe4789068d19cb7aa7cdf2"}, - {file = "django_easy_admin_object_actions-1.1.0-py3-none-any.whl", hash = "sha256:4bb371e0938af4ab5863b78d6100dbb07f960bd533f9dd9448c944d82fd18394"}, -] -django-sass-processor = [ - {file = "django-sass-processor-1.2.2.tar.gz", hash = "sha256:f6098c181cc95a21593df6bb502791e32015615222803de216fdcc8bb42c0f77"}, - {file = "django_sass_processor-1.2.2-py3-none-any.whl", hash = "sha256:d5e2970228ec9648da83d083a2b468fa682bef80357d0bab8e3f6c6df301681e"}, -] -faker = [ - {file = "Faker-8.16.0-py3-none-any.whl", hash = "sha256:bb10913b9d3ac2aa37180f816c82040e81f9e0c32cb08445533f293cec8930bf"}, - {file = "Faker-8.16.0.tar.gz", hash = "sha256:d70b375d0af0e4c3abd594003691a1055a96281a414884e623d27bccc7d781da"}, -] -flake8 = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, -] -flake8-import-order = [ - {file = "flake8-import-order-0.18.1.tar.gz", hash = "sha256:a28dc39545ea4606c1ac3c24e9d05c849c6e5444a50fb7e9cdd430fc94de6e92"}, - {file = "flake8_import_order-0.18.1-py2.py3-none-any.whl", hash = "sha256:90a80e46886259b9c396b578d75c749801a41ee969a235e163cfe1be7afd2543"}, -] -freezegun = [ - {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, - {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, -] -google-api-core = [ - {file = "google-api-core-2.10.2.tar.gz", hash = 
"sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320"}, - {file = "google_api_core-2.10.2-py3-none-any.whl", hash = "sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e"}, -] -google-api-python-client = [ - {file = "google-api-python-client-2.65.0.tar.gz", hash = "sha256:b8a0ca8454ad57bc65199044717d3d214197ae1e2d666426bbcd4021b36762e0"}, - {file = "google_api_python_client-2.65.0-py2.py3-none-any.whl", hash = "sha256:2c6611530308b3f931dcf1360713aa3a20cf465d0bf2bac65f2ec99e8c9860de"}, -] -google-auth = [ - {file = "google-auth-2.14.0.tar.gz", hash = "sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d"}, - {file = "google_auth-2.14.0-py2.py3-none-any.whl", hash = "sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700"}, -] -google-auth-httplib2 = [ - {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, - {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, -] -google-auth-oauthlib = [ - {file = "google-auth-oauthlib-0.7.1.tar.gz", hash = "sha256:9940f543f77d1447432a93781d7c931fb53e418023351ad4bf9e92837a1154ec"}, - {file = "google_auth_oauthlib-0.7.1-py2.py3-none-any.whl", hash = "sha256:860e54c4b58b2664116c9cb44325bc0ec92bcd93e8211698ceea911b1b873b86"}, -] -googleapis-common-protos = [ - {file = "googleapis-common-protos-1.56.4.tar.gz", hash = "sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417"}, - {file = "googleapis_common_protos-1.56.4-py2.py3-none-any.whl", hash = "sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394"}, -] -httplib2 = [ - {file = "httplib2-0.21.0-py3-none-any.whl", hash = "sha256:987c8bb3eb82d3fa60c68699510a692aa2ad9c4bd4f123e51dfb1488c14cdd01"}, - {file = "httplib2-0.21.0.tar.gz", hash = "sha256:fc144f091c7286b82bec71bdbd9b27323ba709cc612568d3000893bfd9cb4b34"}, -] 
-idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] -jmespath = [ - {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, - {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, -] -libsass = [ - {file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"}, - {file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"}, - {file = "libsass-0.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:6b984510ed94993708c0d697b4fef2d118929bbfffc3b90037be0f5ccadf55e7"}, - {file = "libsass-0.21.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e25dd9047a9392d3c59a0b869e0404f2b325a03871ee45285ee33b3664f5613"}, - {file = "libsass-0.21.0-cp36-abi3-macosx_10_14_x86_64.whl", hash = "sha256:12f39712de38689a8b785b7db41d3ba2ea1d46f9379d81ea4595802d91fa6529"}, - {file = "libsass-0.21.0-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e2b1a7d093f2e76dc694c17c0c285e846d0b0deb0e8b21dc852ba1a3a4e2f1d6"}, - {file = "libsass-0.21.0-cp36-abi3-win32.whl", hash = "sha256:abc29357ee540849faf1383e1746d40d69ed5cb6d4c346df276b258f5aa8977a"}, - {file = "libsass-0.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:659ae41af8708681fa3ec73f47b9735a6725e71c3b66ff570bfce78952f2314e"}, - {file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash 
= "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"}, - {file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = 
"MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = 
"MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, -] -mccabe = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] -moto = [ - {file = "moto-4.1.3-py2.py3-none-any.whl", hash = "sha256:dcd1d06662982cf3c94f36d6348251ccdcf62a1c5de5650425cb4e6f260ae7a0"}, - {file = "moto-4.1.3.tar.gz", hash = "sha256:c8200ccaa9440c2e9daa0bd5e0bd768a719db5a2c82ea8d782f0e3fa09a3c5e2"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -numpy = [ - {file = "numpy-1.23.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d79ada05005f6f4f337d3bb9de8a7774f259341c70bc88047a1f7b96a4bcb2"}, - {file = "numpy-1.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:926db372bc4ac1edf81cfb6c59e2a881606b409ddc0d0920b988174b2e2a767f"}, - {file = "numpy-1.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c237129f0e732885c9a6076a537e974160482eab8f10db6292e92154d4c67d71"}, - {file = "numpy-1.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8365b942f9c1a7d0f0dc974747d99dd0a0cdfc5949a33119caf05cb314682d3"}, - {file = "numpy-1.23.4-cp310-cp310-win32.whl", hash = "sha256:2341f4ab6dba0834b685cce16dad5f9b6606ea8a00e6da154f5dbded70fdc4dd"}, - {file = "numpy-1.23.4-cp310-cp310-win_amd64.whl", hash = "sha256:d331afac87c92373826af83d2b2b435f57b17a5c74e6268b79355b970626e329"}, - {file = "numpy-1.23.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:488a66cb667359534bc70028d653ba1cf307bae88eab5929cd707c761ff037db"}, - {file = "numpy-1.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce03305dd694c4873b9429274fd41fc7eb4e0e4dea07e0af97a933b079a5814f"}, - {file = "numpy-1.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8981d9b5619569899666170c7c9748920f4a5005bf79c72c07d08c8a035757b0"}, - {file = "numpy-1.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a70a7d3ce4c0e9284e92285cba91a4a3f5214d87ee0e95928f3614a256a1488"}, - {file = "numpy-1.23.4-cp311-cp311-win32.whl", hash = "sha256:5e13030f8793e9ee42f9c7d5777465a560eb78fa7e11b1c053427f2ccab90c79"}, - {file = "numpy-1.23.4-cp311-cp311-win_amd64.whl", hash = "sha256:7607b598217745cc40f751da38ffd03512d33ec06f3523fb0b5f82e09f6f676d"}, - {file = "numpy-1.23.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ab46e4e7ec63c8a5e6dbf5c1b9e1c92ba23a7ebecc86c336cb7bf3bd2fb10e5"}, - {file = "numpy-1.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8aae2fb3180940011b4862b2dd3756616841c53db9734b27bb93813cd79fce6"}, - {file = "numpy-1.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c053d7557a8f022ec823196d242464b6955a7e7e5015b719e76003f63f82d0f"}, - {file = "numpy-1.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0882323e0ca4245eb0a3d0a74f88ce581cc33aedcfa396e415e5bba7bf05f68"}, - {file = "numpy-1.23.4-cp38-cp38-win32.whl", hash = "sha256:dada341ebb79619fe00a291185bba370c9803b1e1d7051610e01ed809ef3a4ba"}, - {file = "numpy-1.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:0fe563fc8ed9dc4474cbf70742673fc4391d70f4363f917599a7fa99f042d5a8"}, - {file = "numpy-1.23.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c67b833dbccefe97cdd3f52798d430b9d3430396af7cdb2a0c32954c3ef73894"}, - {file = "numpy-1.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f76025acc8e2114bb664294a07ede0727aa75d63a06d2fae96bf29a81747e4a7"}, - {file = 
"numpy-1.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12ac457b63ec8ded85d85c1e17d85efd3c2b0967ca39560b307a35a6703a4735"}, - {file = "numpy-1.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95de7dc7dc47a312f6feddd3da2500826defdccbc41608d0031276a24181a2c0"}, - {file = "numpy-1.23.4-cp39-cp39-win32.whl", hash = "sha256:f2f390aa4da44454db40a1f0201401f9036e8d578a25f01a6e237cea238337ef"}, - {file = "numpy-1.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:f260da502d7441a45695199b4e7fd8ca87db659ba1c78f2bbf31f934fe76ae0e"}, - {file = "numpy-1.23.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61be02e3bf810b60ab74e81d6d0d36246dbfb644a462458bb53b595791251911"}, - {file = "numpy-1.23.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296d17aed51161dbad3c67ed6d164e51fcd18dbcd5dd4f9d0a9c6055dce30810"}, - {file = "numpy-1.23.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4d52914c88b4930dafb6c48ba5115a96cbab40f45740239d9f4159c4ba779962"}, - {file = "numpy-1.23.4.tar.gz", hash = "sha256:ed2cc92af0efad20198638c69bb0fc2870a58dabfba6eb722c933b48556c686c"}, -] -oauthlib = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] -ortools = [ - {file = "ortools-9.4.1874-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c7452ef873959c5b778ab0bca9d07960bd678a02ce1f99c2900374483642958d"}, - {file = "ortools-9.4.1874-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1d5764186a04168777c89c27a752854e873c02d5f2f0f774ea0f4e98cfd9294"}, - {file = "ortools-9.4.1874-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:faae0765f83f0ab934f7429f4d266b13c1b804fcdac78862f766ef57606388ef"}, - {file = 
"ortools-9.4.1874-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a840bdcdc04b646953355d3a04c8b2ac08409edf103bce323bbe7caec28b2aa"}, - {file = "ortools-9.4.1874-cp310-cp310-win_amd64.whl", hash = "sha256:809474eed0b1b05489c0396a0e905dad15d66cd2672ed90e6a0382c1340123c3"}, - {file = "ortools-9.4.1874-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:cf01ca0fc6ce02460b485fa429cbc836585f685cf5269cd3c1f8af6995809659"}, - {file = "ortools-9.4.1874-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c13ed1d8297f996e8be778a88dda504bceb8b8de28a26a8a489f27faadd8ca8"}, - {file = "ortools-9.4.1874-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4e1e38911344ce265cb3b58ddc5fa14d9136a171a07a4ce6f668c14263d479"}, - {file = "ortools-9.4.1874-cp36-cp36m-win_amd64.whl", hash = "sha256:b7e8a08901f3e8b1e2b685fa338705c33b6bc13494b695587609eef7cec36d75"}, - {file = "ortools-9.4.1874-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:70863800eaa90eb72bf7057783e2c33e26c45a120c4b92fc2432efddfb5b7ea1"}, - {file = "ortools-9.4.1874-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b5e4131041e763c035747eaa942fe6f39a5ece7c4987c31927c42e0c2406126"}, - {file = "ortools-9.4.1874-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f8b8002068ff8ceed80512488fb03ce26fed46c7e8924820ea6816a30edabfc"}, - {file = "ortools-9.4.1874-cp37-cp37m-win_amd64.whl", hash = "sha256:c61b09d951027fef534065ea5752928a2a5519fb64c8f078434e33c110742813"}, - {file = "ortools-9.4.1874-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:6985ca897d9c412d85912b2fb8256e0e5b58dcbb921f4fe06f36df34a59be4ab"}, - {file = "ortools-9.4.1874-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eb7aeeadc2d17ca7170bda9dac5beedfb4d8359f6e6530be68d219525f30cc27"}, - {file = "ortools-9.4.1874-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:52e49d97adc8684dce2185dbab9b772f5d65a1823a7e71959997aa14f34df2c4"}, - {file = "ortools-9.4.1874-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edff7689201afd24e63b5a1d5375ccbc75c6de3efe7e5ecc7fcbef22054c3a"}, - {file = "ortools-9.4.1874-cp38-cp38-win_amd64.whl", hash = "sha256:160b833363b5acbf53e998df3f031673ed98a879962bcafd59388cf8adc2fb47"}, - {file = "ortools-9.4.1874-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:98295c0f7ba0f363292775aab958aae5f6384e4b0e3f23565bf98a38215e9f41"}, - {file = "ortools-9.4.1874-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d6cd9ef6758377fc892026db51e73f5233727c5054deead7965ef57aa6812b91"}, - {file = "ortools-9.4.1874-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38d053961d0f804a8018bf6fa907f51ddb4259777e9d5994b78aed5dd01587f"}, - {file = "ortools-9.4.1874-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:283aa430551b0b6a0553ee5564f51cbb641ba6e534ca9822c72628a3cbcbb425"}, - {file = "ortools-9.4.1874-cp39-cp39-win_amd64.whl", hash = "sha256:78d7f69726a96af990e943b0df13e4af81faa8c8c2dc4ee9bc629a4fcc2bb066"}, -] -pathspec = [ - {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, - {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, -] -pillow = [ - {file = "Pillow-8.4.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:81f8d5c81e483a9442d72d182e1fb6dcb9723f289a57e8030811bac9ea3fef8d"}, - {file = "Pillow-8.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f97cfb1e5a392d75dd8b9fd274d205404729923840ca94ca45a0af57e13dbe6"}, - {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb9fc393f3c61f9054e1ed26e6fe912c7321af2f41ff49d3f83d05bacf22cc78"}, - {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d82cdb63100ef5eedb8391732375e6d05993b765f72cb34311fab92103314649"}, - {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62cc1afda735a8d109007164714e73771b499768b9bb5afcbbee9d0ff374b43f"}, - {file = "Pillow-8.4.0-cp310-cp310-win32.whl", hash = "sha256:e3dacecfbeec9a33e932f00c6cd7996e62f53ad46fbe677577394aaa90ee419a"}, - {file = "Pillow-8.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:620582db2a85b2df5f8a82ddeb52116560d7e5e6b055095f04ad828d1b0baa39"}, - {file = "Pillow-8.4.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:1bc723b434fbc4ab50bb68e11e93ce5fb69866ad621e3c2c9bdb0cd70e345f55"}, - {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72cbcfd54df6caf85cc35264c77ede902452d6df41166010262374155947460c"}, - {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70ad9e5c6cb9b8487280a02c0ad8a51581dcbbe8484ce058477692a27c151c0a"}, - {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25a49dc2e2f74e65efaa32b153527fc5ac98508d502fa46e74fa4fd678ed6645"}, - {file = "Pillow-8.4.0-cp36-cp36m-win32.whl", hash = "sha256:93ce9e955cc95959df98505e4608ad98281fff037350d8c2671c9aa86bcf10a9"}, - {file = "Pillow-8.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2e4440b8f00f504ee4b53fe30f4e381aae30b0568193be305256b1462216feff"}, - {file = "Pillow-8.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8c803ac3c28bbc53763e6825746f05cc407b20e4a69d0122e526a582e3b5e153"}, - {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8a17b5d948f4ceeceb66384727dde11b240736fddeda54ca740b9b8b1556b29"}, - {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1394a6ad5abc838c5cd8a92c5a07535648cdf6d09e8e2d6df916dfa9ea86ead8"}, - {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:792e5c12376594bfcb986ebf3855aa4b7c225754e9a9521298e460e92fb4a488"}, - {file = "Pillow-8.4.0-cp37-cp37m-win32.whl", hash = "sha256:d99ec152570e4196772e7a8e4ba5320d2d27bf22fdf11743dd882936ed64305b"}, - {file = "Pillow-8.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7b7017b61bbcdd7f6363aeceb881e23c46583739cb69a3ab39cb384f6ec82e5b"}, - {file = "Pillow-8.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:d89363f02658e253dbd171f7c3716a5d340a24ee82d38aab9183f7fdf0cdca49"}, - {file = "Pillow-8.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a0956fdc5defc34462bb1c765ee88d933239f9a94bc37d132004775241a7585"}, - {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b7bb9de00197fb4261825c15551adf7605cf14a80badf1761d61e59da347779"}, - {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72b9e656e340447f827885b8d7a15fc8c4e68d410dc2297ef6787eec0f0ea409"}, - {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5a4532a12314149d8b4e4ad8ff09dde7427731fcfa5917ff16d0291f13609df"}, - {file = "Pillow-8.4.0-cp38-cp38-win32.whl", hash = "sha256:82aafa8d5eb68c8463b6e9baeb4f19043bb31fefc03eb7b216b51e6a9981ae09"}, - {file = "Pillow-8.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:066f3999cb3b070a95c3652712cffa1a748cd02d60ad7b4e485c3748a04d9d76"}, - {file = "Pillow-8.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:5503c86916d27c2e101b7f71c2ae2cddba01a2cf55b8395b0255fd33fa4d1f1a"}, - {file = "Pillow-8.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4acc0985ddf39d1bc969a9220b51d94ed51695d455c228d8ac29fcdb25810e6e"}, - {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b052a619a8bfcf26bd8b3f48f45283f9e977890263e4571f2393ed8898d331b"}, - {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:493cb4e415f44cd601fcec11c99836f707bb714ab03f5ed46ac25713baf0ff20"}, - 
{file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8831cb7332eda5dc89b21a7bce7ef6ad305548820595033a4b03cf3091235ed"}, - {file = "Pillow-8.4.0-cp39-cp39-win32.whl", hash = "sha256:5e9ac5f66616b87d4da618a20ab0a38324dbe88d8a39b55be8964eb520021e02"}, - {file = "Pillow-8.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:3eb1ce5f65908556c2d8685a8f0a6e989d887ec4057326f6c22b24e8a172c66b"}, - {file = "Pillow-8.4.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ddc4d832a0f0b4c52fff973a0d44b6c99839a9d016fe4e6a1cb8f3eea96479c2"}, - {file = "Pillow-8.4.0-pp36-pypy36_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3e5ddc44c14042f0844b8cf7d2cd455f6cc80fd7f5eefbe657292cf601d9ad"}, - {file = "Pillow-8.4.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c70e94281588ef053ae8998039610dbd71bc509e4acbc77ab59d7d2937b10698"}, - {file = "Pillow-8.4.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:3862b7256046fcd950618ed22d1d60b842e3a40a48236a5498746f21189afbbc"}, - {file = "Pillow-8.4.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4901622493f88b1a29bd30ec1a2f683782e57c3c16a2dbc7f2595ba01f639df"}, - {file = "Pillow-8.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c471a734240653a0ec91dec0996696eea227eafe72a33bd06c92697728046b"}, - {file = "Pillow-8.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:244cf3b97802c34c41905d22810846802a3329ddcb93ccc432870243211c79fc"}, - {file = "Pillow-8.4.0.tar.gz", hash = "sha256:b8e2f83c56e141920c39464b852de3719dfbfb6e3c99a2d8da0edf4fb33176ed"}, -] -platformdirs = [ - {file = "platformdirs-2.5.3-py3-none-any.whl", hash = "sha256:0cb405749187a194f444c25c82ef7225232f11564721eabffc6ec70df83b11cb"}, - {file = "platformdirs-2.5.3.tar.gz", hash = "sha256:6e52c21afff35cb659c6e52d8b4d61b9bd544557180440538f255d9382c8cbe0"}, -] -protobuf = [ - {file = 
"protobuf-4.21.9-cp310-abi3-win32.whl", hash = "sha256:6e0be9f09bf9b6cf497b27425487706fa48c6d1632ddd94dab1a5fe11a422392"}, - {file = "protobuf-4.21.9-cp310-abi3-win_amd64.whl", hash = "sha256:a7d0ea43949d45b836234f4ebb5ba0b22e7432d065394b532cdca8f98415e3cf"}, - {file = "protobuf-4.21.9-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b5ab0b8918c136345ff045d4b3d5f719b505b7c8af45092d7f45e304f55e50a1"}, - {file = "protobuf-4.21.9-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:2c9c2ed7466ad565f18668aa4731c535511c5d9a40c6da39524bccf43e441719"}, - {file = "protobuf-4.21.9-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:e575c57dc8b5b2b2caa436c16d44ef6981f2235eb7179bfc847557886376d740"}, - {file = "protobuf-4.21.9-cp37-cp37m-win32.whl", hash = "sha256:9227c14010acd9ae7702d6467b4625b6fe853175a6b150e539b21d2b2f2b409c"}, - {file = "protobuf-4.21.9-cp37-cp37m-win_amd64.whl", hash = "sha256:a419cc95fca8694804709b8c4f2326266d29659b126a93befe210f5bbc772536"}, - {file = "protobuf-4.21.9-cp38-cp38-win32.whl", hash = "sha256:5b0834e61fb38f34ba8840d7dcb2e5a2f03de0c714e0293b3963b79db26de8ce"}, - {file = "protobuf-4.21.9-cp38-cp38-win_amd64.whl", hash = "sha256:84ea107016244dfc1eecae7684f7ce13c788b9a644cd3fca5b77871366556444"}, - {file = "protobuf-4.21.9-cp39-cp39-win32.whl", hash = "sha256:f9eae277dd240ae19bb06ff4e2346e771252b0e619421965504bd1b1bba7c5fa"}, - {file = "protobuf-4.21.9-cp39-cp39-win_amd64.whl", hash = "sha256:6e312e280fbe3c74ea9e080d9e6080b636798b5e3939242298b591064470b06b"}, - {file = "protobuf-4.21.9-py2.py3-none-any.whl", hash = "sha256:7eb8f2cc41a34e9c956c256e3ac766cf4e1a4c9c925dc757a41a01be3e852965"}, - {file = "protobuf-4.21.9-py3-none-any.whl", hash = "sha256:48e2cd6b88c6ed3d5877a3ea40df79d08374088e89bedc32557348848dff250b"}, - {file = "protobuf-4.21.9.tar.gz", hash = "sha256:61f21493d96d2a77f9ca84fefa105872550ab5ef71d21c458eb80edcf4885a99"}, -] -psycopg2-binary = [ - {file = "psycopg2-binary-2.9.5.tar.gz", hash = 
"sha256:33e632d0885b95a8b97165899006c40e9ecdc634a529dca7b991eb7de4ece41c"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:0775d6252ccb22b15da3b5d7adbbf8cfe284916b14b6dc0ff503a23edb01ee85"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec46ed947801652c9643e0b1dc334cfb2781232e375ba97312c2fc256597632"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3520d7af1ebc838cc6084a3281145d5cd5bdd43fdef139e6db5af01b92596cb7"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cbc554ba47ecca8cd3396ddaca85e1ecfe3e48dd57dc5e415e59551affe568e"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:5d28ecdf191db558d0c07d0f16524ee9d67896edf2b7990eea800abeb23ebd61"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:b9c33d4aef08dfecbd1736ceab8b7b3c4358bf10a0121483e5cd60d3d308cc64"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:05b3d479425e047c848b9782cd7aac9c6727ce23181eb9647baf64ffdfc3da41"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1e491e6489a6cb1d079df8eaa15957c277fdedb102b6a68cfbf40c4994412fd0"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:9e32cedc389bcb76d9f24ea8a012b3cb8385ee362ea437e1d012ffaed106c17d"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:46850a640df62ae940e34a163f72e26aca1f88e2da79148e1862faaac985c302"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-win32.whl", hash = "sha256:3d790f84201c3698d1bfb404c917f36e40531577a6dda02e45ba29b64d539867"}, - {file = "psycopg2_binary-2.9.5-cp310-cp310-win_amd64.whl", hash = 
"sha256:1764546ffeaed4f9428707be61d68972eb5ede81239b46a45843e0071104d0dd"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-macosx_10_9_universal2.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:426c2ae999135d64e6a18849a7d1ad0e1bd007277e4a8f4752eaa40a96b550ff"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cf1d44e710ca3a9ce952bda2855830fe9f9017ed6259e01fcd71ea6287565f5"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024030b13bdcbd53d8a93891a2cf07719715724fc9fee40243f3bd78b4264b8f"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcda1c84a1c533c528356da5490d464a139b6e84eb77cc0b432e38c5c6dd7882"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:2ef892cabdccefe577088a79580301f09f2a713eb239f4f9f62b2b29cafb0577"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-manylinux_2_24_ppc64le.whl", hash = "sha256:af0516e1711995cb08dc19bbd05bec7dbdebf4185f68870595156718d237df3e"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e72c91bda9880f097c8aa3601a2c0de6c708763ba8128006151f496ca9065935"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e67b3c26e9b6d37b370c83aa790bbc121775c57bfb096c2e77eacca25fd0233b"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5fc447058d083b8c6ac076fc26b446d44f0145308465d745fba93a28c14c9e32"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d892bfa1d023c3781a3cab8dd5af76b626c483484d782e8bd047c180db590e4c"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-win32.whl", hash = "sha256:2abccab84d057723d2ca8f99ff7b619285d40da6814d50366f61f0fc385c3903"}, - {file = "psycopg2_binary-2.9.5-cp311-cp311-win_amd64.whl", hash = 
"sha256:bef7e3f9dc6f0c13afdd671008534be5744e0e682fb851584c8c3a025ec09720"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:6e63814ec71db9bdb42905c925639f319c80e7909fb76c3b84edc79dadef8d60"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:212757ffcecb3e1a5338d4e6761bf9c04f750e7d027117e74aa3cd8a75bb6fbd"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8a9bcab7b6db2e3dbf65b214dfc795b4c6b3bb3af922901b6a67f7cb47d5f8"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:56b2957a145f816726b109ee3d4e6822c23f919a7d91af5a94593723ed667835"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:f95b8aca2703d6a30249f83f4fe6a9abf2e627aa892a5caaab2267d56be7ab69"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:70831e03bd53702c941da1a1ad36c17d825a24fbb26857b40913d58df82ec18b"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:dbc332beaf8492b5731229a881807cd7b91b50dbbbaf7fe2faf46942eda64a24"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:2d964eb24c8b021623df1c93c626671420c6efadbdb8655cb2bd5e0c6fa422ba"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:95076399ec3b27a8f7fa1cc9a83417b1c920d55cf7a97f718a94efbb96c7f503"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:3fc33295cfccad697a97a76dec3f1e94ad848b7b163c3228c1636977966b51e2"}, - {file = "psycopg2_binary-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:02551647542f2bf89073d129c73c05a25c372fc0a49aa50e0de65c3c143d8bd0"}, - {file = 
"psycopg2_binary-2.9.5-cp37-cp37m-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:63e318dbe52709ed10d516a356f22a635e07a2e34c68145484ed96a19b0c4c68"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7e518a0911c50f60313cb9e74a169a65b5d293770db4770ebf004245f24b5c5"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9d38a4656e4e715d637abdf7296e98d6267df0cc0a8e9a016f8ba07e4aa3eeb"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:68d81a2fe184030aa0c5c11e518292e15d342a667184d91e30644c9d533e53e1"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:7ee3095d02d6f38bd7d9a5358fcc9ea78fcdb7176921528dd709cc63f40184f5"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:46512486be6fbceef51d7660dec017394ba3e170299d1dc30928cbedebbf103a"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b911dfb727e247340d36ae20c4b9259e4a64013ab9888ccb3cbba69b77fd9636"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:422e3d43b47ac20141bc84b3d342eead8d8099a62881a501e97d15f6addabfe9"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c5682a45df7d9642eff590abc73157c887a68f016df0a8ad722dcc0f888f56d7"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:b8104f709590fff72af801e916817560dbe1698028cd0afe5a52d75ceb1fce5f"}, - {file = "psycopg2_binary-2.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:7b3751857da3e224f5629400736a7b11e940b5da5f95fa631d86219a1beaafec"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = 
"sha256:043a9fd45a03858ff72364b4b75090679bd875ee44df9c0613dc862ca6b98460"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9ffdc51001136b699f9563b1c74cc1f8c07f66ef7219beb6417a4c8aaa896c28"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c15ba5982c177bc4b23a7940c7e4394197e2d6a424a2d282e7c236b66da6d896"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc85b3777068ed30aff8242be2813038a929f2084f69e43ef869daddae50f6ee"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:215d6bf7e66732a514f47614f828d8c0aaac9a648c46a831955cb103473c7147"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:7d07f552d1e412f4b4e64ce386d4c777a41da3b33f7098b6219012ba534fb2c2"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a0adef094c49f242122bb145c3c8af442070dc0e4312db17e49058c1702606d4"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:00475004e5ed3e3bf5e056d66e5dcdf41a0dc62efcd57997acd9135c40a08a50"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7d88db096fa19d94f433420eaaf9f3c45382da2dd014b93e4bf3215639047c16"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:902844f9c4fb19b17dfa84d9e2ca053d4a4ba265723d62ea5c9c26b38e0aa1e6"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-win32.whl", hash = "sha256:4e7904d1920c0c89105c0517dc7e3f5c20fb4e56ba9cdef13048db76947f1d79"}, - {file = "psycopg2_binary-2.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:a36a0e791805aa136e9cbd0ffa040d09adec8610453ee8a753f23481a0057af5"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-macosx_10_15_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = 
"sha256:25382c7d174c679ce6927c16b6fbb68b10e56ee44b1acb40671e02d29f2fce7c"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9c38d3869238e9d3409239bc05bc27d6b7c99c2a460ea337d2814b35fb4fea1b"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c6527c8efa5226a9e787507652dd5ba97b62d29b53c371a85cd13f957fe4d42"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e59137cdb970249ae60be2a49774c6dfb015bd0403f05af1fe61862e9626642d"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:d4c7b3a31502184e856df1f7bbb2c3735a05a8ce0ade34c5277e1577738a5c91"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:b9a794cef1d9c1772b94a72eec6da144c18e18041d294a9ab47669bc77a80c1d"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5254cbd4f4855e11cebf678c1a848a3042d455a22a4ce61349c36aafd4c2267"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c5e65c6ac0ae4bf5bef1667029f81010b6017795dcb817ba5c7b8a8d61fab76f"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:74eddec4537ab1f701a1647214734bc52cee2794df748f6ae5908e00771f180a"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:01ad49d68dd8c5362e4bfb4158f2896dc6e0c02e87b8a3770fc003459f1a4425"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-win32.whl", hash = "sha256:937880290775033a743f4836aa253087b85e62784b63fd099ee725d567a48aa1"}, - {file = "psycopg2_binary-2.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:484405b883630f3e74ed32041a87456c5e0e63a8e3429aa93e8714c366d62bd1"}, -] -pyasn1 = [ - {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, - {file = "pyasn1-0.4.8-py2.5.egg", hash = 
"sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, - {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, - {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, - {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, - {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, - {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, - {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, - {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, - {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, - {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, - {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, -] -pyasn1-modules = [ - {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, - {file = "pyasn1_modules-0.2.8-py2.4.egg", hash = "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199"}, - {file = "pyasn1_modules-0.2.8-py2.5.egg", hash = "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"}, - {file = "pyasn1_modules-0.2.8-py2.6.egg", hash = "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb"}, - {file = "pyasn1_modules-0.2.8-py2.7.egg", hash = 
"sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8"}, - {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, - {file = "pyasn1_modules-0.2.8-py3.1.egg", hash = "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d"}, - {file = "pyasn1_modules-0.2.8-py3.2.egg", hash = "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45"}, - {file = "pyasn1_modules-0.2.8-py3.3.egg", hash = "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4"}, - {file = "pyasn1_modules-0.2.8-py3.4.egg", hash = "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811"}, - {file = "pyasn1_modules-0.2.8-py3.5.egg", hash = "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed"}, - {file = "pyasn1_modules-0.2.8-py3.6.egg", hash = "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0"}, - {file = "pyasn1_modules-0.2.8-py3.7.egg", hash = "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd"}, -] -pycodestyle = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, -] -pycparser = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] -pydocstyle = [ - {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, - {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, -] -pyflakes = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = 
"sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, -] -pygithub = [ - {file = "PyGithub-1.57-py3-none-any.whl", hash = "sha256:5822febeac2391f1306c55a99af2bc8f86c8bf82ded000030cd02c18f31b731f"}, - {file = "PyGithub-1.57.tar.gz", hash = "sha256:c273f252b278fb81f1769505cc6921bdb6791e1cebd6ac850cc97dad13c31ff3"}, -] -pyjwt = [ - {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, - {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, -] -pynacl = [ - {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = 
"sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, - {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] -rcssmin = [ - {file = "rcssmin-1.1.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2211a5c91ea14a5937b57904c9121f8bfef20987825e55368143da7d25446e3b"}, - {file = "rcssmin-1.1.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:7085d1b51dd2556f3aae03947380f6e9e1da29fb1eeadfa6766b7f105c54c9ff"}, - {file = "rcssmin-1.1.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:1512223b6a687bb747e4e531187bd49a56ed71287e7ead9529cbaa1ca4718a0a"}, - {file = "rcssmin-1.1.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:6158d0d86cd611c5304d738dc3d6cfeb23864dd78ad0d83a633f443696ac5d77"}, - {file = "rcssmin-1.1.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:0a6aae7e119509445bf7aa6da6ca0f285cc198273c20f470ad999ff83bbadcf9"}, - {file = "rcssmin-1.1.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:506e33ab4c47051f7deae35b6d8dbb4a5c025f016e90a830929a1ecc7daa1682"}, - {file = "rcssmin-1.1.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:352dd3a78eb914bb1cb269ac2b66b3154f2490a52ab605558c681de3fb5194d2"}, - {file = "rcssmin-1.1.0-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:30f5522285065cae0164d20068377d84b5d10b414156115f8729b034d0ea5e8b"}, - {file = "rcssmin-1.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:49807735f26f59404194f1e6f93254b6d5b6f7748c2a954f4470a86a40ff4c13"}, - {file = "rcssmin-1.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f1a37bbd36b050813673e62ae6464467548628690bf4d48a938170e121e8616e"}, - {file = "rcssmin-1.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ddff3a41611664c7f1d9e3d8a9c1669e0e155ac0458e586ffa834dc5953e7d9f"}, - {file = "rcssmin-1.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8b659a88850e772c84cfac4520ec223de6807875e173d8ef3248ab7f90876066"}, - {file = "rcssmin-1.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:1d7c2719d014e4e4df4e33b75ae8067c7e246cf470eaec8585e06e2efac7586c"}, - {file = "rcssmin-1.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:37f1242e34ca273ed2c26cf778854e18dd11b31c6bfca60e23fce146c84667c1"}, - {file = "rcssmin-1.1.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f31c82d06ba2dbf33c20db9550157e80bb0c4cbd24575c098f0831d1d2e3c5df"}, - {file = "rcssmin-1.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7da63fee37edf204bbd86785edb4d7491642adbfd1d36fd230b7ccbbd8db1a6f"}, - {file = "rcssmin-1.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c28b9eb20982b45ebe6adef8bd2547e5ed314dafddfff4eba806b0f8c166cfd1"}, - {file = "rcssmin-1.1.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:32ccaebbbd4d56eab08cf26aed36f5d33389b9d1d3ca1fecf53eb6ab77760ddf"}, - {file = "rcssmin-1.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:7c44002b79f3656348196005b9522ec5e04f182b466f66d72b16be0bd03c13d8"}, - {file = "rcssmin-1.1.0.tar.gz", hash = "sha256:27fc400627fd3d328b7fe95af2a01f5d0af6b5af39731af5d071826a1f08e362"}, -] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = 
"sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -requests-oauthlib = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, -] -responses = [ - {file = "responses-0.22.0-py3-none-any.whl", hash = "sha256:dcf294d204d14c436fddcc74caefdbc5764795a40ff4e6a7740ed8ddbf3294be"}, - {file = "responses-0.22.0.tar.gz", hash = "sha256:396acb2a13d25297789a5866b4881cf4e46ffd49cc26c43ab1117f40b973102e"}, -] -rjsmin = [ - {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e18fe1a610fb105273bb369f61c2b0bd9e66a3f0792e27e4cac44e42ace1968b"}, - {file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:6c395ffc130332cca744f081ed5efd5699038dcb7a5d30c3ff4bc6adb5b30a62"}, - {file = "rjsmin-1.2.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3b14f4c2933ec194eb816b71a0854ce461b6419a3d852bf360344731ab28c0a6"}, - {file = "rjsmin-1.2.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:54fc30519365841b27556ccc1cb94c5b4413c384ff6d467442fddba66e2e325a"}, - {file = "rjsmin-1.2.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:40e7211a25d9a11ac9ff50446e41268c978555676828af86fa1866615823bfff"}, - {file = "rjsmin-1.2.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:99e5597a812b60058baa1457387dc79cca7d273b2a700dc98bfd20d43d60711d"}, - {file = "rjsmin-1.2.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:993935654c1311280e69665367d7e6ff694ac9e1609168cf51cae8c0307df0db"}, - {file = "rjsmin-1.2.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c81229ffe5b0a0d5b3b5d5e6d0431f182572de9e9a077e85dbae5757db0ab75c"}, - {file = "rjsmin-1.2.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:1c93b29fd725e61718299ffe57de93ff32d71b313eaabbfcc7bd32ddb82831d5"}, - {file = 
"rjsmin-1.2.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:38a4474ed52e1575fb9da983ec8657faecd8ab3738508d36e04f87769411fd3d"}, - {file = "rjsmin-1.2.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1622fbb6c6a8daaf77da13cc83356539bfe79c1440f9664b02c7f7b150b9a18e"}, - {file = "rjsmin-1.2.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4387a00777faddf853eebdece9f2e56ebaf243c3f24676a9de6a20c5d4f3d731"}, - {file = "rjsmin-1.2.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:86c4da7285ddafe6888cb262da563570f28e4a31146b5164a7a6947b1222196b"}, - {file = "rjsmin-1.2.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d63e193a2f932a786ae82068aa76d1d126fcdff8582094caff9e5e66c4dcc124"}, - {file = "rjsmin-1.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:05efa485dfddb6418e3b86d8862463aa15641a61f6ae05e7e6de8f116ee77c69"}, - {file = "rjsmin-1.2.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:b6a7c8c8d19e154334f640954e43e57283e87bb4a2f6e23295db14eea8e9fc1d"}, - {file = "rjsmin-1.2.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2ed83aca637186bafdc894b4b7fc3657e2d74014ccca7d3d69122c1e82675216"}, - {file = "rjsmin-1.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:41c7c3910f7b8816e37366b293e576ddecf696c5f2197d53cf2c1526ac336646"}, - {file = "rjsmin-1.2.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8944a8a55ac825b8e5ec29f341ecb7574697691ef416506885898d2f780fb4ca"}, - {file = "rjsmin-1.2.0.tar.gz", hash = "sha256:6c529feb6c400984452494c52dd9fdf59185afeacca2afc5174a28ab37751a1b"}, -] -rsa = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] -s3transfer = [ - {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, - {file = "s3transfer-0.6.0.tar.gz", hash = 
"sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -snowballstemmer = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] -sqlparse = [ - {file = "sqlparse-0.4.3-py3-none-any.whl", hash = "sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34"}, - {file = "sqlparse-0.4.3.tar.gz", hash = "sha256:69ca804846bb114d2ec380e4360a8a340db83f0ccf3afceeb1404df028f57268"}, -] -text-unidecode = [ - {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, - {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -types-toml = [ - {file = "types-toml-0.10.8.5.tar.gz", hash = "sha256:bf80fce7d2d74be91148f47b88d9ae5adeb1024abef22aa2fdbabc036d6b8b3c"}, - {file = "types_toml-0.10.8.5-py3-none-any.whl", hash = "sha256:2432017febe43174af0f3c65f03116e3d3cf43e7e1406b8200e106da8cf98992"}, -] -tzdata = [ - {file = 
"tzdata-2022.6-py2.py3-none-any.whl", hash = "sha256:04a680bdc5b15750c39c12a448885a51134a27ec9af83667663f0b3a1bf3f342"}, - {file = "tzdata-2022.6.tar.gz", hash = "sha256:91f11db4503385928c15598c98573e3af07e7229181bee5375bd30f1695ddcae"}, -] -uritemplate = [ - {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, - {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, -] -urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, -] -uwsgi = [ - {file = "uwsgi-2.0.21.tar.gz", hash = "sha256:35a30d83791329429bc04fe44183ce4ab512fcf6968070a7bfba42fc5a0552a9"}, -] -werkzeug = [ - {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"}, - {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"}, -] -wrapt = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = 
"sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = 
"wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, -] -xmltodict = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, -] +content-hash = "ef818f0f3b03b6417ebd2b06dafe0aee16c2e041f24dd93ebd2cdffd61c2bb7a" diff --git a/pyproject.toml b/pyproject.toml index 7c75d701..d0311286 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,8 @@ django-bootstrap5 = "^22.1" django-easy-admin-object-actions = "^1.1.0" boto3 = "^1.26.78" moto = "^4.1.3" +django-tinymce = "^3.4.0" +django-bleach = "^3.0.1" [tool.poetry.extras] production = ["uwsgi", "psycopg2-binary"] diff --git a/website/giphousewebsite/settings/base.py b/website/giphousewebsite/settings/base.py index 99b8748b..15fa74e8 100644 --- a/website/giphousewebsite/settings/base.py +++ b/website/giphousewebsite/settings/base.py @@ -32,6 +32,8 @@ 'admin_auto_filters', 
'admin_totals', 'django_easy_admin_object_actions', + 'tinymce', + 'django_bleach', 'questionnaires.apps.QuestionnairesConfig', 'github_oauth.apps.GithubConfig', @@ -146,3 +148,44 @@ "https://www.googleapis.com/auth/admin.directory.group", "https://www.googleapis.com/auth/apps.groups.settings", ] + +TINYMCE_DEFAULT_CONFIG = { + "max_height": 500, + "menubar": False, + "plugins": "autolink autoresize link image code media paste lists", + "toolbar": "h2 h3 | bold italic underline strikethrough | image | link unlink " + "| bullist numlist | undo redo | code", + "contextmenu": "bold italic underline strikethrough | link", + "paste_as_text": True, + "relative_urls": False, + "remove_script_host": False, + "autoresize_bottom_margin": 50, +} + +# HTML input sanitization settings for the bleach template filter +BLEACH_ALLOWED_TAGS = [ + "h2", + "h3", + "p", + "a", + "div", + "strong", + "em", + "i", + "b", + "ul", + "li", + "br", + "ol", + "img", + "span", +] + +BLEACH_ALLOWED_ATTRIBUTES = { + "*": ["class", "style"], + "a": ["href", "rel", "target", "title"], + "img": ["alt", "title", "src"], +} + +BLEACH_STRIP_TAGS = True +BLEACH_STRIP_COMMENTS = False diff --git a/website/giphousewebsite/urls.py b/website/giphousewebsite/urls.py index db11cedd..9c4c683d 100644 --- a/website/giphousewebsite/urls.py +++ b/website/giphousewebsite/urls.py @@ -38,4 +38,5 @@ def get_redirect_url(self, *args, **kwargs): path("projects/", include("projects.urls")), path("reservations/", include("room_reservation.urls")), path("lectures/", include("lecture_registrations.urls")), + path("tinymce/", include("tinymce.urls")), ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) diff --git a/website/projects/migrations/0007_alter_project_description.py b/website/projects/migrations/0007_alter_project_description.py new file mode 100644 index 00000000..b816e5f0 --- /dev/null +++ b/website/projects/migrations/0007_alter_project_description.py @@ -0,0 +1,19 @@ +# Generated by Django 4.1.3 
on 2023-02-15 20:50 + +from django.db import migrations +import tinymce.models + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0006_alter_project_unique_together_project_slug_and_more"), + ] + + operations = [ + migrations.AlterField( + model_name="project", + name="description", + field=tinymce.models.HTMLField(), + ), + ] diff --git a/website/projects/migrations/0017_merge_20230609_1748.py b/website/projects/migrations/0017_merge_20230609_1748.py new file mode 100644 index 00000000..b9b993d9 --- /dev/null +++ b/website/projects/migrations/0017_merge_20230609_1748.py @@ -0,0 +1,13 @@ +# Generated by Django 4.1.3 on 2023-06-09 15:48 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("projects", "0007_alter_project_description"), + ("projects", "0016_awspolicy_base_ou_id"), + ] + + operations = [] diff --git a/website/projects/models.py b/website/projects/models.py index 0c904bc5..11db9f55 100644 --- a/website/projects/models.py +++ b/website/projects/models.py @@ -3,6 +3,8 @@ from django.db.models.signals import pre_delete from django.dispatch import receiver +from tinymce.models import HTMLField + from courses.models import Semester from registrations.models import Employee @@ -68,7 +70,7 @@ class Meta: slug = models.SlugField("slug", max_length=50, blank=False, null=False) semester = models.ForeignKey(Semester, on_delete=models.CASCADE) - description = models.TextField() + description = HTMLField() client = models.ForeignKey(Client, on_delete=models.SET_NULL, blank=True, null=True) comments = models.TextField( diff --git a/website/projects/templates/projects/index.html b/website/projects/templates/projects/index.html index f74185fa..db43e3b2 100644 --- a/website/projects/templates/projects/index.html +++ b/website/projects/templates/projects/index.html @@ -1,4 +1,5 @@ {% extends 'base.html' %} +{% load bleach_tags %} {% block title %}Projects - {{ block.super }}{% endblock %} @@ 
-14,11 +15,11 @@
    No projects found.
    {% if project.client.logo %}{% endif %}

    {{ project.name }}

    - {% if project.client %}
    By {{ project.client.name}}
    {% endif %} -

    {{ project.description|linebreaks }}

    + {% if project.client %}
    By {{ project.client.name }}
    {% endif %} +

    {{ project.description | bleach }}

    {% endfor %} {% endif %} -{% endblock %} \ No newline at end of file +{% endblock %}