From 9b4f7a3617cdc2dcbec20eb39f63f4732505678d Mon Sep 17 00:00:00 2001
From: Henk Berendsen <61596108+hb140502@users.noreply.github.com>
Date: Fri, 24 Feb 2023 11:01:05 +0100
Subject: [PATCH 01/32] AWS synchronisation button (#8)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed the redirect in synchronise_to_AWS so that it returns to the page from which the AWS sync button was pressed.
* Added tests and adjusted awssync.py so that the button_pressed function can be tested.
* Removed unnecessary docstrings.
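A minimal usage sketch (not part of this patch) of how the new admin view can be exercised end-to-end with Django's test client; the logged-in superuser admin_user is an assumption:

    from django.test import Client
    from django.urls import reverse

    client = Client()
    client.force_login(admin_user)  # admin_user: an existing superuser (assumption)
    response = client.get(reverse("admin:synchronise_to_aws"))
    assert response.status_code == 302  # redirects back to the project change list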
---
website/projects/admin.py | 8 ++++++++
website/projects/awssync.py | 15 +++++++++++++++
.../templates/admin/projects/change_list.html | 1 +
website/projects/tests/test_admin.py | 6 ++++++
website/projects/tests/test_awssync.py | 14 ++++++++++++++
5 files changed, 44 insertions(+)
create mode 100644 website/projects/awssync.py
create mode 100644 website/projects/tests/test_awssync.py
diff --git a/website/projects/admin.py b/website/projects/admin.py
index 0a39d439..7fae8a6d 100644
--- a/website/projects/admin.py
+++ b/website/projects/admin.py
@@ -12,6 +12,7 @@
from mailing_lists.models import MailingList
+from projects.awssync import AWSSync
from projects.forms import ProjectAdminForm, RepositoryInlineForm
from projects.githubsync import GitHubSync
from projects.models import Client, Project, Repository
@@ -171,6 +172,12 @@ def synchronise_current_projects_to_GitHub(self, request):
],
)
+ def synchronise_to_AWS(self, request):
+ """Synchronise to Amazon Web Services."""
+ sync = AWSSync()
+ sync.button_pressed()
+ return redirect("admin:projects_project_changelist")
+
def get_urls(self):
"""Get admin urls."""
urls = super().get_urls()
@@ -180,6 +187,7 @@ def get_urls(self):
self.admin_site.admin_view(self.synchronise_current_projects_to_GitHub),
name="synchronise_to_github",
),
+ path("sync-to-aws/", self.admin_site.admin_view(self.synchronise_to_AWS), name="synchronise_to_aws"),
]
return custom_urls + urls
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
new file mode 100644
index 00000000..6ebc8e1a
--- /dev/null
+++ b/website/projects/awssync.py
@@ -0,0 +1,15 @@
+class AWSSync:
+ """Synchronise with Amazon Web Services."""
+
+ def __init__(self):
+ """Create an AWSSync instance."""
+ print("Created AWSSync instance")
+
+ def button_pressed(self):
+ """
+ Print debug message to show that the button has been pressed.
+
+ :return: True if function executes successfully
+ """
+ print("Pressed button")
+ return True
diff --git a/website/projects/templates/admin/projects/change_list.html b/website/projects/templates/admin/projects/change_list.html
index 82da6c2e..9e2f1890 100644
--- a/website/projects/templates/admin/projects/change_list.html
+++ b/website/projects/templates/admin/projects/change_list.html
@@ -4,6 +4,7 @@
{% block object-tools-items %}
Synchronize projects of the current semester to GitHub
+ Synchronize projects of the current semester to AWS
{{ block.super }}
{% endblock %}
diff --git a/website/projects/tests/test_admin.py b/website/projects/tests/test_admin.py
index d8778d0c..25f0328a 100644
--- a/website/projects/tests/test_admin.py
+++ b/website/projects/tests/test_admin.py
@@ -84,6 +84,7 @@ def setUp(self):
self.sync_mock.users_removed = 1
self.sync_mock.repos_archived = 1
self.github_mock = MagicMock(return_value=self.sync_mock)
+ self.aws_mock = MagicMock()
messages.error = MagicMock()
messages.warning = MagicMock()
messages.success = MagicMock()
@@ -233,6 +234,11 @@ def test_synchronise_current_projects_to_GitHub(self):
self.assertNotIn(self.project_archived, args[1])
self.project_admin.synchronise_to_GitHub = original_sync_action
+ def test_synchronise_to_AWS(self):
+ with patch("projects.admin.AWSSync", self.aws_mock):
+ self.project_admin.synchronise_to_AWS(self.request)
+ self.aws_mock.assert_called_once()
+
def test_archive_all_repositories(self):
self.project_admin.archive_all_repositories(self.request, Project.objects.all())
self.repo1.refresh_from_db()
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
new file mode 100644
index 00000000..7983dc09
--- /dev/null
+++ b/website/projects/tests/test_awssync.py
@@ -0,0 +1,14 @@
+from django.test import TestCase
+
+from projects import awssync
+
+
+class AWSSyncTest(TestCase):
+ """Test AWSSync class."""
+
+ def setUp(self):
+ self.sync = awssync.AWSSync()
+
+ def test_button_pressed(self):
+ return_value = self.sync.button_pressed()
+ self.assertTrue(return_value)
From 2d4b50f78a77f7e1b44d3534be8fc46dd5c4350c Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Fri, 24 Feb 2023 12:07:16 +0100
Subject: [PATCH 02/32] Add boto3 and moto dependencies (#11)
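For context, a minimal sketch (assuming moto's 4.x decorator API) of how the two new dependencies are meant to work together: boto3 issues the AWS Organizations calls and moto intercepts them in tests, so no real AWS account is touched.

    import boto3
    from moto import mock_organizations

    @mock_organizations
    def organizations_smoke_test():
        # Runs entirely against moto's in-memory backend; the region is chosen arbitrarily.
        client = boto3.client("organizations", region_name="us-east-1")
        client.create_organization(FeatureSet="ALL")
        return client.describe_organization()["Organization"]["Id"]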
---
poetry.lock | 296 +++++++++++++++++++++++++++++++++++++++++++++----
pyproject.toml | 2 +
2 files changed, 278 insertions(+), 20 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 6381833a..9e1c0056 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -49,6 +49,38 @@ d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
+[[package]]
+name = "boto3"
+version = "1.26.78"
+description = "The AWS SDK for Python"
+category = "main"
+optional = false
+python-versions = ">= 3.7"
+
+[package.dependencies]
+botocore = ">=1.29.78,<1.30.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.6.0,<0.7.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
+
+[[package]]
+name = "botocore"
+version = "1.29.78"
+description = "Low-level, data-driven core of boto 3."
+category = "main"
+optional = false
+python-versions = ">= 3.7"
+
+[package.dependencies]
+jmespath = ">=0.7.1,<2.0.0"
+python-dateutil = ">=2.1,<3.0.0"
+urllib3 = ">=1.25.4,<1.27"
+
+[package.extras]
+crt = ["awscrt (==0.16.9)"]
+
[[package]]
name = "cachetools"
version = "5.2.0"
@@ -85,7 +117,7 @@ optional = false
python-versions = ">=3.6.0"
[package.extras]
-unicode-backport = ["unicodedata2"]
+unicode_backport = ["unicodedata2"]
[[package]]
name = "click"
@@ -151,7 +183,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
wrapt = ">=1.10,<2"
[package.extras]
-dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"]
+dev = ["PyTest (<5)", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "pytest", "pytest-cov", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"]
[[package]]
name = "django"
@@ -280,7 +312,6 @@ python-versions = "*"
[package.dependencies]
pycodestyle = "*"
-setuptools = "*"
[[package]]
name = "freezegun"
@@ -343,7 +374,7 @@ six = ">=1.9.0"
[package.extras]
aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"]
-enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
+enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
pyopenssl = ["pyopenssl (>=20.0.0)"]
reauth = ["pyu2f (>=0.1.5)"]
@@ -408,6 +439,28 @@ category = "main"
optional = false
python-versions = ">=3.5"
+[[package]]
+name = "jinja2"
+version = "3.1.2"
+description = "A very fast and expressive template engine."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "jmespath"
+version = "1.0.1"
+description = "JSON Matching Expressions"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
[[package]]
name = "libsass"
version = "0.21.0"
@@ -419,6 +472,14 @@ python-versions = "*"
[package.dependencies]
six = "*"
+[[package]]
+name = "markupsafe"
+version = "2.1.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
[[package]]
name = "mccabe"
version = "0.7.0"
@@ -427,6 +488,49 @@ category = "dev"
optional = false
python-versions = ">=3.6"
+[[package]]
+name = "moto"
+version = "4.1.3"
+description = ""
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+boto3 = ">=1.9.201"
+botocore = ">=1.12.201"
+cryptography = ">=3.3.1"
+Jinja2 = ">=2.10.1"
+python-dateutil = ">=2.1,<3.0.0"
+requests = ">=2.5"
+responses = ">=0.13.0"
+werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1"
+xmltodict = "*"
+
+[package.extras]
+all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.2.8)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
+apigatewayv2 = ["PyYAML (>=5.1)"]
+appsync = ["graphql-core"]
+awslambda = ["docker (>=2.5.1)"]
+batch = ["docker (>=2.5.1)"]
+cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
+ds = ["sshpubkeys (>=3.1.0)"]
+dynamodb = ["docker (>=2.5.1)"]
+dynamodbstreams = ["docker (>=2.5.1)"]
+ebs = ["sshpubkeys (>=3.1.0)"]
+ec2 = ["sshpubkeys (>=3.1.0)"]
+efs = ["sshpubkeys (>=3.1.0)"]
+eks = ["sshpubkeys (>=3.1.0)"]
+glue = ["pyparsing (>=3.0.7)"]
+iotdata = ["jsondiff (>=1.1.2)"]
+route53resolver = ["sshpubkeys (>=3.1.0)"]
+s3 = ["PyYAML (>=5.1)"]
+server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+ssm = ["PyYAML (>=5.1)"]
+xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"]
+
[[package]]
name = "mypy-extensions"
version = "0.4.3"
@@ -613,7 +717,7 @@ python-versions = ">=3.6"
cffi = ">=1.4.1"
[package.extras]
-docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"]
+docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
[[package]]
@@ -631,7 +735,7 @@ diagrams = ["jinja2", "railroad-diagrams"]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
-category = "dev"
+category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
@@ -662,7 +766,7 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-oauthlib"
@@ -679,6 +783,23 @@ requests = ">=2.0.0"
[package.extras]
rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
+[[package]]
+name = "responses"
+version = "0.22.0"
+description = "A utility library for mocking out the `requests` Python library."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+requests = ">=2.22.0,<3.0"
+toml = "*"
+types-toml = "*"
+urllib3 = ">=1.25.10"
+
+[package.extras]
+tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "types-requests"]
+
[[package]]
name = "rjsmin"
version = "1.2.0"
@@ -699,17 +820,18 @@ python-versions = ">=3.6,<4"
pyasn1 = ">=0.1.3"
[[package]]
-name = "setuptools"
-version = "65.5.1"
-description = "Easily download, build, install, upgrade, and uninstall Python packages"
-category = "dev"
+name = "s3transfer"
+version = "0.6.0"
+description = "An Amazon S3 Transfer Manager"
+category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">= 3.7"
+
+[package.dependencies]
+botocore = ">=1.12.36,<2.0a.0"
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
[[package]]
name = "six"
@@ -743,6 +865,14 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+category = "main"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+
[[package]]
name = "tomli"
version = "2.0.1"
@@ -751,6 +881,14 @@ category = "dev"
optional = false
python-versions = ">=3.7"
+[[package]]
+name = "types-toml"
+version = "0.10.8.5"
+description = "Typing stubs for toml"
+category = "main"
+optional = false
+python-versions = "*"
+
[[package]]
name = "tzdata"
version = "2022.6"
@@ -788,6 +926,20 @@ category = "main"
optional = true
python-versions = "*"
+[[package]]
+name = "werkzeug"
+version = "2.2.3"
+description = "The comprehensive WSGI web application library."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog"]
+
[[package]]
name = "wrapt"
version = "1.14.1"
@@ -796,13 +948,21 @@ category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+[[package]]
+name = "xmltodict"
+version = "0.13.0"
+description = "Makes working with XML feel like you are working with JSON"
+category = "main"
+optional = false
+python-versions = ">=3.4"
+
[extras]
-production = ["uwsgi", "uWSGI", "psycopg2-binary"]
+production = ["uwsgi", "psycopg2-binary"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
-content-hash = "cb906894eb8ca0d6f28ab7ea2ca335aff0ff3f43f32e0cabc9ab323703715abf"
+content-hash = "d812c41bd73a271e800f7a4969553f2b8b5a748e8d2f435c8ef5b1d953451f72"
[metadata.files]
absl-py = [
@@ -841,6 +1001,14 @@ black = [
{file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
{file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
]
+boto3 = [
+ {file = "boto3-1.26.78-py3-none-any.whl", hash = "sha256:0c593017fa49dbc34dcdbd5659208f2daf293a499d5f4d7e61978cd6b5d72a97"},
+ {file = "boto3-1.26.78.tar.gz", hash = "sha256:488bf63d65864ab7fcdf9337c5aa4d825d444e253738a60f80789916bacc47dc"},
+]
+botocore = [
+ {file = "botocore-1.29.78-py3-none-any.whl", hash = "sha256:656ac8822a1b6c887a8efe1172bcefa9c9c450face26dc39998a249e8c340a23"},
+ {file = "botocore-1.29.78.tar.gz", hash = "sha256:2bee6ed037590ef1e4884d944486232871513915f12a8590c63e3bb6046479bf"},
+]
cachetools = [
{file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"},
{file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"},
@@ -1084,6 +1252,14 @@ idna = [
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
+jinja2 = [
+ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
+ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+]
+jmespath = [
+ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
+ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
+]
libsass = [
{file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"},
{file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"},
@@ -1096,10 +1272,66 @@ libsass = [
{file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"},
{file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"},
]
+markupsafe = [
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
+ {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
+]
mccabe = [
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
+moto = [
+ {file = "moto-4.1.3-py2.py3-none-any.whl", hash = "sha256:dcd1d06662982cf3c94f36d6348251ccdcf62a1c5de5650425cb4e6f260ae7a0"},
+ {file = "moto-4.1.3.tar.gz", hash = "sha256:c8200ccaa9440c2e9daa0bd5e0bd768a719db5a2c82ea8d782f0e3fa09a3c5e2"},
+]
mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
@@ -1226,7 +1458,9 @@ protobuf = [
{file = "protobuf-4.21.9-cp38-cp38-win_amd64.whl", hash = "sha256:84ea107016244dfc1eecae7684f7ce13c788b9a644cd3fca5b77871366556444"},
{file = "protobuf-4.21.9-cp39-cp39-win32.whl", hash = "sha256:f9eae277dd240ae19bb06ff4e2346e771252b0e619421965504bd1b1bba7c5fa"},
{file = "protobuf-4.21.9-cp39-cp39-win_amd64.whl", hash = "sha256:6e312e280fbe3c74ea9e080d9e6080b636798b5e3939242298b591064470b06b"},
+ {file = "protobuf-4.21.9-py2.py3-none-any.whl", hash = "sha256:7eb8f2cc41a34e9c956c256e3ac766cf4e1a4c9c925dc757a41a01be3e852965"},
{file = "protobuf-4.21.9-py3-none-any.whl", hash = "sha256:48e2cd6b88c6ed3d5877a3ea40df79d08374088e89bedc32557348848dff250b"},
+ {file = "protobuf-4.21.9.tar.gz", hash = "sha256:61f21493d96d2a77f9ca84fefa105872550ab5ef71d21c458eb80edcf4885a99"},
]
psycopg2-binary = [
{file = "psycopg2-binary-2.9.5.tar.gz", hash = "sha256:33e632d0885b95a8b97165899006c40e9ecdc634a529dca7b991eb7de4ece41c"},
@@ -1252,6 +1486,8 @@ psycopg2-binary = [
{file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e67b3c26e9b6d37b370c83aa790bbc121775c57bfb096c2e77eacca25fd0233b"},
{file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5fc447058d083b8c6ac076fc26b446d44f0145308465d745fba93a28c14c9e32"},
{file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d892bfa1d023c3781a3cab8dd5af76b626c483484d782e8bd047c180db590e4c"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-win32.whl", hash = "sha256:2abccab84d057723d2ca8f99ff7b619285d40da6814d50366f61f0fc385c3903"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:bef7e3f9dc6f0c13afdd671008534be5744e0e682fb851584c8c3a025ec09720"},
{file = "psycopg2_binary-2.9.5-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:6e63814ec71db9bdb42905c925639f319c80e7909fb76c3b84edc79dadef8d60"},
{file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:212757ffcecb3e1a5338d4e6761bf9c04f750e7d027117e74aa3cd8a75bb6fbd"},
{file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8a9bcab7b6db2e3dbf65b214dfc795b4c6b3bb3af922901b6a67f7cb47d5f8"},
@@ -1403,6 +1639,10 @@ requests-oauthlib = [
{file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
{file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"},
]
+responses = [
+ {file = "responses-0.22.0-py3-none-any.whl", hash = "sha256:dcf294d204d14c436fddcc74caefdbc5764795a40ff4e6a7740ed8ddbf3294be"},
+ {file = "responses-0.22.0.tar.gz", hash = "sha256:396acb2a13d25297789a5866b4881cf4e46ffd49cc26c43ab1117f40b973102e"},
+]
rjsmin = [
{file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e18fe1a610fb105273bb369f61c2b0bd9e66a3f0792e27e4cac44e42ace1968b"},
{file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:6c395ffc130332cca744f081ed5efd5699038dcb7a5d30c3ff4bc6adb5b30a62"},
@@ -1429,9 +1669,9 @@ rsa = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
]
-setuptools = [
- {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"},
- {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"},
+s3transfer = [
+ {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"},
+ {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"},
]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
@@ -1449,10 +1689,18 @@ text-unidecode = [
{file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"},
{file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"},
]
+toml = [
+ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
tomli = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
+types-toml = [
+ {file = "types-toml-0.10.8.5.tar.gz", hash = "sha256:bf80fce7d2d74be91148f47b88d9ae5adeb1024abef22aa2fdbabc036d6b8b3c"},
+ {file = "types_toml-0.10.8.5-py3-none-any.whl", hash = "sha256:2432017febe43174af0f3c65f03116e3d3cf43e7e1406b8200e106da8cf98992"},
+]
tzdata = [
{file = "tzdata-2022.6-py2.py3-none-any.whl", hash = "sha256:04a680bdc5b15750c39c12a448885a51134a27ec9af83667663f0b3a1bf3f342"},
{file = "tzdata-2022.6.tar.gz", hash = "sha256:91f11db4503385928c15598c98573e3af07e7229181bee5375bd30f1695ddcae"},
@@ -1468,6 +1716,10 @@ urllib3 = [
uwsgi = [
{file = "uwsgi-2.0.21.tar.gz", hash = "sha256:35a30d83791329429bc04fe44183ce4ab512fcf6968070a7bfba42fc5a0552a9"},
]
+werkzeug = [
+ {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"},
+ {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"},
+]
wrapt = [
{file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"},
@@ -1534,3 +1786,7 @@ wrapt = [
{file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"},
{file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
]
+xmltodict = [
+ {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"},
+ {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"},
+]
diff --git a/pyproject.toml b/pyproject.toml
index ca487767..7c75d701 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,6 +30,8 @@ uWSGI = {version = "^2.0.19", optional = true}
admin-totals = "^1.0.1"
django-bootstrap5 = "^22.1"
django-easy-admin-object-actions = "^1.1.0"
+boto3 = "^1.26.78"
+moto = "^4.1.3"
[tool.poetry.extras]
production = ["uwsgi", "psycopg2-binary"]
From a23790698aaee29faedd10c9c8bb3511b8e59a24 Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Sun, 26 Feb 2023 12:23:23 +0100
Subject: [PATCH 03/32] Add logger and replace prints with logs
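The logger is fetched by name ("django.aws"), so its output only shows up if the Django settings route that logger somewhere. A sketch of a possible LOGGING entry (an assumption, not part of this patch) that would print it on the console:

    LOGGING = {
        "version": 1,
        "disable_existing_loggers": False,
        "handlers": {"console": {"class": "logging.StreamHandler"}},
        "loggers": {
            # Route the "django.aws" logger used by AWSSync to the console.
            "django.aws": {"handlers": ["console"], "level": "INFO"},
        },
    }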
---
website/projects/awssync.py | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index 6ebc8e1a..c7f2dd14 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -1,9 +1,12 @@
+import logging
+
class AWSSync:
"""Synchronise with Amazon Web Services."""
def __init__(self):
"""Create an AWSSync instance."""
- print("Created AWSSync instance")
+ self.logger = logging.getLogger("django.aws")
+ self.logger.info("Created AWSSync instance.")
def button_pressed(self):
"""
@@ -11,5 +14,5 @@ def button_pressed(self):
:return: True if function executes successfully
"""
- print("Pressed button")
+ self.logger.info("Pressed button")
return True
From 02c2243d35a7e7cbf8be2d0633dde427011525dd Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Sun, 26 Feb 2023 12:32:14 +0100
Subject: [PATCH 04/32] Add function to create AWS organization
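A hedged usage sketch of the new method: calling it needs either real AWS credentials with permission to create an organization or a moto mock (added in the next patch); the printed field comes from the Organization structure that boto3 returns.

    from projects.awssync import AWSSync

    sync = AWSSync()
    sync.create_aws_organization()
    if sync.fail:
        print("Creating the AWS organization failed; see the django.aws log.")
    else:
        print("Created organization", sync.org_info["Id"])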
---
website/projects/awssync.py | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index c7f2dd14..36722c28 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -1,11 +1,18 @@
import logging
+import boto3
+
+from botocore.exceptions import ClientError
+
+
class AWSSync:
"""Synchronise with Amazon Web Services."""
def __init__(self):
"""Create an AWSSync instance."""
self.logger = logging.getLogger("django.aws")
+ self.org_info = None
+ self.fail = False
self.logger.info("Created AWSSync instance.")
def button_pressed(self):
@@ -16,3 +23,16 @@ def button_pressed(self):
"""
self.logger.info("Pressed button")
return True
+
+ def create_aws_organization(self):
+ """Create an AWS organization with the current user as the management account."""
+ client = boto3.client("organizations")
+ try:
+ response = client.create_organization(FeatureSet="ALL")
+ self.org_info = response["Organization"]
+ self.logger.info("Created an AWS organization and saved organization info.")
+ except ClientError as error:
+ self.fail = True
+ self.logger.error("Something went wrong creating an AWS organization.")
+ self.logger.debug(f"{error}")
+ self.logger.debug(f"{error.response}")
From 6b55b191808a418e7014474b4e6164d532d4348f Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Sun, 26 Feb 2023 12:54:36 +0100
Subject: [PATCH 05/32] Add unit tests for creating AWS organization
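As a side note, a sketch (assuming the standard Django test runner and configured settings) of running only this test module from Python rather than from the command line:

    from django.core.management import call_command

    # Equivalent to "python manage.py test projects.tests.test_awssync".
    call_command("test", "projects.tests.test_awssync", verbosity=2)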
---
website/projects/tests/test_awssync.py | 48 ++++++++++++++++++++++++++
1 file changed, 48 insertions(+)
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index 7983dc09..6822fc14 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -1,5 +1,13 @@
+from unittest.mock import patch
+
+import boto3
+
+from botocore.exceptions import ClientError
+
from django.test import TestCase
+from moto import mock_organizations
+
from projects import awssync
@@ -12,3 +20,43 @@ def setUp(self):
def test_button_pressed(self):
return_value = self.sync.button_pressed()
self.assertTrue(return_value)
+
+ def mock_api(self, operation_name, kwarg):
+ if operation_name == "CreateOrganization":
+ raise ClientError(
+ {
+ "Error": {
+ "Message": "The AWS account is already a member of an organization.",
+ "Code": "AlreadyInOrganizationException",
+ },
+ "ResponseMetadata": {
+ "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "HTTPStatusCode": 400,
+ "HTTPHeaders": {
+ "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "content-type": "application/x-amz-json-1.1",
+ "content-length": "111",
+ "date": "Sun, 01 Jan 2023 00:00:00 GMT",
+ "connection": "close",
+ },
+ "RetryAttempts": 0,
+ },
+ "Message": "The AWS account is already a member of an organization.",
+ },
+ "create_organization",
+ )
+
+ @mock_organizations
+ def test_create_aws_organization(self):
+ moto_client = boto3.client("organizations")
+ org = self.sync
+ org.create_aws_organization()
+ describe_org = moto_client.describe_organization()["Organization"]
+ self.assertEqual(describe_org, org.org_info)
+
+ @patch("botocore.client.BaseClient._make_api_call", mock_api)
+ def test_create_aws_organization__exception(self):
+ org = self.sync
+ org.create_aws_organization()
+ self.assertTrue(org.fail)
+ self.assertIsNone(org.org_info)
From 2bb9d9fcb55487f413b2a791a4da1e7ea97617f0 Mon Sep 17 00:00:00 2001
From: Henk Berendsen <61596108+hb140502@users.noreply.github.com>
Date: Fri, 3 Mar 2023 10:29:03 +0100
Subject: [PATCH 06/32] Deliverable sprint 1 (#19)
* AWS synchronisation button (#8)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed the redirect in synchronise_to_AWS so that it returns to the page from which the AWS sync button was pressed.
* Added tests and adjusted awssync.py so that the button_pressed function can be tested.
* Removed unnecessary docstrings.
* Add boto3 and moto dependencies (#11)
* Add logger and replace prints with logs
* Add function to create AWS organization
* Add unit tests for creating AWS organization
* bugfix (#619)
Co-authored-by: nvoers
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com>
Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com>
Co-authored-by: nvoers
---
poetry.lock | 296 ++++++++++++++++--
pyproject.toml | 2 +
website/projects/admin.py | 8 +
website/projects/awssync.py | 38 +++
.../templates/admin/projects/change_list.html | 1 +
website/projects/tests/test_admin.py | 6 +
website/projects/tests/test_awssync.py | 62 ++++
website/room_reservation/views.py | 2 +-
8 files changed, 394 insertions(+), 21 deletions(-)
create mode 100644 website/projects/awssync.py
create mode 100644 website/projects/tests/test_awssync.py
diff --git a/poetry.lock b/poetry.lock
index 6381833a..9e1c0056 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -49,6 +49,38 @@ d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
+[[package]]
+name = "boto3"
+version = "1.26.78"
+description = "The AWS SDK for Python"
+category = "main"
+optional = false
+python-versions = ">= 3.7"
+
+[package.dependencies]
+botocore = ">=1.29.78,<1.30.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.6.0,<0.7.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
+
+[[package]]
+name = "botocore"
+version = "1.29.78"
+description = "Low-level, data-driven core of boto 3."
+category = "main"
+optional = false
+python-versions = ">= 3.7"
+
+[package.dependencies]
+jmespath = ">=0.7.1,<2.0.0"
+python-dateutil = ">=2.1,<3.0.0"
+urllib3 = ">=1.25.4,<1.27"
+
+[package.extras]
+crt = ["awscrt (==0.16.9)"]
+
[[package]]
name = "cachetools"
version = "5.2.0"
@@ -85,7 +117,7 @@ optional = false
python-versions = ">=3.6.0"
[package.extras]
-unicode-backport = ["unicodedata2"]
+unicode_backport = ["unicodedata2"]
[[package]]
name = "click"
@@ -151,7 +183,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
wrapt = ">=1.10,<2"
[package.extras]
-dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"]
+dev = ["PyTest (<5)", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "pytest", "pytest-cov", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"]
[[package]]
name = "django"
@@ -280,7 +312,6 @@ python-versions = "*"
[package.dependencies]
pycodestyle = "*"
-setuptools = "*"
[[package]]
name = "freezegun"
@@ -343,7 +374,7 @@ six = ">=1.9.0"
[package.extras]
aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"]
-enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
+enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
pyopenssl = ["pyopenssl (>=20.0.0)"]
reauth = ["pyu2f (>=0.1.5)"]
@@ -408,6 +439,28 @@ category = "main"
optional = false
python-versions = ">=3.5"
+[[package]]
+name = "jinja2"
+version = "3.1.2"
+description = "A very fast and expressive template engine."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "jmespath"
+version = "1.0.1"
+description = "JSON Matching Expressions"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
[[package]]
name = "libsass"
version = "0.21.0"
@@ -419,6 +472,14 @@ python-versions = "*"
[package.dependencies]
six = "*"
+[[package]]
+name = "markupsafe"
+version = "2.1.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
[[package]]
name = "mccabe"
version = "0.7.0"
@@ -427,6 +488,49 @@ category = "dev"
optional = false
python-versions = ">=3.6"
+[[package]]
+name = "moto"
+version = "4.1.3"
+description = ""
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+boto3 = ">=1.9.201"
+botocore = ">=1.12.201"
+cryptography = ">=3.3.1"
+Jinja2 = ">=2.10.1"
+python-dateutil = ">=2.1,<3.0.0"
+requests = ">=2.5"
+responses = ">=0.13.0"
+werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1"
+xmltodict = "*"
+
+[package.extras]
+all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.2.8)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
+apigatewayv2 = ["PyYAML (>=5.1)"]
+appsync = ["graphql-core"]
+awslambda = ["docker (>=2.5.1)"]
+batch = ["docker (>=2.5.1)"]
+cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
+ds = ["sshpubkeys (>=3.1.0)"]
+dynamodb = ["docker (>=2.5.1)"]
+dynamodbstreams = ["docker (>=2.5.1)"]
+ebs = ["sshpubkeys (>=3.1.0)"]
+ec2 = ["sshpubkeys (>=3.1.0)"]
+efs = ["sshpubkeys (>=3.1.0)"]
+eks = ["sshpubkeys (>=3.1.0)"]
+glue = ["pyparsing (>=3.0.7)"]
+iotdata = ["jsondiff (>=1.1.2)"]
+route53resolver = ["sshpubkeys (>=3.1.0)"]
+s3 = ["PyYAML (>=5.1)"]
+server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
+ssm = ["PyYAML (>=5.1)"]
+xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"]
+
[[package]]
name = "mypy-extensions"
version = "0.4.3"
@@ -613,7 +717,7 @@ python-versions = ">=3.6"
cffi = ">=1.4.1"
[package.extras]
-docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"]
+docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
[[package]]
@@ -631,7 +735,7 @@ diagrams = ["jinja2", "railroad-diagrams"]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
-category = "dev"
+category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
@@ -662,7 +766,7 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-oauthlib"
@@ -679,6 +783,23 @@ requests = ">=2.0.0"
[package.extras]
rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
+[[package]]
+name = "responses"
+version = "0.22.0"
+description = "A utility library for mocking out the `requests` Python library."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+requests = ">=2.22.0,<3.0"
+toml = "*"
+types-toml = "*"
+urllib3 = ">=1.25.10"
+
+[package.extras]
+tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "types-requests"]
+
[[package]]
name = "rjsmin"
version = "1.2.0"
@@ -699,17 +820,18 @@ python-versions = ">=3.6,<4"
pyasn1 = ">=0.1.3"
[[package]]
-name = "setuptools"
-version = "65.5.1"
-description = "Easily download, build, install, upgrade, and uninstall Python packages"
-category = "dev"
+name = "s3transfer"
+version = "0.6.0"
+description = "An Amazon S3 Transfer Manager"
+category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">= 3.7"
+
+[package.dependencies]
+botocore = ">=1.12.36,<2.0a.0"
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
-testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
[[package]]
name = "six"
@@ -743,6 +865,14 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+category = "main"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+
[[package]]
name = "tomli"
version = "2.0.1"
@@ -751,6 +881,14 @@ category = "dev"
optional = false
python-versions = ">=3.7"
+[[package]]
+name = "types-toml"
+version = "0.10.8.5"
+description = "Typing stubs for toml"
+category = "main"
+optional = false
+python-versions = "*"
+
[[package]]
name = "tzdata"
version = "2022.6"
@@ -788,6 +926,20 @@ category = "main"
optional = true
python-versions = "*"
+[[package]]
+name = "werkzeug"
+version = "2.2.3"
+description = "The comprehensive WSGI web application library."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog"]
+
[[package]]
name = "wrapt"
version = "1.14.1"
@@ -796,13 +948,21 @@ category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+[[package]]
+name = "xmltodict"
+version = "0.13.0"
+description = "Makes working with XML feel like you are working with JSON"
+category = "main"
+optional = false
+python-versions = ">=3.4"
+
[extras]
-production = ["uwsgi", "uWSGI", "psycopg2-binary"]
+production = ["uwsgi", "psycopg2-binary"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
-content-hash = "cb906894eb8ca0d6f28ab7ea2ca335aff0ff3f43f32e0cabc9ab323703715abf"
+content-hash = "d812c41bd73a271e800f7a4969553f2b8b5a748e8d2f435c8ef5b1d953451f72"
[metadata.files]
absl-py = [
@@ -841,6 +1001,14 @@ black = [
{file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
{file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
]
+boto3 = [
+ {file = "boto3-1.26.78-py3-none-any.whl", hash = "sha256:0c593017fa49dbc34dcdbd5659208f2daf293a499d5f4d7e61978cd6b5d72a97"},
+ {file = "boto3-1.26.78.tar.gz", hash = "sha256:488bf63d65864ab7fcdf9337c5aa4d825d444e253738a60f80789916bacc47dc"},
+]
+botocore = [
+ {file = "botocore-1.29.78-py3-none-any.whl", hash = "sha256:656ac8822a1b6c887a8efe1172bcefa9c9c450face26dc39998a249e8c340a23"},
+ {file = "botocore-1.29.78.tar.gz", hash = "sha256:2bee6ed037590ef1e4884d944486232871513915f12a8590c63e3bb6046479bf"},
+]
cachetools = [
{file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"},
{file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"},
@@ -1084,6 +1252,14 @@ idna = [
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
+jinja2 = [
+ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
+ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+]
+jmespath = [
+ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
+ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
+]
libsass = [
{file = "libsass-0.21.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:06c8776417fe930714bdc930a3d7e795ae3d72be6ac883ff72a1b8f7c49e5ffb"},
{file = "libsass-0.21.0-cp27-cp27m-win32.whl", hash = "sha256:a005f298f64624f313a3ac618ab03f844c71d84ae4f4a4aec4b68d2a4ffe75eb"},
@@ -1096,10 +1272,66 @@ libsass = [
{file = "libsass-0.21.0-cp38-abi3-macosx_12_0_arm64.whl", hash = "sha256:c9ec490609752c1d81ff6290da33485aa7cb6d7365ac665b74464c1b7d97f7da"},
{file = "libsass-0.21.0.tar.gz", hash = "sha256:d5ba529d9ce668be9380563279f3ffe988f27bc5b299c5a28453df2e0b0fbaf2"},
]
+markupsafe = [
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
+ {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
+]
mccabe = [
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
+moto = [
+ {file = "moto-4.1.3-py2.py3-none-any.whl", hash = "sha256:dcd1d06662982cf3c94f36d6348251ccdcf62a1c5de5650425cb4e6f260ae7a0"},
+ {file = "moto-4.1.3.tar.gz", hash = "sha256:c8200ccaa9440c2e9daa0bd5e0bd768a719db5a2c82ea8d782f0e3fa09a3c5e2"},
+]
mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
@@ -1226,7 +1458,9 @@ protobuf = [
{file = "protobuf-4.21.9-cp38-cp38-win_amd64.whl", hash = "sha256:84ea107016244dfc1eecae7684f7ce13c788b9a644cd3fca5b77871366556444"},
{file = "protobuf-4.21.9-cp39-cp39-win32.whl", hash = "sha256:f9eae277dd240ae19bb06ff4e2346e771252b0e619421965504bd1b1bba7c5fa"},
{file = "protobuf-4.21.9-cp39-cp39-win_amd64.whl", hash = "sha256:6e312e280fbe3c74ea9e080d9e6080b636798b5e3939242298b591064470b06b"},
+ {file = "protobuf-4.21.9-py2.py3-none-any.whl", hash = "sha256:7eb8f2cc41a34e9c956c256e3ac766cf4e1a4c9c925dc757a41a01be3e852965"},
{file = "protobuf-4.21.9-py3-none-any.whl", hash = "sha256:48e2cd6b88c6ed3d5877a3ea40df79d08374088e89bedc32557348848dff250b"},
+ {file = "protobuf-4.21.9.tar.gz", hash = "sha256:61f21493d96d2a77f9ca84fefa105872550ab5ef71d21c458eb80edcf4885a99"},
]
psycopg2-binary = [
{file = "psycopg2-binary-2.9.5.tar.gz", hash = "sha256:33e632d0885b95a8b97165899006c40e9ecdc634a529dca7b991eb7de4ece41c"},
@@ -1252,6 +1486,8 @@ psycopg2-binary = [
{file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e67b3c26e9b6d37b370c83aa790bbc121775c57bfb096c2e77eacca25fd0233b"},
{file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5fc447058d083b8c6ac076fc26b446d44f0145308465d745fba93a28c14c9e32"},
{file = "psycopg2_binary-2.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d892bfa1d023c3781a3cab8dd5af76b626c483484d782e8bd047c180db590e4c"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-win32.whl", hash = "sha256:2abccab84d057723d2ca8f99ff7b619285d40da6814d50366f61f0fc385c3903"},
+ {file = "psycopg2_binary-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:bef7e3f9dc6f0c13afdd671008534be5744e0e682fb851584c8c3a025ec09720"},
{file = "psycopg2_binary-2.9.5-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:6e63814ec71db9bdb42905c925639f319c80e7909fb76c3b84edc79dadef8d60"},
{file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:212757ffcecb3e1a5338d4e6761bf9c04f750e7d027117e74aa3cd8a75bb6fbd"},
{file = "psycopg2_binary-2.9.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8a9bcab7b6db2e3dbf65b214dfc795b4c6b3bb3af922901b6a67f7cb47d5f8"},
@@ -1403,6 +1639,10 @@ requests-oauthlib = [
{file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
{file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"},
]
+responses = [
+ {file = "responses-0.22.0-py3-none-any.whl", hash = "sha256:dcf294d204d14c436fddcc74caefdbc5764795a40ff4e6a7740ed8ddbf3294be"},
+ {file = "responses-0.22.0.tar.gz", hash = "sha256:396acb2a13d25297789a5866b4881cf4e46ffd49cc26c43ab1117f40b973102e"},
+]
rjsmin = [
{file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e18fe1a610fb105273bb369f61c2b0bd9e66a3f0792e27e4cac44e42ace1968b"},
{file = "rjsmin-1.2.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:6c395ffc130332cca744f081ed5efd5699038dcb7a5d30c3ff4bc6adb5b30a62"},
@@ -1429,9 +1669,9 @@ rsa = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
]
-setuptools = [
- {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"},
- {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"},
+s3transfer = [
+ {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"},
+ {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"},
]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
@@ -1449,10 +1689,18 @@ text-unidecode = [
{file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"},
{file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"},
]
+toml = [
+ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
tomli = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
+types-toml = [
+ {file = "types-toml-0.10.8.5.tar.gz", hash = "sha256:bf80fce7d2d74be91148f47b88d9ae5adeb1024abef22aa2fdbabc036d6b8b3c"},
+ {file = "types_toml-0.10.8.5-py3-none-any.whl", hash = "sha256:2432017febe43174af0f3c65f03116e3d3cf43e7e1406b8200e106da8cf98992"},
+]
tzdata = [
{file = "tzdata-2022.6-py2.py3-none-any.whl", hash = "sha256:04a680bdc5b15750c39c12a448885a51134a27ec9af83667663f0b3a1bf3f342"},
{file = "tzdata-2022.6.tar.gz", hash = "sha256:91f11db4503385928c15598c98573e3af07e7229181bee5375bd30f1695ddcae"},
@@ -1468,6 +1716,10 @@ urllib3 = [
uwsgi = [
{file = "uwsgi-2.0.21.tar.gz", hash = "sha256:35a30d83791329429bc04fe44183ce4ab512fcf6968070a7bfba42fc5a0552a9"},
]
+werkzeug = [
+ {file = "Werkzeug-2.2.3-py3-none-any.whl", hash = "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"},
+ {file = "Werkzeug-2.2.3.tar.gz", hash = "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe"},
+]
wrapt = [
{file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"},
@@ -1534,3 +1786,7 @@ wrapt = [
{file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"},
{file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
]
+xmltodict = [
+ {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"},
+ {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"},
+]
diff --git a/pyproject.toml b/pyproject.toml
index ca487767..7c75d701 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,6 +30,8 @@ uWSGI = {version = "^2.0.19", optional = true}
admin-totals = "^1.0.1"
django-bootstrap5 = "^22.1"
django-easy-admin-object-actions = "^1.1.0"
+boto3 = "^1.26.78"
+moto = "^4.1.3"
[tool.poetry.extras]
production = ["uwsgi", "psycopg2-binary"]
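As a quick sanity check that the newly pinned dependencies resolve in the project's virtual environment, a minimal sketch (the printed versions are whatever Poetry installed, not guaranteed values; the minimums come from the constraints above):

    # Confirm boto3 and moto import and report versions compatible with pyproject.toml.
    import boto3
    import moto

    print(boto3.__version__)  # expected to be >= 1.26.78
    print(moto.__version__)   # expected to be >= 4.1.3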
diff --git a/website/projects/admin.py b/website/projects/admin.py
index 0a39d439..7fae8a6d 100644
--- a/website/projects/admin.py
+++ b/website/projects/admin.py
@@ -12,6 +12,7 @@
from mailing_lists.models import MailingList
+from projects.awssync import AWSSync
from projects.forms import ProjectAdminForm, RepositoryInlineForm
from projects.githubsync import GitHubSync
from projects.models import Client, Project, Repository
@@ -171,6 +172,12 @@ def synchronise_current_projects_to_GitHub(self, request):
],
)
+ def synchronise_to_AWS(self, request):
+ """Synchronise to Amazon Web Services."""
+ sync = AWSSync()
+ sync.button_pressed()
+ return redirect("admin:projects_project_changelist")
+
def get_urls(self):
"""Get admin urls."""
urls = super().get_urls()
@@ -180,6 +187,7 @@ def get_urls(self):
self.admin_site.admin_view(self.synchronise_current_projects_to_GitHub),
name="synchronise_to_github",
),
+ path("sync-to-aws/", self.admin_site.admin_view(self.synchronise_to_AWS), name="synchronise_to_aws"),
]
return custom_urls + urls
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
new file mode 100644
index 00000000..36722c28
--- /dev/null
+++ b/website/projects/awssync.py
@@ -0,0 +1,38 @@
+import logging
+
+import boto3
+
+from botocore.exceptions import ClientError
+
+
+class AWSSync:
+ """Synchronise with Amazon Web Services."""
+
+ def __init__(self):
+ """Create an AWSSync instance."""
+ self.logger = logging.getLogger("django.aws")
+ self.org_info = None
+ self.fail = False
+ self.logger.info("Created AWSSync instance.")
+
+ def button_pressed(self):
+ """
+ Print debug message to show that the button has been pressed.
+
+ :return: True if function executes successfully
+ """
+ self.logger.info("Pressed button")
+ return True
+
+ def create_aws_organization(self):
+ """Create an AWS organization with the current user as the management account."""
+ client = boto3.client("organizations")
+ try:
+ response = client.create_organization(FeatureSet="ALL")
+ self.org_info = response["Organization"]
+ self.logger.info("Created an AWS organization and saved organization info.")
+ except ClientError as error:
+ self.fail = True
+ self.logger.error("Something went wrong creating an AWS organization.")
+ self.logger.debug(f"{error}")
+ self.logger.debug(f"{error.response}")
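For reference, a minimal sketch of the Organizations call that create_aws_organization wraps, run under moto so no real AWS account is touched; the response fields shown in the comment are from the AWS API, the concrete values are illustrative:

    import boto3
    from moto import mock_organizations

    @mock_organizations
    def demo_create_organization():
        # Same call as in AWSSync.create_aws_organization, but against moto's fake backend.
        client = boto3.client("organizations", region_name="us-east-1")
        response = client.create_organization(FeatureSet="ALL")
        organization = response["Organization"]
        # The response includes fields such as "Id", "Arn", "MasterAccountId" and "FeatureSet".
        return organization["Id"]

    print(demo_create_organization())  # e.g. "o-xxxxxxxxxx"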
diff --git a/website/projects/templates/admin/projects/change_list.html b/website/projects/templates/admin/projects/change_list.html
index 82da6c2e..9e2f1890 100644
--- a/website/projects/templates/admin/projects/change_list.html
+++ b/website/projects/templates/admin/projects/change_list.html
@@ -4,6 +4,7 @@
{% block object-tools-items %}
Synchronize projects of the current semester to GitHub
+ Synchronize projects of the current semester to AWS
{{ block.super }}
{% endblock %}
diff --git a/website/projects/tests/test_admin.py b/website/projects/tests/test_admin.py
index d8778d0c..25f0328a 100644
--- a/website/projects/tests/test_admin.py
+++ b/website/projects/tests/test_admin.py
@@ -84,6 +84,7 @@ def setUp(self):
self.sync_mock.users_removed = 1
self.sync_mock.repos_archived = 1
self.github_mock = MagicMock(return_value=self.sync_mock)
+ self.aws_mock = MagicMock()
messages.error = MagicMock()
messages.warning = MagicMock()
messages.success = MagicMock()
@@ -233,6 +234,11 @@ def test_synchronise_current_projects_to_GitHub(self):
self.assertNotIn(self.project_archived, args[1])
self.project_admin.synchronise_to_GitHub = original_sync_action
+ def test_synchronise_to_AWS(self):
+ with patch("projects.admin.AWSSync", self.aws_mock):
+ self.project_admin.synchronise_to_AWS(self.request)
+ self.aws_mock.assert_called_once()
+
def test_archive_all_repositories(self):
self.project_admin.archive_all_repositories(self.request, Project.objects.all())
self.repo1.refresh_from_db()
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
new file mode 100644
index 00000000..6822fc14
--- /dev/null
+++ b/website/projects/tests/test_awssync.py
@@ -0,0 +1,62 @@
+from unittest.mock import patch
+
+import boto3
+
+from botocore.exceptions import ClientError
+
+from django.test import TestCase
+
+from moto import mock_organizations
+
+from projects import awssync
+
+
+class AWSSyncTest(TestCase):
+ """Test AWSSync class."""
+
+ def setUp(self):
+ self.sync = awssync.AWSSync()
+
+ def test_button_pressed(self):
+ return_value = self.sync.button_pressed()
+ self.assertTrue(return_value)
+
+ def mock_api(self, operation_name, kwarg):
+ if operation_name == "CreateOrganization":
+ raise ClientError(
+ {
+ "Error": {
+ "Message": "The AWS account is already a member of an organization.",
+ "Code": "AlreadyInOrganizationException",
+ },
+ "ResponseMetadata": {
+ "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "HTTPStatusCode": 400,
+ "HTTPHeaders": {
+ "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "content-type": "application/x-amz-json-1.1",
+ "content-length": "111",
+ "date": "Sun, 01 Jan 2023 00:00:00 GMT",
+ "connection": "close",
+ },
+ "RetryAttempts": 0,
+ },
+ "Message": "The AWS account is already a member of an organization.",
+ },
+ "create_organization",
+ )
+
+ @mock_organizations
+ def test_create_aws_organization(self):
+ moto_client = boto3.client("organizations")
+ org = self.sync
+ org.create_aws_organization()
+ describe_org = moto_client.describe_organization()["Organization"]
+ self.assertEqual(describe_org, org.org_info)
+
+ @patch("botocore.client.BaseClient._make_api_call", mock_api)
+ def test_create_aws_organization__exception(self):
+ org = self.sync
+ org.create_aws_organization()
+ self.assertTrue(org.fail)
+ self.assertIsNone(org.org_info)
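The exception test above patches botocore.client.BaseClient._make_api_call, which intercepts every API call no matter which client instance AWSSync creates internally. An alternative, shown here only as a sketch and not used by this patch, is botocore's Stubber; it needs a handle on the specific client instance, which is presumably why the global patch was chosen:

    import boto3
    from botocore.exceptions import ClientError
    from botocore.stub import Stubber

    client = boto3.client("organizations", region_name="us-east-1")
    stubber = Stubber(client)
    stubber.add_client_error(
        "create_organization",
        service_error_code="AlreadyInOrganizationException",
        service_message="The AWS account is already a member of an organization.",
        http_status_code=400,
    )
    with stubber:
        try:
            client.create_organization(FeatureSet="ALL")
        except ClientError as error:
            print(error.response["Error"]["Code"])  # AlreadyInOrganizationException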
diff --git a/website/room_reservation/views.py b/website/room_reservation/views.py
index f617d2ee..eeef5b36 100644
--- a/website/room_reservation/views.py
+++ b/website/room_reservation/views.py
@@ -134,7 +134,7 @@ def get_context_data(self, **kwargs):
}
for reservation in Reservation.objects.filter(
start_time__date__gte=timezone.now() - self.time_window_past,
- start_time__date__lt=timezone.now() + self.time_window_future,
+ start_time__date__lte=timezone.now() + self.time_window_future,
)
]
)
From 784c16c4e6416e63fc1ce7c9bded4035d121deb1 Mon Sep 17 00:00:00 2001
From: Jer111 <82157107+Jer111@users.noreply.github.com>
Date: Fri, 3 Mar 2023 10:49:13 +0100
Subject: [PATCH 07/32] Added logger setlevel (#20)
---
website/projects/awssync.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index 36722c28..e0f44734 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -11,6 +11,7 @@ class AWSSync:
def __init__(self):
"""Create an AWSSync instance."""
self.logger = logging.getLogger("django.aws")
+ self.logger.setLevel(logging.DEBUG)
self.org_info = None
self.fail = False
self.logger.info("Created AWSSync instance.")
From 8c83d5d915d03a998f20cf4c18a83dd15665ecba Mon Sep 17 00:00:00 2001
From: Henk Berendsen <61596108+hb140502@users.noreply.github.com>
Date: Fri, 3 Mar 2023 12:38:38 +0100
Subject: [PATCH 08/32] Updated deliverable sprint 1 (#22)
* AWS synchronisation button (#8)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Removed unnecessary docstrings.
* Add boto3 and moto dependencies (#11)
* Add logger and replace prints with logs
* Add function to create AWS organization
* Add unit tests for creating AWS organization
* bugfix (#619)
Co-authored-by: nvoers
* Added logger setlevel (#20)
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com>
Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com>
Co-authored-by: nvoers
Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com>
---
website/projects/awssync.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index 36722c28..e0f44734 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -11,6 +11,7 @@ class AWSSync:
def __init__(self):
"""Create an AWSSync instance."""
self.logger = logging.getLogger("django.aws")
+ self.logger.setLevel(logging.DEBUG)
self.org_info = None
self.fail = False
self.logger.info("Created AWSSync instance.")
From baf6f285374b161cd7f8d5b4c9a663192ecd7332 Mon Sep 17 00:00:00 2001
From: Jer111 <82157107+Jer111@users.noreply.github.com>
Date: Tue, 14 Mar 2023 10:35:27 +0100
Subject: [PATCH 09/32] Db sync (#16)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Added get mailinglist to the awssync file
* Added first version of get_all_managers
* Added test case for mailing lists
* Removed some prints
* reformatted using black
* flake8 indentation added
* flake8 indentation correction
* Removed get manager
* Linting added
* unused import removed
* Added get_teamid_from_email
* Changed function email with teamid
* Updated get_emails_with_teamids, working now
* Added test for get_emails_with_ids
* Added linting
* linting
* Added more test
* Linting in awssync and its test file
* Moved the imports around
* moved the imports around
* Black linting
* switched imports around
* Switched imports around part 2
* Switched imports around part 3
* Switched imports around part 4
* Fixed case when no project exists for mailing list
* Added some more tests
* Removed exception try/except
* Black linting
* Changed get_email_with_teamid to new format
* Changed get_emails_with_teamids to go over project
* Added tests for get_emails_with_teamids
* changed info for get_emails_with_teamids
---------
Co-authored-by: Henk
---
website/projects/awssync.py | 45 +++++++++++++++++++++
website/projects/tests/test_awssync.py | 55 ++++++++++++++++++++++++++
2 files changed, 100 insertions(+)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index e0f44734..90017cbc 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -1,9 +1,17 @@
+"""Framework for synchronisation with Amazon Web Services (AWS)."""
+
import logging
import boto3
from botocore.exceptions import ClientError
+from courses.models import Semester
+
+from mailing_lists.models import MailingList
+
+from projects.models import Project
+
class AWSSync:
"""Synchronise with Amazon Web Services."""
@@ -23,8 +31,45 @@ def button_pressed(self):
:return: True if function executes successfully
"""
self.logger.info("Pressed button")
+ self.logger.info(self.get_emails_with_teamids())
return True
+ def get_all_mailing_lists(self):
+ """
+ Get all mailing lists from the database.
+
+ :return: List of mailing lists
+ """
+ mailing_lists = MailingList.objects.all()
+ mailing_list_names = [ml.email_address for ml in mailing_lists]
+ return mailing_list_names
+
+ def get_emails_with_teamids(self):
+ """
+ Create a list of dictionaries containing email, slug and semester.
+
+ Slug and semester together form a uniqueness constraint.
+
+ :return: list of dictionaries of email, slug and semester
+ """
+ email_ids = []
+
+ for project in (
+ Project.objects.filter(mailinglist__isnull=False)
+ .filter(semester=Semester.objects.get_or_create_current_semester())
+ .values("slug", "semester", "mailinglist")
+ ):
+ project_slug = project["slug"]
+ project_semester = str(Semester.objects.get(pk=project["semester"]))
+ project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address
+ email_dict = {
+ "project_email": project_email,
+ "project_slug": project_slug,
+ "project_semester": project_semester,
+ }
+ email_ids.append(email_dict)
+ return email_ids
+
def create_aws_organization(self):
"""Create an AWS organization with the current user as the management account."""
client = boto3.client("organizations")
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index 6822fc14..6cf4ab3d 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -1,3 +1,5 @@
+"""Tests for awssync.py."""
+
from unittest.mock import patch
import boto3
@@ -8,19 +10,72 @@
from moto import mock_organizations
+from courses.models import Semester
+
+from mailing_lists.models import MailingList
+
from projects import awssync
+from projects.models import Project
class AWSSyncTest(TestCase):
"""Test AWSSync class."""
def setUp(self):
+ """Set up testing environment."""
self.sync = awssync.AWSSync()
+ self.semester = Semester.objects.create(year=2023, season=Semester.SPRING)
+ self.mailing_list = MailingList.objects.create(address="test1")
+ self.project = Project.objects.create(id=1, name="test1", semester=self.semester, slug="test1")
+ self.mailing_list.projects.add(self.project)
def test_button_pressed(self):
+ """Test button_pressed function."""
return_value = self.sync.button_pressed()
self.assertTrue(return_value)
+ def test_get_all_mailing_lists(self):
+ """Test get_all_mailing_lists function."""
+ mailing_lists = self.sync.get_all_mailing_lists()
+ self.assertIsInstance(mailing_lists, list)
+
+ def test_get_emails_with_teamids_normal(self):
+ """Test get_emails_with_teamids function."""
+ email_id = self.sync.get_emails_with_teamids()
+ self.assertIsInstance(email_id, list)
+ self.assertIsInstance(email_id[0], dict)
+ expected_result = [
+ {"project_email": "test1@giphouse.nl", "project_slug": "test1", "project_semester": "Spring 2023"}
+ ]
+ self.assertEqual(email_id, expected_result)
+
+ def test_get_emails_with_teamids_no_project(self):
+ """Test get_emails_with_teamids function."""
+ MailingList.objects.all().delete()
+ self.mailing_list = MailingList.objects.create(address="test2")
+ email_id = self.sync.get_emails_with_teamids()
+ self.assertIsInstance(email_id, list)
+ self.assertEqual(email_id, [])
+
+ def test_get_emails_with_teamids_no_mailing_list(self):
+ """Test get_emails_with_teamids function."""
+ MailingList.objects.all().delete()
+ Project.objects.all().delete()
+ email_id = self.sync.get_emails_with_teamids()
+ self.assertIsInstance(email_id, list)
+ self.assertEqual(email_id, [])
+
+ def test_get_emails_with_teamids_different_semester(self):
+ """Test get_emails_with_teamids function."""
+ MailingList.objects.all().delete()
+ new_semester = Semester.objects.create(year=2022, season=Semester.FALL)
+ self.mailing_list = MailingList.objects.create(address="test2")
+ self.project = Project.objects.create(id=2, name="test2", semester=new_semester, slug="test2")
+ self.mailing_list.projects.add(self.project)
+ email_id = self.sync.get_emails_with_teamids()
+ self.assertIsInstance(email_id, list)
+ self.assertEqual(email_id, [])
+
def mock_api(self, operation_name, kwarg):
if operation_name == "CreateOrganization":
raise ClientError(
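To illustrate what get_emails_with_teamids produces, a sketch mirroring the fixture and expected result of test_get_emails_with_teamids_normal above (the @giphouse.nl suffix comes from the MailingList model's email_address field); this assumes a Django shell with the same objects as in setUp:

    from projects import awssync

    sync = awssync.AWSSync()
    print(sync.get_emails_with_teamids())
    # [{'project_email': 'test1@giphouse.nl',
    #   'project_slug': 'test1',
    #   'project_semester': 'Spring 2023'}]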
From 65d1048aadf2ff33cd2ac5c67411a2b4dbc4c752 Mon Sep 17 00:00:00 2001
From: mitchellboes <49476235+mitchellboes@users.noreply.github.com>
Date: Tue, 14 Mar 2023 12:20:59 +0100
Subject: [PATCH 10/32] Db sync (#25)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Added get mailinglist to the awssync file
* Added first version of get_all_managers
* Added test case for mailing lists
* Removed some prints
* reformatted using black
* flake8 indentation added
* flake8 indentation correction
* Removed get manager
* Linting added
* unused import removed
* Added get_teamid_from_email
* Changed function email with teamid
* Updated get_emails_with_teamids, working now
* Added test for get_emails_with_ids
* Added linting
* linting
* Added more test
* Linting in awssync and its test file
* Moved the imports around
* moved the imports around
* Black linting
* switched imports around
* Switched imports around part 2
* Switched imports around part 3
* Switched imports around part 4
* Fixed case when no project exists for mailing list
* Added some more tests
* Removed exception try/except
* Black linting
* Changed get_email_with_teamid to new format
* Changed get_emails_with_teamids to go over project
* Added tests for get_emails_with_teamids
* changed info for get_emails_with_teamids
* Changed email data dict to struct
* added test for TypeError exception for eq operator
* resolved linting errors
* changed comment to correct datatype
* dramatically improved test class name
---------
Co-authored-by: Henk
Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com>
Co-authored-by: Jer111
---
website/projects/awssync.py | 33 +++++++++++++++++++-------
website/projects/tests/test_awssync.py | 21 ++++++++++++----
2 files changed, 42 insertions(+), 12 deletions(-)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index 90017cbc..ac9bddda 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -13,6 +13,26 @@
from projects.models import Project
+class SyncData:
+ """Structure for AWS giphouse sync data."""
+
+ def __init__(self, project_email, project_slug, project_semester):
+ """Create SyncData instance."""
+ self.project_email = project_email
+ self.project_slug = project_slug
+ self.project_semester = project_semester
+
+ def __eq__(self, other):
+ """Overload equals for SyncData type."""
+ if not isinstance(other, SyncData):
+ raise TypeError("Must compare to object of type SyncData")
+ return (
+ self.project_email == other.project_email
+ and self.project_slug == other.project_slug
+ and self.project_semester == other.project_semester
+ )
+
+
class AWSSync:
"""Synchronise with Amazon Web Services."""
@@ -46,11 +66,11 @@ def get_all_mailing_lists(self):
def get_emails_with_teamids(self):
"""
- Create a list of dictionaries containing email, slug and semester.
+ Create a list of SyncData structs containing email, slug and semester.
Slug and semester together form a uniqueness constraint.
- :return: list of dictionaries of email, slug and semester
+ :return: list of SyncData structs with email, slug and semester
"""
email_ids = []
@@ -62,12 +82,9 @@ def get_emails_with_teamids(self):
project_slug = project["slug"]
project_semester = str(Semester.objects.get(pk=project["semester"]))
project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address
- email_dict = {
- "project_email": project_email,
- "project_slug": project_slug,
- "project_semester": project_semester,
- }
- email_ids.append(email_dict)
+
+ sync_data = SyncData(project_email, project_slug, project_semester)
+ email_ids.append(sync_data)
return email_ids
def create_aws_organization(self):
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index 6cf4ab3d..f9aa1353 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -18,6 +18,20 @@
from projects.models import Project
+class SyncDataTest(TestCase):
+ """Test SyncData class (struct)."""
+
+ def setUp(self):
+ """setup test environment."""
+ self.sync = awssync.SyncData
+
+ def test_throw_type_error_SyncData_class(self):
+ """Test Type Error when equals is called on wrong type."""
+ with self.assertRaises(TypeError) as context:
+ self.sync("", "", "") == []
+ self.assertTrue("Must compare to object of type SyncData" in str(context.exception))
+
+
class AWSSyncTest(TestCase):
"""Test AWSSync class."""
@@ -42,11 +56,10 @@ def test_get_all_mailing_lists(self):
def test_get_emails_with_teamids_normal(self):
"""Test get_emails_with_teamids function."""
email_id = self.sync.get_emails_with_teamids()
+
self.assertIsInstance(email_id, list)
- self.assertIsInstance(email_id[0], dict)
- expected_result = [
- {"project_email": "test1@giphouse.nl", "project_slug": "test1", "project_semester": "Spring 2023"}
- ]
+ self.assertIsInstance(email_id[0], awssync.SyncData)
+ expected_result = [awssync.SyncData("test1@giphouse.nl", "test1", "Spring 2023")]
self.assertEqual(email_id, expected_result)
def test_get_emails_with_teamids_no_project(self):
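SyncData is a plain value object, so a dataclass would generate the constructor, __eq__ and __repr__ automatically. The sketch below is only an alternative for comparison, not part of the patch: a generated __eq__ returns False when compared against another type instead of raising TypeError, which is the behaviour the test added above relies on.

    from dataclasses import dataclass

    @dataclass(frozen=True)
    class SyncDataAlternative:
        """Equivalent value object; differs from SyncData when compared to other types."""

        project_email: str
        project_slug: str
        project_semester: str

    a = SyncDataAlternative("test1@giphouse.nl", "test1", "Spring 2023")
    b = SyncDataAlternative("test1@giphouse.nl", "test1", "Spring 2023")
    print(a == b)   # True
    print(a == [])  # False, no TypeError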
From c562c6940877bb9d07f0792b4e7a0e4c038f4370 Mon Sep 17 00:00:00 2001
From: mitchellboes <49476235+mitchellboes@users.noreply.github.com>
Date: Tue, 14 Mar 2023 14:56:12 +0100
Subject: [PATCH 11/32] Added function to generate which users have to be
invited after the sync button is pressed (#23)
* Added 'generate_aws_sync_list' function and tests
* solved black errors
* changed generate_aws_sync_list to use SyncData structure
---
website/projects/awssync.py | 9 ++++++++
website/projects/tests/test_awssync.py | 32 ++++++++++++++++++++++++++
2 files changed, 41 insertions(+)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index ac9bddda..29499b58 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -99,3 +99,12 @@ def create_aws_organization(self):
self.logger.error("Something went wrong creating an AWS organization.")
self.logger.debug(f"{error}")
self.logger.debug(f"{error.response}")
+
+ def generate_aws_sync_list(self, giphouse_data, aws_data):
+ """
+ Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS.
+
+ This includes their ID and email address, so that users can later be placed in the correct AWS organization.
+ """
+ sync_list = [x for x in giphouse_data if x not in aws_data]
+ return sync_list
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index f9aa1353..f48eaf4e 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -128,3 +128,35 @@ def test_create_aws_organization__exception(self):
org.create_aws_organization()
self.assertTrue(org.fail)
self.assertIsNone(org.org_info)
+
+
+class AWSSyncListTest(TestCase):
+ """Test AWSSyncList class."""
+
+ def setUp(self):
+ self.sync = awssync.AWSSync()
+ self.syncData = awssync.SyncData
+
+ self.test1 = self.syncData("test1@test1.test1", "test1", "test1")
+ self.test2 = self.syncData("test2@test2.test2", "test2", "test2")
+ self.test3 = self.syncData("test3@test3.test3", "test3", "test3")
+
+ def test_AWS_sync_list_both_empty(self):
+ gip_list = []
+ aws_list = []
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_empty_AWS(self):
+ gip_list = [self.test1, self.test2]
+ aws_list = []
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list)
+
+ def test_AWS_sync_list_empty_GiP(self):
+ gip_list = []
+ aws_list = [self.test1, self.test2]
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_both_full(self):
+ gip_list = [self.test1, self.test2]
+ aws_list = [self.test2, self.test3]
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1])
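generate_aws_sync_list computes the difference with a list comprehension; a set difference would not work as-is because SyncData defines __eq__ without __hash__, which makes its instances unhashable. A sketch mirroring test_AWS_sync_list_both_full above:

    from projects import awssync

    sync = awssync.AWSSync()
    test1 = awssync.SyncData("test1@test1.test1", "test1", "test1")
    test2 = awssync.SyncData("test2@test2.test2", "test2", "test2")
    test3 = awssync.SyncData("test3@test3.test3", "test3", "test3")

    gip_list = [test1, test2]   # registered on the GiPhouse website
    aws_list = [test2, test3]   # already present in AWS
    result = sync.generate_aws_sync_list(gip_list, aws_list)
    print(result == [test1])    # True: only test1 still has to be invited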
From 2bf3048b88fdba006005fae87ffbfccb62dace51 Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Tue, 4 Apr 2023 12:44:04 +0200
Subject: [PATCH 12/32] Create and attach SCP policies (#29)
* Add functions for creating and attaching SCP policies
* Improve test cases
---
website/projects/awssync.py | 42 ++++++++++++
website/projects/tests/test_awssync.py | 92 ++++++++++++++++++++++++++
2 files changed, 134 insertions(+)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index 29499b58..688bbd8a 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -1,5 +1,6 @@
"""Framework for synchronisation with Amazon Web Services (AWS)."""
+import json
import logging
import boto3
@@ -108,3 +109,44 @@ def generate_aws_sync_list(self, giphouse_data, aws_data):
"""
sync_list = [x for x in giphouse_data if x not in aws_data]
return sync_list
+
+ def create_scp_policy(self, policy_name, policy_description, policy_content):
+ """
+ Create an SCP policy.
+
+ :param policy_name: The policy name.
+ :param policy_description: The policy description.
+ :param policy_content: The policy configuration as a dictionary. The policy is automatically
+ converted to JSON format, including escaped quotation marks.
+ :return: Details of newly created policy as a dict on success and NoneType object otherwise.
+ """
+ client = boto3.client("organizations")
+ try:
+ response = client.create_policy(
+ Content=json.dumps(policy_content),
+ Description=policy_description,
+ Name=policy_name,
+ Type="SERVICE_CONTROL_POLICY",
+ )
+ except ClientError as error:
+ self.fail = True
+ self.logger.error("Something went wrong creating an SCP policy.")
+ self.logger.error(error)
+ else:
+ return response["Policy"]
+
+ def attach_scp_policy(self, policy_id, target_id):
+ """
+ Attach an SCP policy to a target (root, OU, or member account).
+
+ :param policy_id: The ID of the policy to be attached.
+ :param target_id: The ID of the target root, OU, or member account.
+ """
+ client = boto3.client("organizations")
+ try:
+ client.attach_policy(PolicyId=policy_id, TargetId=target_id)
+ except ClientError as error:
+ self.fail = True
+ self.logger.error("Something went wrong attaching an SCP policy to a target.")
+ self.logger.debug(f"{error}")
+ self.logger.debug(f"{error.response}")
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index f48eaf4e..83823aa6 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -1,5 +1,6 @@
"""Tests for awssync.py."""
+import json
from unittest.mock import patch
import boto3
@@ -114,6 +115,32 @@ def mock_api(self, operation_name, kwarg):
"create_organization",
)
+ if operation_name == "CreatePolicy":
+ raise ClientError(
+ {
+ "Error": {
+ "Message": """The provided policy document does not meet the
+ requirements of the specified policy type.""",
+ "Code": "MalformedPolicyDocumentException",
+ },
+ "ResponseMetadata": {
+ "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "HTTPStatusCode": 400,
+ "HTTPHeaders": {
+ "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "content-type": "application/x-amz-json-1.1",
+ "content-length": "147",
+ "date": "Sun, 01 Jan 2023 00:00:00 GMT",
+ "connection": "close",
+ },
+ "RetryAttempts": 0,
+ },
+ "Message": """The provided policy document does not meet the
+ requirements of the specified policy type.""",
+ },
+ "create_policy",
+ )
+
@mock_organizations
def test_create_aws_organization(self):
moto_client = boto3.client("organizations")
@@ -129,6 +156,71 @@ def test_create_aws_organization__exception(self):
self.assertTrue(org.fail)
self.assertIsNone(org.org_info)
+ @mock_organizations
+ def test_create_scp_policy(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+
+ self.assertFalse(self.sync.fail)
+ self.assertEqual(policy["PolicySummary"]["Name"], policy_name)
+ self.assertEqual(policy["PolicySummary"]["Description"], policy_description)
+ self.assertEqual(policy["Content"], json.dumps(policy_content))
+
+ @mock_organizations
+ def test_create_scp_policy__exception(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {
+ "Version": "2012-10-17",
+ "Statement": [{"Effect": "NonExistentEffect", "Action": "*", "Resource": "*"}],
+ }
+ with patch("botocore.client.BaseClient._make_api_call", self.mock_api):
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+
+ self.assertTrue(self.sync.fail)
+ self.assertIsNone(policy)
+
+ @mock_organizations
+ def test_attach_scp_policy(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+
+ policy_id = policy["PolicySummary"]["Id"]
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+ self.sync.attach_scp_policy(policy_id, root_id)
+
+ current_scp_policies = moto_client.list_policies_for_target(TargetId=root_id, Filter="SERVICE_CONTROL_POLICY")
+ current_scp_policy_ids = [scp_policy["Id"] for scp_policy in current_scp_policies["Policies"]]
+
+ self.assertIn(policy_id, current_scp_policy_ids)
+ self.assertFalse(self.sync.fail)
+
+ @mock_organizations
+ def test_attach_scp_policy__exception(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+
+ policy_id = policy["PolicySummary"]["Id"]
+ root_id = self.sync.org_info["Id"] # Retrieves organization ID, not root ID, resulting in ClientError.
+ self.sync.attach_scp_policy(policy_id, root_id)
+
+ self.assertTrue(self.sync.fail)
+
class AWSSyncListTest(TestCase):
"""Test AWSSyncList class."""
From 9661415499801b756a90f237fb7afeabe456dca4 Mon Sep 17 00:00:00 2001
From: Jer111 <82157107+Jer111@users.noreply.github.com>
Date: Tue, 11 Apr 2023 12:27:41 +0200
Subject: [PATCH 13/32] 12 moto helper (#36)
* merged with development and added create_c_i_OU
* Added some tests for create_c_i_OU
* Added some tests for create_c_i_ou
* Linting
* Changed the mock_api call back to orginal
* Added create_team_ou with tests
* Fix problems with moto testing
* Worked on tests and added apitalkerclass
* Make test asserts more meaningful
* black
* Added tests for create_ou's without parts
* Added one test that gets all children under OU
* Fix linting
* Changed return of response
create team ou did not save the name of the team OU
* Fix test create team OU
* Resolved linting issues
* Fix flake8
* remove create_team_ou
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
Co-authored-by: Fouad Lamsettef
---
website/projects/awssync.py | 28 ++++
website/projects/tests/test_awssync.py | 190 ++++++++++++++++---------
website/room_reservation/views.py | 2 +-
3 files changed, 148 insertions(+), 72 deletions(-)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index 688bbd8a..dca36633 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -42,6 +42,7 @@ def __init__(self):
self.logger = logging.getLogger("django.aws")
self.logger.setLevel(logging.DEBUG)
self.org_info = None
+ self.iterationOU_info = None
self.fail = False
self.logger.info("Created AWSSync instance.")
@@ -101,6 +102,33 @@ def create_aws_organization(self):
self.logger.debug(f"{error}")
self.logger.debug(f"{error.response}")
+ def create_course_iteration_OU(self, iteration_id):
+ """
+ Create an OU for the course iteration.
+
+ :param iteration_id: The ID of the course iteration
+
+ :return: The ID of the OU
+ """
+ client = boto3.client("organizations")
+ if self.org_info is None:
+ self.logger.info("No organization info found. Creating an AWS organization.")
+ self.fail = True
+ else:
+ try:
+ response = client.create_organizational_unit(
+ ParentId=self.org_info["Id"],
+ Name=f"Course Iteration {iteration_id}",
+ )
+ self.logger.info(f"Created an OU for course iteration {iteration_id}.")
+ self.iterationOU_info = response["OrganizationalUnit"]
+ return response["OrganizationalUnit"]["Id"]
+ except ClientError as error:
+ self.fail = True
+ self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_id}.")
+ self.logger.debug(f"{error}")
+ self.logger.debug(f"{error.response}")
+
def generate_aws_sync_list(self, giphouse_data, aws_data):
"""
Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS.
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index 83823aa6..5fc1f583 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -5,6 +5,7 @@
import boto3
+import botocore
from botocore.exceptions import ClientError
from django.test import TestCase
@@ -43,12 +44,53 @@ def setUp(self):
self.mailing_list = MailingList.objects.create(address="test1")
self.project = Project.objects.create(id=1, name="test1", semester=self.semester, slug="test1")
self.mailing_list.projects.add(self.project)
+ self.mock_org = mock_organizations()
+ self.mock_org.start()
+
+ def tearDown(self):
+ self.mock_org.stop()
def test_button_pressed(self):
"""Test button_pressed function."""
return_value = self.sync.button_pressed()
self.assertTrue(return_value)
+ def test_create_aws_organization(self):
+ moto_client = boto3.client("organizations")
+ org = self.sync
+ org.create_aws_organization()
+ describe_org = moto_client.describe_organization()["Organization"]
+ self.assertEqual(describe_org, org.org_info)
+
+ def test_create_aws_organization__exception(self):
+ org = self.sync
+ with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
+ org.create_aws_organization()
+ self.assertTrue(org.fail)
+ self.assertIsNone(org.org_info)
+
+ def test_create_course_iteration_OU(self):
+ moto_client = boto3.client("organizations")
+ org = self.sync
+ org.create_aws_organization()
+ org.create_course_iteration_OU(1)
+ describe_unit = moto_client.describe_organizational_unit(OrganizationalUnitId=org.iterationOU_info["Id"])[
+ "OrganizationalUnit"
+ ]
+ self.assertEqual(describe_unit, org.iterationOU_info)
+
+ def test_create_course_iteration_OU_without_organization(self):
+ org = self.sync
+ org.create_course_iteration_OU(1)
+ self.assertTrue(org.fail)
+
+ def test_create_course_iteration_OU__exception(self):
+ org = self.sync
+ org.create_aws_organization()
+ with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
+ org.create_course_iteration_OU(1)
+ self.assertTrue(org.fail)
+
def test_get_all_mailing_lists(self):
"""Test get_all_mailing_lists function."""
mailing_lists = self.sync.get_all_mailing_lists()
@@ -90,73 +132,6 @@ def test_get_emails_with_teamids_different_semester(self):
self.assertIsInstance(email_id, list)
self.assertEqual(email_id, [])
- def mock_api(self, operation_name, kwarg):
- if operation_name == "CreateOrganization":
- raise ClientError(
- {
- "Error": {
- "Message": "The AWS account is already a member of an organization.",
- "Code": "AlreadyInOrganizationException",
- },
- "ResponseMetadata": {
- "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "HTTPStatusCode": 400,
- "HTTPHeaders": {
- "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "content-type": "application/x-amz-json-1.1",
- "content-length": "111",
- "date": "Sun, 01 Jan 2023 00:00:00 GMT",
- "connection": "close",
- },
- "RetryAttempts": 0,
- },
- "Message": "The AWS account is already a member of an organization.",
- },
- "create_organization",
- )
-
- if operation_name == "CreatePolicy":
- raise ClientError(
- {
- "Error": {
- "Message": """The provided policy document does not meet the
- requirements of the specified policy type.""",
- "Code": "MalformedPolicyDocumentException",
- },
- "ResponseMetadata": {
- "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "HTTPStatusCode": 400,
- "HTTPHeaders": {
- "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "content-type": "application/x-amz-json-1.1",
- "content-length": "147",
- "date": "Sun, 01 Jan 2023 00:00:00 GMT",
- "connection": "close",
- },
- "RetryAttempts": 0,
- },
- "Message": """The provided policy document does not meet the
- requirements of the specified policy type.""",
- },
- "create_policy",
- )
-
- @mock_organizations
- def test_create_aws_organization(self):
- moto_client = boto3.client("organizations")
- org = self.sync
- org.create_aws_organization()
- describe_org = moto_client.describe_organization()["Organization"]
- self.assertEqual(describe_org, org.org_info)
-
- @patch("botocore.client.BaseClient._make_api_call", mock_api)
- def test_create_aws_organization__exception(self):
- org = self.sync
- org.create_aws_organization()
- self.assertTrue(org.fail)
- self.assertIsNone(org.org_info)
-
- @mock_organizations
def test_create_scp_policy(self):
self.sync.create_aws_organization()
@@ -170,7 +145,6 @@ def test_create_scp_policy(self):
self.assertEqual(policy["PolicySummary"]["Description"], policy_description)
self.assertEqual(policy["Content"], json.dumps(policy_content))
- @mock_organizations
def test_create_scp_policy__exception(self):
self.sync.create_aws_organization()
@@ -180,13 +154,12 @@ def test_create_scp_policy__exception(self):
"Version": "2012-10-17",
"Statement": [{"Effect": "NonExistentEffect", "Action": "*", "Resource": "*"}],
}
- with patch("botocore.client.BaseClient._make_api_call", self.mock_api):
+ with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
self.assertTrue(self.sync.fail)
self.assertIsNone(policy)
- @mock_organizations
def test_attach_scp_policy(self):
moto_client = boto3.client("organizations")
self.sync.create_aws_organization()
@@ -206,7 +179,6 @@ def test_attach_scp_policy(self):
self.assertIn(policy_id, current_scp_policy_ids)
self.assertFalse(self.sync.fail)
- @mock_organizations
def test_attach_scp_policy__exception(self):
self.sync.create_aws_organization()
@@ -252,3 +224,79 @@ def test_AWS_sync_list_both_full(self):
gip_list = [self.test1, self.test2]
aws_list = [self.test2, self.test3]
self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1])
+
+
+class AWSAPITalkerTest(TestCase):
+ """Provide a mocked botocore API call that raises ClientErrors for selected operations."""
+ def mock_api(self, operation_name, kwarg):
+ if operation_name == "CreateOrganization":
+ raise ClientError(
+ {
+ "Error": {
+ "Message": "The AWS account is already a member of an organization.",
+ "Code": "AlreadyInOrganizationException",
+ },
+ "ResponseMetadata": {
+ "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "HTTPStatusCode": 400,
+ "HTTPHeaders": {
+ "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "content-type": "application/x-amz-json-1.1",
+ "content-length": "111",
+ "date": "Sun, 01 Jan 2023 00:00:00 GMT",
+ "connection": "close",
+ },
+ "RetryAttempts": 0,
+ },
+ "Message": "The AWS account is already a member of an organization.",
+ },
+ "create_organization",
+ )
+ if operation_name == "CreateOrganizationalUnit":
+ raise ClientError(
+ {
+ "Error": {
+ "Message": "The OU already exists.",
+ "Code": "ParentNotFoundException",
+ },
+ "ResponseMetadata": {
+ "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "HTTPStatusCode": 400,
+ "HTTPHeaders": {
+ "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "content-type": "application/x-amz-json-1.1",
+ "content-length": "111",
+ "date": "Sun, 01 Jan 2023 00:00:00 GMT",
+ "connection": "close",
+ },
+ "RetryAttempts": 0,
+ },
+ "Message": "The OU already exists.",
+ },
+ "create_organizational_unit",
+ )
+ if operation_name == "CreatePolicy":
+ raise ClientError(
+ {
+ "Error": {
+ "Message": """The provided policy document does not meet the
+ requirements of the specified policy type.""",
+ "Code": "MalformedPolicyDocumentException",
+ },
+ "ResponseMetadata": {
+ "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "HTTPStatusCode": 400,
+ "HTTPHeaders": {
+ "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "content-type": "application/x-amz-json-1.1",
+ "content-length": "147",
+ "date": "Sun, 01 Jan 2023 00:00:00 GMT",
+ "connection": "close",
+ },
+ "RetryAttempts": 0,
+ },
+ "Message": """The provided policy document does not meet the
+ requirements of the specified policy type.""",
+ },
+ "create_policy",
+ )
+ return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg)
diff --git a/website/room_reservation/views.py b/website/room_reservation/views.py
index eeef5b36..f617d2ee 100644
--- a/website/room_reservation/views.py
+++ b/website/room_reservation/views.py
@@ -134,7 +134,7 @@ def get_context_data(self, **kwargs):
}
for reservation in Reservation.objects.filter(
start_time__date__gte=timezone.now() - self.time_window_past,
- start_time__date__lte=timezone.now() + self.time_window_future,
+ start_time__date__lt=timezone.now() + self.time_window_future,
)
]
)
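create_course_iteration_OU only proceeds once org_info has been populated, which is why the test without an organization expects fail to be set. A sketch mirroring test_create_course_iteration_OU above, again under moto and assuming a default region is configured in the environment:

    from moto import mock_organizations
    from projects import awssync

    @mock_organizations
    def demo_course_iteration_ou():
        sync = awssync.AWSSync()
        sync.create_aws_organization()             # populates sync.org_info
        ou_id = sync.create_course_iteration_OU(1)
        # sync.iterationOU_info now holds the new OU's details ("Id", "Name", ...).
        return ou_id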
From 45e70df8f0681247bd5a923760fb87956f3658d1 Mon Sep 17 00:00:00 2001
From: mitchellboes <49476235+mitchellboes@users.noreply.github.com>
Date: Thu, 13 Apr 2023 11:17:25 +0200
Subject: [PATCH 14/32] Add checks for edge cases between AWS and Giphouse
databases (#37)
* added double user check (partly)
* added some checks and made two new fancy classes for the storage of AWS tree dictionaries
* added tests
* added equals for AWSTree and Iteration objects
* test stupid error
* does it work now?
* resolved merge conflicts with rebasing on development
* cleaned up code based on pull request comments
---
website/projects/awssync.py | 116 +++++++++++++++++-
website/projects/tests/test_awssync.py | 162 +++++++++++++++++++++++++
2 files changed, 277 insertions(+), 1 deletion(-)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index dca36633..517cd083 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -1,4 +1,5 @@
"""Framework for synchronisation with Amazon Web Services (AWS)."""
+from __future__ import annotations
import json
import logging
@@ -33,6 +34,60 @@ def __eq__(self, other):
and self.project_semester == other.project_semester
)
+ def __repr__(self):
+ """Overload to string function for SyncData type."""
+ return f"SyncData('{self.project_email}', '{self.project_slug}', '{self.project_semester}')"
+
+
+class Iteration:
+ """Datatype for AWS data in the Course iteration OU."""
+
+ def __init__(self, name, ou_id, members: list[SyncData]):
+ """Initialize Iteration object."""
+ self.name = name
+ self.ou_id = ou_id
+ self.members = members
+
+ def __repr__(self):
+ """Overload to string function for Iteration datatype."""
+ return f"Iteration('{self.name}', '{self.ou_id}', {self.members})"
+
+ def __eq__(self, other: Iteration) -> bool:
+ """Overload equals operator for Iteration objects."""
+ if not isinstance(other, Iteration):
+ raise TypeError("Must compare to object of type Iteration")
+ return self.name == other.name and self.ou_id == other.ou_id and self.members == other.members
+
+
+class AWSTree:
+ """Tree structure for AWS data."""
+
+ def __init__(self, name, ou_id, iterations: list[Iteration]):
+ """Initialize AWSTree object."""
+ self.name = name
+ self.ou_id = ou_id
+ self.iterations = iterations
+
+ def __repr__(self):
+ """Overload to string function for AWSTree object."""
+ return f"AWSTree('{self.name}', '{self.ou_id}', {self.iterations})"
+
+ def __eq__(self, other: AWSTree) -> bool:
+ """Overload equals operator for AWSTree objects."""
+ if not isinstance(other, AWSTree):
+ raise TypeError("Must compare to object of type AWSTree")
+ return self.name == other.name and self.ou_id == other.ou_id and self.iterations == other.iterations
+
+ def awstree_to_syncdata_list(self):
+ """Convert AWSTree to list of SyncData elements."""
+ awslist = []
+
+ for iteration in self.iterations:
+ for member in iteration.members:
+ awslist.append(member)
+
+ return awslist
+
class AWSSync:
"""Synchronise with Amazon Web Services."""
@@ -129,7 +184,7 @@ def create_course_iteration_OU(self, iteration_id):
self.logger.debug(f"{error}")
self.logger.debug(f"{error.response}")
- def generate_aws_sync_list(self, giphouse_data, aws_data):
+ def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]):
"""
Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS.
@@ -178,3 +233,62 @@ def attach_scp_policy(self, policy_id, target_id):
self.logger.error("Something went wrong attaching an SCP policy to a target.")
self.logger.debug(f"{error}")
self.logger.debug(f"{error.response}")
+
+ # TODO: check if this function is really needed
+
+ def check_for_double_member_email(self, aws_list: list[SyncData], sync_list: list[SyncData]):
+ """Check if no users are assigned to multiple projects."""
+ sync_emails = [x.project_email for x in sync_list]
+ aws_emails = [x.project_email for x in aws_list]
+
+ duplicates = [email for email in sync_emails if email in aws_emails]
+
+ for duplicate in duplicates:
+ error = f"Email address {duplicate} is already in the list of members in AWS"
+ self.logger.info("An email clash occured while syncing.")
+ self.logger.debug(error)
+
+ if duplicates != []:
+ return True
+ return False
+
+ def check_current_ou_exists(self, AWSdata: AWSTree):
+ """
+ Check if the OU (organizational unit) for the current semester already exists in AWS.
+
+ Expects the AWSTree structure produced by the function that retrieves the AWS data.
+ """
+ current = Semester.objects.get_or_create_current_semester()
+
+ for iteration in AWSdata.iterations:
+ if current == iteration.name:
+ return (True, iteration.ou_id)
+
+ return (False, None)
+
+ # TODO: Do we want to check for this?
+ def check_members_in_correct_iteration(self, AWSdata: AWSTree):
+ """Check if the data from the member tag matches the semester OU it is in."""
+ incorrect_emails = []
+ for iteration in AWSdata.iterations:
+ for member in iteration.members:
+ if member.project_semester != iteration.name:
+ incorrect_emails.append(member.project_email)
+
+ if incorrect_emails != []:
+ return (False, incorrect_emails)
+
+ return (True, None)
+
+ def check_double_iteration_names(self, AWSdata: AWSTree):
+ """Check if there are multiple OU's with the same name in AWS."""
+ names = [iteration.name for iteration in AWSdata.iterations]
+ doubles = []
+
+ for name in names:
+ if names.count(name) != 1 and name not in doubles:
+ doubles.append(name)
+
+ if doubles != []:
+ return (True, doubles)
+ return (False, None)
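
For orientation, here is a minimal sketch of how these datatypes compose, assuming the SyncData class defined earlier in awssync.py (fields project_email, project_slug and project_semester); the values below are made up for illustration only:

    # Illustrative values only; not part of the patch.
    tree = AWSTree(
        "root",
        "r-example",
        [
            Iteration(
                "Spring 2023",
                "ou-example-1",
                [SyncData("alice@example.com", "project1", "Spring 2023")],
            )
        ],
    )
    # Flatten the tree into a plain list of SyncData entries.
    members = tree.awstree_to_syncdata_list()
    # members == [SyncData("alice@example.com", "project1", "Spring 2023")]
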
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index 5fc1f583..a172612d 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -300,3 +300,165 @@ def mock_api(self, operation_name, kwarg):
"create_policy",
)
return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg)
+
+
+class AWSTreeChecksTest(TestCase):
+ """Test checks done on AWSTree data struncture."""
+
+ def setUp(self):
+ self.sync = awssync.AWSSync()
+ self.awstree = awssync.AWSTree("Name", "1234", [])
+ self.iteration = awssync.Iteration("Name", "1234", [])
+ self.sync_data = awssync.SyncData("email@example.com", "Project X", "Spring 2020")
+
+ self.sync_list = [
+ awssync.SyncData("email1@example.com", "Spring 2022", "Project A"),
+ awssync.SyncData("email2@example.com", "Fall 2022", "Project B"),
+ awssync.SyncData("email3@example.com", "Spring 2022", "Project C"),
+ ]
+ self.aws_list = [
+ awssync.SyncData("email4@example.com", "Fall 2021", "Project D"),
+ awssync.SyncData("email5@example.com", "Spring 2022", "Project E"),
+ awssync.SyncData("email6@example.com", "Fall 2022", "Project F"),
+ ]
+
+ self.treelist = [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ awssync.SyncData("email3@example.com", "project3", "Spring 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ]
+
+ self.aws_tree1 = awssync.AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ awssync.Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ awssync.Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ awssync.SyncData("email3@example.com", "project3", "Spring 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree2 = awssync.AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ awssync.Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ awssync.Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ awssync.SyncData("email3@example.com", "project3", "Fall 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree3 = awssync.AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ awssync.Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ awssync.Iteration(
+ "Fall 2020",
+ "98765",
+ [
+ awssync.SyncData("email3@example.com", "project3", "Fall 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ def test_repr_AWSTree(self):
+ self.assertEquals(str(self.awstree), "AWSTree('Name', '1234', [])")
+
+ def test_repr_Iteration(self):
+ self.assertEquals(str(self.iteration), "Iteration('Name', '1234', [])")
+
+ def test_repr_SyncData(self):
+ self.assertEquals(str(self.sync_data), "SyncData('email@example.com', 'Project X', 'Spring 2020')")
+
+ def test_awstree_to_syncdata_list(self):
+ self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist)
+
+ def test_check_for_double_member_email(self):
+ # Test when there are no duplicate emails
+ self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
+
+ # Test when there is a duplicate email
+ self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G"))
+ self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
+
+ def test_check_current_ou_exists(self):
+ # Test when current semester OU does not exist
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"):
+ self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022")
+ val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
+ self.assertEqual((val1, val2), (False, None))
+
+ # Test when current semester OU exists
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"):
+ self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021")
+ val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
+ self.assertEqual((val1, val2), (True, "98765"))
+
+ def test_check_members_in_correct_iteration(self):
+ # Test when correct
+ val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree1)
+ self.assertEqual((val1, val2), (True, None))
+
+ # Test when incorrect
+ val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2)
+ self.assertEqual((val1, val2), (False, ["email3@example.com"]))
+
+ def test_check_double_iteration_names(self):
+ # Test when correct
+ val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1)
+ self.assertEqual((val1, val2), (False, None))
+
+ # Test when double
+ val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3)
+ self.assertEqual((val1, val2), (True, ["Fall 2020"]))
+
+ def test_AWSTree_equals(self):
+ self.assertEqual(self.aws_tree1, self.aws_tree1)
+ self.assertNotEqual(self.aws_tree1, self.aws_tree2)
+ with self.assertRaises(TypeError):
+ awssync.AWSTree("", "", []) == []
+
+ def test_Iteration_equals(self):
+ self.assertEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[0])
+ self.assertNotEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[1])
+ with self.assertRaises(TypeError):
+ awssync.Iteration("", "", []) == []
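
A short usage sketch of the clash check added above, assuming the classes from awssync.py are imported as in the test module (the e-mail addresses are illustrative):

    sync = awssync.AWSSync()
    aws_list = [awssync.SyncData("a@example.com", "project1", "Spring 2023")]
    gip_list = [awssync.SyncData("a@example.com", "project2", "Spring 2023")]
    # The same project e-mail occurs on both sides, so a clash is reported.
    assert sync.check_for_double_member_email(aws_list, gip_list)
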
From e8086b12227159d986a1be57e103e9341d66d5c3 Mon Sep 17 00:00:00 2001
From: flam123
Date: Fri, 14 Apr 2023 10:32:23 +0200
Subject: [PATCH 15/32] Extraction of AWS data
* Added function for extracting data
* Added mock format
* finished function (testing needed)
* Linting fix
* fix no return of tree
* Fix AttributeError
* Unwrap tuple instead of accessing by element to increase code readability
* Fixed for new struct
* Implementation bug fixes
* added tests (not done)
* Removed classes for merge
* Linting fix
* git fixes
* Black fix
* pydocstyle fix
* Black fix again
* removed flake8 'fix'
* Final flake8 fix
* Final final flake8 fix
* spelling error fix
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
---
website/projects/awssync.py | 47 ++++++++++++++++++++++++--
website/projects/tests/test_awssync.py | 46 +++++++++++++++++++++++++
2 files changed, 90 insertions(+), 3 deletions(-)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index 517cd083..b4fb8d84 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -188,7 +188,7 @@ def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[S
"""
Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS.
- This includes their ID and email address, to be able to put users in the correct AWS orginization later.
+ This includes their ID and email address, to be able to put users in the correct AWS organization later.
"""
sync_list = [x for x in giphouse_data if x not in aws_data]
return sync_list
@@ -199,8 +199,8 @@ def create_scp_policy(self, policy_name, policy_description, policy_content):
:param policy_name: The policy name.
:param policy_description: The policy description.
- :param policy_content: The policy configuration as a dictionary. The policy is automatically
- converted to JSON format, including escaped quotation marks.
+ :param policy_content: The policy configuration as a dictionary.
+ The policy is automatically converted to JSON format, including escaped quotation marks.
:return: Details of newly created policy as a dict on success and NoneType object otherwise.
"""
client = boto3.client("organizations")
@@ -292,3 +292,44 @@ def check_double_iteration_names(self, AWSdata: AWSTree):
if doubles != []:
return (True, doubles)
return (False, None)
+
+ def extract_aws_setup(self, parent_ou_id):
+ """
+ Give a list of all the children of the parent OU.
+
+ :param parent_ou_id: The ID of the parent OU.
+ """
+ client = boto3.client("organizations")
+ try:
+ response = client.list_organizational_units_for_parent(ParentId=parent_ou_id)
+ aws_tree = AWSTree("root", parent_ou_id, [])
+ for iteration in response["OrganizationalUnits"]:
+ ou_id = iteration["Id"]
+ ou_name = iteration["Name"]
+ response = client.list_accounts_for_parent(ParentId=ou_id)
+ children = response["Accounts"]
+ syncData = []
+ for child in children:
+ account_id = child["Id"]
+ account_email = child["Email"]
+ response = client.list_tags_for_resource(ResourceId=account_id)
+ tags = response["Tags"]
+ merged_tags = {d["Key"]: d["Value"] for d in tags}
+ self.logger.debug(merged_tags)
+ if all(key in merged_tags for key in ["project_slug", "project_semester"]):
+ syncData.append(
+ SyncData(account_email, merged_tags["project_slug"], merged_tags["project_semester"])
+ )
+ else:
+ self.logger.error(
+ "Could not find project_slug or project_semester tag for account with ID: " + account_id
+ )
+ self.fail = True
+
+ aws_tree.iterations.append(Iteration(ou_name, ou_id, syncData))
+ return aws_tree
+ except ClientError as error:
+ self.fail = True
+ self.logger.error("Something went wrong extracting the AWS setup.")
+ self.logger.debug(f"{error}")
+ self.logger.debug(f"{error.response}")
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index a172612d..8a0c1b12 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -193,6 +193,52 @@ def test_attach_scp_policy__exception(self):
self.assertTrue(self.sync.fail)
+ @mock_organizations
+ def test_get_aws_data(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+
+ response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1")
+ OU_1_id = response_OU_1["OrganizationalUnit"]["Id"]
+ response_account_1 = moto_client.create_account(
+ Email="account_1@gmail.com",
+ AccountName="account_1",
+ Tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}],
+ )
+ account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"]
+ moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id)
+
+ aws_tree = self.sync.extract_aws_setup(root_id)
+ iteration_test = awssync.Iteration("OU_1", OU_1_id, [awssync.SyncData("account_1@gmail.com", "test1", "2021")])
+ aws_tree_test = awssync.AWSTree("root", root_id, [iteration_test])
+ self.assertEquals(aws_tree, aws_tree_test)
+
+ @mock_organizations
+ def test_get_aws_data_no_root(self):
+ boto3.client("organizations")
+ self.sync.create_aws_organization()
+ self.sync.extract_aws_setup("NonExistentRootID")
+ self.assertTrue(self.sync.fail)
+
+ @mock_organizations
+ def test_get_aws_data_no_slugs(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+
+ response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1")
+ OU_1_id = response_OU_1["OrganizationalUnit"]["Id"]
+ response_account_1 = moto_client.create_account(
+ Email="account_1@gmail.com",
+ AccountName="account_1",
+ Tags=[],
+ )
+ account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"]
+ moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id)
+ self.sync.extract_aws_setup(root_id)
+ self.assertTrue(self.sync.fail)
+
class AWSSyncListTest(TestCase):
"""Test AWSSyncList class."""
From d99c9c2a7bf29ef990e9f9fe636bfd94db5fd8f4 Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Sat, 22 Apr 2023 13:09:33 +0000
Subject: [PATCH 16/32] AWS synchronization pipeline (and integration bug
fixes) (#42)
* Create and attach SCP policies (#29)
* Add functions for creating and attaching SCP policies
* Improve test cases
* Add function template
* Fix 'a/an' grammar mistake
* Add pipeline preconditions
* Add pipeline preconditions tests
* Add checks for required API actions
* Add test cases for checking required API actions
* Added implementation of creating and attaching policy in the pipeline
* Remove double API actions
* Added implementation of creating and moving accounts in the pipeline. This should still be divided into smaller functions.
* Increase code readability
* Stop checking account request status after known failure
* Fixed small typos and added account details to a debug message about account creation failure
* Added tests for pipeline policy and fixed typos in debug messages.
* Split creating and moving accounts into multiple functions, and handle exceptions
* added update_course_iteration_ou with tests
* updated pipeline_update_current_course_iteration
* Add test cases for creating and moving member accounts
* Bug fixes for pipeline dependencies that arose from integration sprint 2 tasks
* Revised pipeline policy function and corresponding tests so that it should not fail after first pipeline run
* Change duplicate policy attachment to soft-fail; replace organization ID with root ID
---------
Co-authored-by: Henk
Co-authored-by: Jer111
---
website/projects/awssync.py | 408 ++++++++++++++-
website/projects/tests/test_awssync.py | 673 ++++++++++++++++++++++++-
2 files changed, 1062 insertions(+), 19 deletions(-)
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index b4fb8d84..96e71327 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -3,10 +3,12 @@
import json
import logging
+import time
import boto3
from botocore.exceptions import ClientError
+from botocore.exceptions import NoCredentialsError
from courses.models import Semester
@@ -94,11 +96,72 @@ class AWSSync:
def __init__(self):
"""Create an AWSSync instance."""
+ self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 5
+ self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3
+
self.logger = logging.getLogger("django.aws")
self.logger.setLevel(logging.DEBUG)
self.org_info = None
self.iterationOU_info = None
+ self.policy_id = "p-examplepolicyid111"
self.fail = False
+ self.required_aws_actions = [
+ # "organizations:AcceptHandshake",
+ "organizations:AttachPolicy",
+ # "organizations:CancelHandshake",
+ # "organizations:CloseAccount",
+ "organizations:CreateAccount",
+ # "organizations:CreateGovCloudAccount",
+ "organizations:CreateOrganization",
+ "organizations:CreateOrganizationalUnit",
+ "organizations:CreatePolicy",
+ # "organizations:DeclineHandshake",
+ # "organizations:DeleteOrganization",
+ "organizations:DeleteOrganizationalUnit",
+ "organizations:DeletePolicy",
+ "organizations:DeleteResourcePolicy",
+ # "organizations:DeregisterDelegatedAdministrator",
+ "organizations:DescribeAccount",
+ "organizations:DescribeCreateAccountStatus",
+ "organizations:DescribeEffectivePolicy",
+ # "organizations:DescribeHandshake",
+ "organizations:DescribeOrganization",
+ "organizations:DescribeOrganizationalUnit",
+ "organizations:DescribePolicy",
+ "organizations:DescribeResourcePolicy",
+ "organizations:DetachPolicy",
+ # "organizations:DisableAWSServiceAccess",
+ "organizations:DisablePolicyType",
+ # "organizations:EnableAWSServiceAccess",
+ # "organizations:EnableAllFeatures",
+ "organizations:EnablePolicyType",
+ # "organizations:InviteAccountToOrganization",
+ # "organizations:LeaveOrganization",
+ # "organizations:ListAWSServiceAccessForOrganization",
+ "organizations:ListAccounts",
+ "organizations:ListAccountsForParent",
+ "organizations:ListChildren",
+ "organizations:ListCreateAccountStatus",
+ # "organizations:ListDelegatedAdministrators",
+ # "organizations:ListDelegatedServicesForAccount",
+ # "organizations:ListHandshakesForAccount",
+ # "organizations:ListHandshakesForOrganization",
+ "organizations:ListOrganizationalUnitsForParent",
+ "organizations:ListParents",
+ "organizations:ListPolicies",
+ "organizations:ListPoliciesForTarget",
+ "organizations:ListRoots",
+ "organizations:ListTagsForResource",
+ "organizations:ListTargetsForPolicy",
+ "organizations:MoveAccount",
+ "organizations:PutResourcePolicy",
+ # "organizations:RegisterDelegatedAdministrator",
+ # "organizations:RemoveAccountFromOrganization",
+ "organizations:TagResource",
+ "organizations:UntagResource",
+ "organizations:UpdateOrganizationalUnit",
+ "organizations:UpdatePolicy",
+ ]
self.logger.info("Created AWSSync instance.")
def button_pressed(self):
@@ -108,7 +171,7 @@ def button_pressed(self):
:return: True if function executes successfully
"""
self.logger.info("Pressed button")
- self.logger.info(self.get_emails_with_teamids())
+ self.logger.debug(f"Pipeline result: {self.pipeline()}")
return True
def get_all_mailing_lists(self):
@@ -157,11 +220,11 @@ def create_aws_organization(self):
self.logger.debug(f"{error}")
self.logger.debug(f"{error.response}")
- def create_course_iteration_OU(self, iteration_id):
+ def create_course_iteration_OU(self, iteration_name):
"""
Create an OU for the course iteration.
- :param iteration_id: The ID of the course iteration
+ :param iteration_name: The name of the course iteration OU
:return: The ID of the OU
"""
@@ -171,16 +234,17 @@ def create_course_iteration_OU(self, iteration_id):
self.fail = True
else:
try:
+ root_id = client.list_roots()["Roots"][0]["Id"]
response = client.create_organizational_unit(
- ParentId=self.org_info["Id"],
- Name=f"Course Iteration {iteration_id}",
+ ParentId=root_id,
+ Name=iteration_name,
)
- self.logger.info(f"Created an OU for course iteration {iteration_id}.")
+ self.logger.info(f"Created an OU for course iteration {iteration_name}.")
self.iterationOU_info = response["OrganizationalUnit"]
return response["OrganizationalUnit"]["Id"]
except ClientError as error:
self.fail = True
- self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_id}.")
+ self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_name}.")
self.logger.debug(f"{error}")
self.logger.debug(f"{error.response}")
@@ -195,7 +259,7 @@ def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[S
def create_scp_policy(self, policy_name, policy_description, policy_content):
"""
- Create a SCP policy.
+ Create an SCP policy.
:param policy_name: The policy name.
:param policy_description: The policy description.
@@ -220,7 +284,7 @@ def create_scp_policy(self, policy_name, policy_description, policy_content):
def attach_scp_policy(self, policy_id, target_id):
"""
- Attaches a SCP policy to a target (root, OU, or member account).
+ Attaches an SCP policy to a target (root, OU, or member account).
:param policy_id: The ID of the policy to be attached.
:param target_id: The ID of the target root, OU, or member account.
@@ -229,12 +293,331 @@ def attach_scp_policy(self, policy_id, target_id):
try:
client.attach_policy(PolicyId=policy_id, TargetId=target_id)
except ClientError as error:
- self.fail = True
+ if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException":
+ self.fail = True
self.logger.error("Something went wrong attaching an SCP policy to a target.")
self.logger.debug(f"{error}")
self.logger.debug(f"{error.response}")
- # TODO: check if this function is really needed
+ def check_aws_api_connection(self):
+ """
+ Check whether boto3 can connect to AWS API with current credentials.
+
+ :returns: First tuple element indicates success.
+ Second tuple element contains information about the entity
+ that made the successful API call, and is None otherwise.
+ """
+ client_sts = boto3.client("sts")
+ try:
+ caller_identity_info = client_sts.get_caller_identity()
+ except (NoCredentialsError, ClientError) as error:
+ self.logger.info("Establishing AWS API connection failed.")
+ self.logger.debug(error)
+ return False, None
+ else:
+ self.logger.info("Establishing AWS API connection succeeded.")
+
+ return True, caller_identity_info
+
+ def check_iam_policy(self, iam_user_arn, desired_actions):
+ """
+ Check whether all actions in the list desired_actions are allowed for the \
+ specified IAM user ARN according to its IAM policy.
+
+ :param iam_user_arn: ARN of the IAM user being checked.
+ :param desired_actions: List of AWS API actions to check.
+ :returns: True iff all actions in desired_actions are allowed.
+ """
+ client_iam = boto3.client("iam")
+
+ try:
+ response = client_iam.simulate_principal_policy(PolicySourceArn=iam_user_arn, ActionNames=desired_actions)
+ except ClientError as error:
+ self.logger.info("AWS API actions check failed.")
+ self.logger.debug(error)
+ return False
+
+ success = True
+ for evaluation_result in response["EvaluationResults"]:
+ action_name = evaluation_result["EvalActionName"]
+ if evaluation_result["EvalDecision"] != "allowed":
+ self.logger.debug(f"The AWS API action {action_name} is denied for IAM user {iam_user_arn}.")
+ success = False
+
+ if success:
+ self.logger.info("AWS API actions check succeeded.")
+
+ return success
+
+ def check_organization_existence(self):
+ """
+ Check whether an AWS organization exists for the AWS API caller's account.
+
+ :returns: First tuple element indicates success.
+ Second tuple element describes the organization's properties, and is None otherwise.
+ """
+ client_organizations = boto3.client("organizations")
+
+ try:
+ response_org = client_organizations.describe_organization()
+ except ClientError as error:
+ self.logger.info("AWS organization existence check failed.")
+ self.logger.debug(error)
+ return False, None
+ else:
+ self.logger.info("AWS organization existence check succeeded.")
+
+ return True, response_org["Organization"]
+
+ def check_is_management_account(self, api_caller_info, organization_info):
+ """
+ Check whether the AWS API caller's account ID equals the organization's management account ID.
+
+ :returns: True iff the current organization's management account ID equals the AWS API caller's account ID.
+ """
+ management_account_id = organization_info["MasterAccountId"]
+ api_caller_account_id = api_caller_info["Account"]
+ is_management_account = management_account_id == api_caller_account_id
+
+ if is_management_account:
+ self.logger.info("Management account check succeeded.")
+ else:
+ self.logger.info("Management account check failed.")
+ self.logger.debug(f"The organization's management account ID is: '{management_account_id}'.")
+ self.logger.debug(f"The AWS API caller account ID is: '{api_caller_account_id}'.")
+
+ return is_management_account
+
+ def check_scp_enabled(self, organization_info):
+ """
+ Check whether the SCP policy type is an enabled feature for the AWS organization.
+
+ :returns: True iff the SCP policy type feature is enabled for the organization.
+ """
+ scp_is_enabled = False
+ for policy in organization_info["AvailablePolicyTypes"]:
+ if policy["Type"] == "SERVICE_CONTROL_POLICY" and policy["Status"] == "ENABLED":
+ scp_is_enabled = True
+ break
+
+ if not scp_is_enabled:
+ self.logger.info("The SCP policy type is disabled for the organization.")
+ self.logger.debug(organization_info["AvailablePolicyTypes"])
+ else:
+ self.logger.info("Organization SCP policy status check succeeded.")
+
+ return scp_is_enabled
+
+ def pipeline_preconditions(self):
+ """
+ Check all crucial pipeline preconditions.
+
+ 1. Locatable boto3 credentials and successful AWS API connection
+ 2. Check allowed AWS API actions based on IAM policy of caller
+ 3. Existing organization for AWS API caller
+ 4. AWS API caller acts under same account ID as organization's management account ID
+ 5. SCP policy type feature enabled for organization
+
+ :return: True iff all pipeline preconditions are met.
+ """
+ check_api_connection, api_caller_info = self.check_aws_api_connection()
+ if not check_api_connection:
+ return False
+
+ check_api_actions = self.check_iam_policy(api_caller_info["Arn"], self.required_aws_actions)
+ if not check_api_actions:
+ return False
+
+ check_org_existence, organization_info = self.check_organization_existence()
+ self.org_info = organization_info
+ if not check_org_existence:
+ return False
+
+ check_acc_management = self.check_is_management_account(api_caller_info, organization_info)
+ if not check_acc_management:
+ return False
+
+ check_scp_enabled = self.check_scp_enabled(organization_info)
+ if not check_scp_enabled:
+ return False
+
+ return True
+
+ def pipeline_policy(self, ou_id):
+ """
+ Attach the existing SCP policy (self.policy_id) to the organizational unit of the current semester.
+
+ :param ou_id: ID of the organizational unit for the current semester.
+ :return: True iff the policy to be attached to the OU already exists and is successfully attached.
+ """
+ client = boto3.client("organizations")
+ try:
+ client.describe_policy(PolicyId=self.policy_id)
+ except ClientError as error:
+ self.logger.debug(error)
+ return False
+
+ self.attach_scp_policy(self.policy_id, ou_id)
+ if self.fail:
+ return False
+ return True
+
+ def pipeline_create_account(self, sync_data):
+ """
+ Create a single new AWS member account in the organization of the API caller.
+
+ The status of the member account request is repeatedly checked based on the class' attributes:
+ self.ACCOUNT_REQUEST_INTERVAL_SECONDS: thread sleeping time before each status check
+ self.ACCOUNT_REQUEST_MAX_ATTEMPTS: maximum number of times to thread sleep and check
+
+ :param sync_data: SyncData object with the e-mail address, project slug and semester of the new member account.
+ :returns: (True, account_id) on success and otherwise (False, failure_reason).
+ """
+ client = boto3.client("organizations")
+
+ # Request new member account.
+ try:
+ response_create = client.create_account(
+ Email=sync_data.project_email,
+ AccountName=sync_data.project_slug,
+ IamUserAccessToBilling="DENY",
+ Tags=[
+ {"Key": "project_slug", "Value": sync_data.project_slug},
+ {"Key": "project_semester", "Value": sync_data.project_semester},
+ ],
+ )
+ except ClientError as error:
+ self.logger.debug(error)
+ return False, "CLIENTERROR_CREATE_ACCOUNT"
+
+ # Repeatedly check status of new member account request.
+ request_id = response_create["CreateAccountStatus"]["Id"]
+ for _ in range(1, self.ACCOUNT_REQUEST_MAX_ATTEMPTS + 1):
+ time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS)
+
+ try:
+ response_status = client.describe_create_account_status(CreateAccountRequestId=request_id)
+ except ClientError as error:
+ self.logger.debug(error)
+ return False, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS"
+
+ request_state = response_status["CreateAccountStatus"]["State"]
+ if request_state == "FAILED":
+ return False, response_status["CreateAccountStatus"]["FailureReason"]
+ elif request_state == "SUCCEEDED":
+ return True, response_status["CreateAccountStatus"]["AccountId"]
+
+ return False, "STILL_IN_PROGRESS"
+
+ def pipeline_create_and_move_accounts(self, new_member_accounts, root_id, destination_ou_id):
+ """
+ Create multiple accounts in the organization of the API caller and move them from the root to a destination OU.
+
+ :param new_member_accounts: List of SyncData objects.
+ :param root_id: The organization's root ID.
+ :param destination_ou_id: The organization's destination OU ID.
+ :returns: True iff **all** new member accounts were created and moved successfully.
+ """
+ client = boto3.client("organizations")
+ overall_success = True
+
+ for new_member in new_member_accounts:
+ success, response = self.pipeline_create_account(new_member)
+ if success:
+ account_id = response
+ try:
+ root_id = client.list_roots()["Roots"][0]["Id"]
+ client.move_account(
+ AccountId=account_id, SourceParentId=root_id, DestinationParentId=destination_ou_id
+ )
+ except ClientError as error:
+ self.logger.debug(error)
+ overall_success = False
+ else:
+ failure_reason = response
+ self.logger.debug(failure_reason)
+ overall_success = False
+
+ return overall_success
+
+ def pipeline_update_current_course_iteration_ou(self, aws_tree):
+ """
+ Ensure an OU exists for the current course iteration, creating it if it is not already in the AWS tree.
+
+ :param aws_tree: The AWS tree to be checked.
+ :returns: (True, iteration_ou_id) on success and (False, failure_reason) otherwise.
+ """
+ is_current_iteration, iteration_ou_id = self.check_current_ou_exists(aws_tree)
+
+ if not is_current_iteration:
+ iteration_name = str(Semester.objects.get_or_create_current_semester())
+ iteration_ou_id = self.create_course_iteration_OU(iteration_name)
+
+ if not self.fail:
+ return True, iteration_ou_id
+ else:
+ return False, "ITERATION_OU_CREATION_FAILED"
+
+ def pipeline(self):
+ """
+ Single pipeline that integrates all building blocks for the AWS integration process.
+
+ :return: True iff all pipeline stages successfully executed.
+ """
+ # Check preconditions.
+ if not self.pipeline_preconditions():
+ return False
+
+ # Get synchronization data.
+ client = boto3.client("organizations")
+ try:
+ root_id = client.list_roots()["Roots"][0]["Id"]
+ except ClientError as error:
+ self.logger.debug("Failed to retrieve root ID of organization.")
+ self.logger.debug(error)
+ return False
+
+ aws_tree = self.extract_aws_setup(root_id)
+ if self.fail:
+ self.logger.debug("Extracting AWS setup failed.")
+ return False
+
+ aws_sync_data = aws_tree.awstree_to_syncdata_list()
+ giphouse_sync_data = self.get_emails_with_teamids()
+ merged_sync_data = self.generate_aws_sync_list(giphouse_sync_data, aws_sync_data)
+
+ # Check edge cases.
+ if self.check_for_double_member_email(aws_sync_data, merged_sync_data):
+ return False
+
+ success, incorrect_emails = self.check_members_in_correct_iteration(aws_tree)
+ if not success:
+ self.logger.debug(f"Got incorrectly placed AWS member accounts: {incorrect_emails}.")
+ return False
+
+ failure, double_iteration_names = self.check_double_iteration_names(aws_tree)
+ if failure:
+ self.logger.debug(f"Found double iteration names: {double_iteration_names}.")
+ return False
+
+ # Check/create course iteration OU.
+ current_course_iteration_exists, response = self.pipeline_update_current_course_iteration_ou(aws_tree)
+ if not current_course_iteration_exists:
+ failure_reason = response
+ self.logger.debug(failure_reason)
+ return False
+ course_iteration_ou_id = response
+
+ # Create and attach SCP policy to course iteration OU.
+ if not self.pipeline_policy(course_iteration_ou_id):
+ return False
+
+ # Create new member accounts and move to course iteration OU.
+ if not self.pipeline_create_and_move_accounts(merged_sync_data, root_id, course_iteration_ou_id):
+ return False
+
+ return True
def check_for_double_member_email(self, aws_list: list[SyncData], sync_list: list[SyncData]):
"""Check if no users are assigned to multiple projects."""
@@ -258,7 +641,7 @@ def check_current_ou_exists(self, AWSdata: AWSTree):
Expects the AWS data in the AWSTree structure returned by the function that retrieves the AWS setup.
"""
- current = Semester.objects.get_or_create_current_semester()
+ current = str(Semester.objects.get_or_create_current_semester())
for iteration in AWSdata.iterations:
if current == iteration.name:
@@ -266,7 +649,6 @@ def check_current_ou_exists(self, AWSdata: AWSTree):
return (False, None)
- # TODO: Do we want to check for this?
def check_members_in_correct_iteration(self, AWSdata: AWSTree):
"""Check if the data from the member tag matches the semester OU it is in."""
incorrect_emails = []
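
Since pipeline_create_account sleeps ACCOUNT_REQUEST_INTERVAL_SECONDS between status polls, local experiments or additional tests may want to shrink these attributes. A minimal sketch, assuming the AWSSync and SyncData classes above (the values are illustrative):

    sync = awssync.AWSSync()
    sync.ACCOUNT_REQUEST_INTERVAL_SECONDS = 0  # do not sleep between status polls
    sync.ACCOUNT_REQUEST_MAX_ATTEMPTS = 1      # check the request status once
    success, info = sync.pipeline_create_account(
        awssync.SyncData("alice@example.com", "alice", "Spring 2023")
    )
    # info is the new account ID on success, or a failure reason otherwise.
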
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index 8a0c1b12..d2e7cec7 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -1,7 +1,7 @@
"""Tests for awssync.py."""
import json
-from unittest.mock import patch
+from unittest.mock import MagicMock, patch
import boto3
@@ -10,7 +10,7 @@
from django.test import TestCase
-from moto import mock_organizations
+from moto import mock_organizations, mock_sts
from courses.models import Semester
@@ -50,6 +50,9 @@ def setUp(self):
def tearDown(self):
self.mock_org.stop()
+ def simulateFailure(self):
+ self.sync.fail = True
+
def test_button_pressed(self):
"""Test button_pressed function."""
return_value = self.sync.button_pressed()
@@ -73,7 +76,7 @@ def test_create_course_iteration_OU(self):
moto_client = boto3.client("organizations")
org = self.sync
org.create_aws_organization()
- org.create_course_iteration_OU(1)
+ org.create_course_iteration_OU("1")
describe_unit = moto_client.describe_organizational_unit(OrganizationalUnitId=org.iterationOU_info["Id"])[
"OrganizationalUnit"
]
@@ -81,14 +84,15 @@ def test_create_course_iteration_OU(self):
def test_create_course_iteration_OU_without_organization(self):
org = self.sync
- org.create_course_iteration_OU(1)
+ org.create_course_iteration_OU("1")
self.assertTrue(org.fail)
def test_create_course_iteration_OU__exception(self):
org = self.sync
org.create_aws_organization()
- with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
- org.create_course_iteration_OU(1)
+ with patch("boto3.client") as mocker:
+ mocker().list_roots.side_effect = ClientError({}, "list_roots")
+ org.create_course_iteration_OU("1")
self.assertTrue(org.fail)
def test_get_all_mailing_lists(self):
@@ -193,6 +197,663 @@ def test_attach_scp_policy__exception(self):
self.assertTrue(self.sync.fail)
+ @mock_sts
+ def test_check_aws_api_connection(self):
+ success, caller_identity_info = self.sync.check_aws_api_connection()
+
+ self.assertTrue(success)
+ self.assertIsNotNone(caller_identity_info)
+
+ @mock_sts
+ def test_check_aws_api_connection__exception(self):
+ with patch("boto3.client") as mocker:
+ mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity")
+ mocker.return_value = mocker
+ success, caller_identity_info = self.sync.check_aws_api_connection()
+
+ self.assertFalse(success)
+ self.assertIsNone(caller_identity_info)
+
+ # IAM simulate_principal_policy is not covered by moto.
+ def test_check_iam_policy(self):
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ # success == True
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ success = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+ self.assertTrue(success)
+
+ # success == False
+ mock_evaluation_results["EvaluationResults"][0]["EvalDecision"] = "implicitDeny"
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ success = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+ self.assertFalse(success)
+
+ def test_check_iam_policy__exception(self):
+ iam_user_arn = "daddy"
+ desired_actions = []
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.side_effect = ClientError({}, "simulate_principal_policy")
+ success = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ self.assertFalse(success)
+
+ def test_check_organization_existence(self):
+ moto_client = boto3.client("organizations")
+ organization_create_info = moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ success, organization_describe_info = self.sync.check_organization_existence()
+
+ self.assertTrue(success)
+ self.assertEqual(organization_create_info, organization_describe_info)
+
+ def test_check_organization_existence__exception(self):
+ with patch("boto3.client") as mocker:
+ mocker.describe_organization.side_effect = ClientError({}, "describe_organization")
+ mocker.return_value = mocker
+ success, organization_info = self.sync.check_organization_existence()
+
+ self.assertFalse(success)
+ self.assertIsNone(organization_info)
+
+ @mock_sts
+ def test_check_is_management_account(self):
+ moto_client = boto3.client("organizations")
+
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ _, caller_identity_info = self.sync.check_aws_api_connection()
+ _, organization_info = self.sync.check_organization_existence()
+
+ # is_management_account == True
+ success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info)
+ self.assertTrue(success_acc)
+
+ # is_management_account == False
+ caller_identity_info["Account"] = "daddy"
+ success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info)
+ self.assertFalse(success_acc)
+
+ def test_check_scp_enabled(self):
+ moto_client = boto3.client("organizations")
+
+ # SCP enabled.
+ organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ scp_is_enabled = self.sync.check_scp_enabled(organization_info)
+ self.assertTrue(scp_is_enabled)
+
+ # SCP semi-disabled (pending).
+ organization_info["AvailablePolicyTypes"][0]["Status"] = "PENDING_DISABLE"
+ scp_is_enabled = self.sync.check_scp_enabled(organization_info)
+ self.assertFalse(scp_is_enabled)
+
+ # SCP disabled (empty list).
+ organization_info["AvailablePolicyTypes"] = []
+ scp_is_enabled = self.sync.check_scp_enabled(organization_info)
+ self.assertFalse(scp_is_enabled)
+
+ @mock_sts
+ def test_pipeline_preconditions__all_success(self):
+ # Create organization.
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ mocker.return_value = check_iam_policy
+ success = self.sync.pipeline_preconditions()
+
+ self.assertTrue(success)
+
+ @mock_sts
+ def test_pipeline_preconditions__no_connection(self):
+ with patch("boto3.client") as mocker:
+ mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity")
+ mocker.return_value = mocker
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ def test_pipeline_preconditions__no_iam(self):
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "implicitDeny",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_api_actions = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ mocker.return_value = check_api_actions
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ @mock_sts
+ def test_pipeline_preconditions__no_organization(self):
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ mocker.return_value = check_iam_policy
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ @mock_sts
+ def test_pipeline_preconditions__no_management(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")
+
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam:
+ mocker_iam.return_value = check_iam_policy
+ with patch("projects.awssync.AWSSync.check_aws_api_connection") as mocker_api:
+ mocker_api.return_value = True, {"Account": "daddy", "Arn": "01234567890123456789"}
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ @mock_sts
+ def test_pipeline_preconditions__no_scp(self):
+ moto_client = boto3.client("organizations")
+
+ organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam:
+ mocker_iam.return_value = check_iam_policy
+
+ # Mock return value of check_organization_existence with no SCP policy enabled.
+ organization_info["AvailablePolicyTypes"] = []
+ with patch("projects.awssync.AWSSync.check_organization_existence") as mocker:
+ mocker.return_value = True, organization_info
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ """
+ def test_pipeline_create_scp_policy(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+
+ policy = self.sync.pipeline_create_scp_policy()
+
+ self.assertFalse(self.sync.fail)
+ self.assertEqual(policy["PolicySummary"]["Name"], policy_name)
+ self.assertEqual(policy["PolicySummary"]["Description"], policy_description)
+ self.assertEqual(policy["Content"], json.dumps(policy_content))
+
+ def test_pipeline_create_scp_policy__exception(self):
+ self.sync.create_aws_organization()
+
+ with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
+ policy = self.sync.pipeline_create_scp_policy()
+
+ self.assertTrue(self.sync.fail)
+ self.assertIsNone(policy)
+ """
+
+ def test_pipeline_policy(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+ self.sync.policy_id = policy["PolicySummary"]["Id"]
+
+ ou_id = self.sync.create_course_iteration_OU("Test")
+
+ success = self.sync.pipeline_policy(ou_id)
+ self.assertTrue(success)
+
+ def test_pipeline_policy__exception(self):
+ self.sync.create_aws_organization()
+
+ ou_id = self.sync.create_course_iteration_OU("Test")
+
+ success = self.sync.pipeline_policy(ou_id)
+ self.assertFalse(success)
+
+ def test_pipeline_policy__failure_attach(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+ self.sync.policy_id = policy["PolicySummary"]["Id"]
+
+ ou_id = self.sync.create_course_iteration_OU("Test")
+
+ self.sync.attach_scp_policy = MagicMock(side_effect=self.simulateFailure())
+
+ success = self.sync.pipeline_policy(ou_id)
+ self.assertFalse(success)
+
+ @mock_sts
+ def test_pipeline(self):
+ moto_client = boto3.client("organizations")
+
+ # pipeline_preconditions() == False
+ success = self.sync.pipeline()
+ self.assertFalse(success)
+
+ # pipeline_preconditions() == True
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+ self.sync.policy_id = policy["PolicySummary"]["Id"]
+
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ mocker.return_value = check_iam_policy
+ success = self.sync.pipeline()
+
+ self.assertTrue(success)
+
+ def test_pipeline__exception_list_roots(self):
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().list_roots.side_effect = ClientError({}, "list_roots")
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__edge_case_double_emails(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ aws_tree = awssync.AWSTree(
+ "Root",
+ "123",
+ [
+ awssync.Iteration(
+ "Spring 2023",
+ "456",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Spring 2023"),
+ ],
+ )
+ ],
+ )
+
+ gip_teams = [
+ awssync.SyncData("email1@example.com", "project1", "Spring 2023"),
+ awssync.SyncData("email1@example.com", "project2", "Spring 2023"),
+ ]
+
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
+ self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__edge_case_incorrectly_placed(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ aws_tree = awssync.AWSTree(
+ "Root",
+ "123",
+ [
+ awssync.Iteration(
+ "Fall 2023",
+ "456",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Spring 2023"),
+ ],
+ )
+ ],
+ )
+
+ gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")]
+
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
+ self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__edge_case_double_iteration_names(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ aws_tree = awssync.AWSTree(
+ "Root",
+ "123",
+ [
+ awssync.Iteration(
+ "Spring 2023", "456", [awssync.SyncData("email1@example.com", "project1", "Spring 2023")]
+ ),
+ awssync.Iteration("Spring 2023", "789", []),
+ ],
+ )
+
+ gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")]
+
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
+ self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__failed_creating_iteration_ou(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ with patch("boto3.client") as mocker:
+ mocker().create_organizational_unit.side_effect = ClientError({}, "create_organizational_unit")
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__exception_attaching_policy(self):
+ self.sync.create_aws_organization()
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().attach_policy.side_effect = ClientError(
+ {"Error": {"Code": "PolicyTypeNotEnabledException"}}, "attach_policy"
+ )
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__already_attached_policy(self):
+ self.sync.create_aws_organization()
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().attach_policy.side_effect = ClientError(
+ {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy"
+ )
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__failed_create_and_move_account(self):
+ self.sync.create_aws_organization()
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().move_account.side_effect = ClientError({}, "move_account")
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__exception_extract_aws_setup(self):
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().list_organizational_units_for_parent.side_effect = ClientError(
+ {}, "list_organizational_units_for_parent"
+ )
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline_update_current_course_iteration_ou___failure_check_current_ou(self):
+
+ self.sync.check_current_ou_exists = MagicMock(return_value=(False, None))
+
+ self.sync.create_aws_organization()
+ success, id = self.sync.pipeline_update_current_course_iteration_ou(None)
+ self.assertTrue(success)
+ self.assertFalse(id is None)
+
+ def test_pipeline_update_current_course_iteration_ou___success(self):
+
+ self.sync.check_current_ou_exists = MagicMock(return_value=(True, "1234"))
+
+ self.sync.create_aws_organization()
+ success, id = self.sync.pipeline_update_current_course_iteration_ou(None)
+ self.assertTrue(success)
+ self.assertEquals(id, "1234")
+
+ def test_pipeline_update_current_course_iteration_ou___failure_create_ou(self):
+
+ self.sync.check_current_ou_exists = MagicMock(return_value=(False, None))
+ self.sync.create_course_iteration_OU = MagicMock(side_effect=self.simulateFailure())
+
+ self.sync.create_aws_organization()
+ success, failure_reason = self.sync.pipeline_update_current_course_iteration_ou(None)
+
+ self.assertFalse(success)
+ self.assertEquals(failure_reason, "ITERATION_OU_CREATION_FAILED")
+ self.assertTrue(self.sync.fail)
+
+ def test_pipeline_create_account(self):
+ self.sync.create_aws_organization()
+
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertTrue(success)
+ self.assertIsNotNone(response)
+
+ def test_pipeline_create_account__exception_create_account(self):
+ self.sync.create_aws_organization()
+
+ with patch("boto3.client") as mocker:
+ mocker().create_account.side_effect = ClientError({}, "create_account")
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertFalse(success)
+ self.assertEquals(response, "CLIENTERROR_CREATE_ACCOUNT")
+
+ def test_pipeline_create_account__exception_describe_account_status(self):
+ self.sync.create_aws_organization()
+
+ with patch("boto3.client") as mocker:
+ mocker().describe_create_account_status.side_effect = ClientError({}, "describe_create_account_status")
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertFalse(success)
+ self.assertEquals(response, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS")
+
+ def test_pipeline_create_account__state_failed(self):
+ self.sync.create_aws_organization()
+
+ with patch("boto3.client") as mocker:
+ response = {"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}}
+ mocker().describe_create_account_status.return_value = response
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertFalse(success)
+ self.assertEquals(response, "EMAIL_ALREADY_EXISTS")
+
+ def test_pipeline_create_account__state_in_progress(self):
+ self.sync.create_aws_organization()
+
+ with patch("boto3.client") as mocker:
+ response = {
+ "CreateAccountStatus": {
+ "State": "IN_PROGRESS",
+ }
+ }
+ mocker().describe_create_account_status.return_value = response
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertFalse(success)
+ self.assertEquals(response, "STILL_IN_PROGRESS")
+
+ def test_pipeline_create_and_move_accounts(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+
+ new_member_accounts = [
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023"),
+ awssync.SyncData("bob@example.com", "bob", "Spring 2023"),
+ ]
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+ course_iteration_id = self.sync.create_course_iteration_OU("Spring 2023")
+
+ success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
+ self.assertTrue(success)
+
+ def test_pipeline_create_and_move_accounts__email_exists(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+
+ new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")]
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+ course_iteration_id = self.sync.create_course_iteration_OU("2023Fall")
+
+ with patch("projects.awssync.AWSSync.pipeline_create_account") as mocker:
+ mocker.return_value = False, "EMAIL_ALREADY_EXISTS"
+ success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
+
+ self.assertFalse(success)
+
+ def test_pipeline_create_and_move_accounts__exception_move_account(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+
+ new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")]
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+ course_iteration_id = self.sync.create_course_iteration_OU("2023Fall")
+
+ self.sync.pipeline_create_account = MagicMock(return_value=(True, 1234))
+ with patch("boto3.client") as mocker:
+ mocker().move_account.side_effect = ClientError({}, "move_account")
+ success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
+
+ self.assertFalse(success)
+
@mock_organizations
def test_get_aws_data(self):
moto_client = boto3.client("organizations")
From 5ae9396671f5946d5d76074aa5cf54f6720528b2 Mon Sep 17 00:00:00 2001
From: Filip Łysak <92109241+FilipLysak001@users.noreply.github.com>
Date: Sat, 22 Apr 2023 15:38:50 +0200
Subject: [PATCH 17/32] Sprint 2 deliverable (#43)
* AWS synchronisation button (#8)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Removed unnecessary docstrings.
* Add boto3 and moto dependencies (#11)
* Add logger and replace prints with logs
* Add function to create AWS organization
* Add unit tests for creating AWS organization
* bugfix (#619)
Co-authored-by: nvoers
* Added logger setlevel (#20)
* Db sync (#16)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Added get mailinglist to the awssync file
* Added first version of get_all_managers
* Added test case for mailing lists
* Removed some prints
* reformatted using black
* flake8 indentation added
* flake8 indentation correction
* Removed get manager
* Linting added
* unused import removed
* Added get_teamid_from_email
* Changed function email with teamid
* Updated get_emails_with_teamids, working now
* Added test for get_emails_with_ids
* Added linting
* linting
* Added more tests
* Linting in awssync and its test file
* Moved the imports around
* moved the imports around
* Black linting
* switched imports around
* Switched imports around part 2
* Switched imports around part 3
* Switched imports around part 4
* Fixed when no project exists for mailing list
* Added some more tests
* Removed exception try except
* Black linting
* Changed get_email_with_teamid to new format
* Changed get_emails_with_teamids to go over project
* Added tests for get_emails_with_teamids
* changed info for get_emails_with_teamids
---------
Co-authored-by: Henk
* Db sync (#25)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Added get mailinglist to the awssync file
* Added first version of get_all_managers
* Added test case for mailing lists
* Removed some prints
* reformatted using black
* flake8 indentation added
* flake8 indentation correction
* Removed get manager
* Linting added
* unused import removed
* Added get_teamid_from_email
* Changed function email with teamid
* Updated get_emails_with_teamids, working now
* Added test for get_emails_with_ids
* Added linting
* linting
* Added more tests
* Linting in awssync and its test file
* Moved the imports around
* moved the imports around
* Black linting
* switched imports around
* Switched imports around part 2
* Switched imports around part 3
* Switched imports around part 4
* Fixed when no project exists for mailing list
* Added some more tests
* Removed exception try except
* Black linting
* Changed get_email_with_teamid to new format
* Changed get_emails_with_teamids to go over project
* Added tests for get_emails_with_teamids
* changed info for get_emails_with_teamids
* Changed email data dict to struct
* added test for TypeError exception for eq operator
* resolved linting errors
* changed comment to correct datatype
* dramatically improved test class name
---------
Co-authored-by: Henk
Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com>
Co-authored-by: Jer111
* Added function to generate which users have to be invited after the sync button is pressed (#23)
* Added 'generate_aws_sync_list' function and tests
* solved black errors
* changed generate_aws_sync_list to use SyncData structure
* Create and attach SCP policies (#29)
* Add functions for creating and attaching SCP policies
* Improve test cases
* 12 moto helper (#36)
* merged with development and added create_c_i_OU
* Added some tests for create_c_i_OU
* Added some tests for create_c_i_ou
* Linting
* Changed the mock_api call back to original
* Added create_team_ou with tests
* Fix problems with moto testing
* Worked on tests and added apitalkerclass
* Make test asserts more meaningful
* black
* Added tests for create_ou's without parts
* Added one test that gets all children under OU
* Fix linting
* Changed return of response
create team ou did not save the name of the team OU
* Fix test create team OU
* Resolved linting issues
* Fix flake8
* remove create_team_ou
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
Co-authored-by: Fouad Lamsettef
* Add checks for edge cases between AWS and Giphouse databases (#37)
* added double user check (partly)
* added some checks and made two new fancy classes for the storage of AWS tree dictionaries
* added tests
* added equals for AWSTree and Iteration objects
* test stupid error
* does it work now?
* resolved merge conflicts with rebasing on development
* cleaned up code based on pull request comments
* Extraction of AWS data
* Added function for extracting data
* Added mock format
* finished function (testing needed)
* Linting fix
* fix no return of tree
* Fix AttributeError
* Unwrap tuple instead of accessing by element to increase code readability
* Fixed for new struct
* Implementation bug fixes
* added tests (not done)
* Removed classes for merge
* Added function for extracting data
* Added mock format
* finished function (testing needed)
* Linting fix
* fix no return of tree
* Fix AttributeError
* Unwrap tuple instead of accessing by element to increase code readability
* Fixed for new struct
* Implementation bug fixes
* added tests (not done)
* Linting fix
* git fixes
* Black fix
* pydocstyle fix
* Black fix again
* removed flake8 'fix'
* Final flake8 fix
* Final final flake8 fix
* spelling error fix
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
* AWS synchronization pipeline (and integration bug fixes) (#42)
* Create and attach SCP policies (#29)
* Add functions for creating and attaching SCP policies
* Improve test cases
* Add function template
* Fix 'a/an' grammar mistake
* Add pipeline preconditions
* Add pipeline preconditions tests
* Add checks for required API actions
* Add test cases for checking required API actions
* Added implementation of creating and attaching policy in the pipeline
* Remove double API actions
* Added implementation of creating and moving accounts in the pipeline. This should still be divided into smaller functions.
* Increase code readability
* Stop checking account request status after known failure
* Fixed small typos and added account details to a debug message about account creation failure
* Added tests for pipeline policy and fixed typos in debug messages.
* Split creating and moving accounts into multiple functions, and handle exceptions
* added update_course_iteration_ou with tests
* updated pipeline_update_current_course_iteration
* Add test cases for creating and moving member accounts
* Bug fixes for pipeline dependencies that arose from integration sprint 2 tasks
* Revised pipeline policy function and corresponding tests so that it should not fail after first pipeline run
* Change duplicate policy attachment to soft-fail; replace organization ID with root ID
---------
Co-authored-by: Henk
Co-authored-by: Jer111
---------
Co-authored-by: Henk Berendsen <61596108+hb140502@users.noreply.github.com>
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com>
Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com>
Co-authored-by: nvoers
Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com>
Co-authored-by: Henk
Co-authored-by: mitchellboes <49476235+mitchellboes@users.noreply.github.com>
Co-authored-by: Jer111
Co-authored-by: Fouad Lamsettef
---
website/projects/awssync.py | 678 ++++++++++++++
website/projects/tests/test_awssync.py | 1139 +++++++++++++++++++++++-
website/room_reservation/views.py | 2 +-
3 files changed, 1803 insertions(+), 16 deletions(-)
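Editorial note: the commit message above outlines the pipeline this patch introduces: precondition checks, SCP policy attachment, and member-account creation and placement. For orientation only, here is a sketch (not part of the patch) of driving the whole pipeline against moto's fake AWS backends, mirroring the test_pipeline test further below; it assumes a Django test case or shell with a migrated database, in which case it should come out True.

# Sketch only: run AWSSync.pipeline() end to end against moto's in-memory AWS.
# Assumes Django settings are configured and the database is available.
from unittest.mock import patch
from moto import mock_organizations, mock_sts
from projects.awssync import AWSSync

@mock_sts
@mock_organizations
def run_pipeline_sketch():
    sync = AWSSync()
    sync.create_aws_organization()
    # Point policy_id at an existing policy, as the tests below do.
    policy = sync.create_scp_policy(
        "DenyAll",
        "Deny all access.",
        {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]},
    )
    sync.policy_id = policy["PolicySummary"]["Id"]
    # IAM's simulate_principal_policy is not emulated by moto, so stub the check.
    with patch.object(AWSSync, "check_iam_policy", return_value=True):
        return sync.pipeline()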
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index e0f44734..96e71327 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -1,8 +1,94 @@
+"""Framework for synchronisation with Amazon Web Services (AWS)."""
+from __future__ import annotations
+
+import json
import logging
+import time
import boto3
from botocore.exceptions import ClientError
+from botocore.exceptions import NoCredentialsError
+
+from courses.models import Semester
+
+from mailing_lists.models import MailingList
+
+from projects.models import Project
+
+
+class SyncData:
+ """Structure for AWS giphouse sync data."""
+
+ def __init__(self, project_email, project_slug, project_semester):
+ """Create SyncData instance."""
+ self.project_email = project_email
+ self.project_slug = project_slug
+ self.project_semester = project_semester
+
+ def __eq__(self, other):
+ """Overload equals for SyncData type."""
+ if not isinstance(other, SyncData):
+ raise TypeError("Must compare to object of type SyncData")
+ return (
+ self.project_email == other.project_email
+ and self.project_slug == other.project_slug
+ and self.project_semester == other.project_semester
+ )
+
+ def __repr__(self):
+ """Overload to string function for SyncData type."""
+ return f"SyncData('{self.project_email}', '{self.project_slug}', '{self.project_semester}')"
+
+
+class Iteration:
+ """Datatype for AWS data in the Course iteration OU."""
+
+ def __init__(self, name, ou_id, members: list[SyncData]):
+ """Initialize Iteration object."""
+ self.name = name
+ self.ou_id = ou_id
+ self.members = members
+
+ def __repr__(self):
+ """Overload to string function for Iteration datatype."""
+ return f"Iteration('{self.name}', '{self.ou_id}', {self.members})"
+
+ def __eq__(self, other: Iteration) -> bool:
+ """Overload equals operator for Iteration objects."""
+ if not isinstance(other, Iteration):
+ raise TypeError("Must compare to object of type Iteration")
+ return self.name == other.name and self.ou_id == other.ou_id and self.members == other.members
+
+
+class AWSTree:
+ """Tree structure for AWS data."""
+
+ def __init__(self, name, ou_id, iterations: list[Iteration]):
+ """Initialize AWSTree object."""
+ self.name = name
+ self.ou_id = ou_id
+ self.iterations = iterations
+
+ def __repr__(self):
+ """Overload to string function for AWSTree object."""
+ return f"AWSTree('{self.name}', '{self.ou_id}', {self.iterations})"
+
+ def __eq__(self, other: AWSTree) -> bool:
+ """Overload equals operator for AWSTree objects."""
+ if not isinstance(other, AWSTree):
+ raise TypeError("Must compare to object of type AWSTree")
+ return self.name == other.name and self.ou_id == other.ou_id and self.iterations == other.iterations
+
+ def awstree_to_syncdata_list(self):
+ """Convert AWSTree to list of SyncData elements."""
+ awslist = []
+
+ for iteration in self.iterations:
+ for member in iteration.members:
+ awslist.append(member)
+
+ return awslist
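Editorial aside: a small usage sketch (not part of the patch) of how the three data classes above fit together; the values are made up and a configured Django environment is assumed so the module imports.

# Sketch only: compose the data classes and flatten the tree back to SyncData.
from projects.awssync import AWSTree, Iteration, SyncData

member = SyncData("alice@example.com", "alice", "Spring 2023")
tree = AWSTree("root", "r-examplerootid111", [Iteration("Spring 2023", "ou-spring-2023", [member])])
assert tree.awstree_to_syncdata_list() == [member]  # __eq__ compares the three fields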
class AWSSync:
@@ -10,10 +96,72 @@ class AWSSync:
def __init__(self):
"""Create an AWSSync instance."""
+ self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 5
+ self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3
+
self.logger = logging.getLogger("django.aws")
self.logger.setLevel(logging.DEBUG)
self.org_info = None
+ self.iterationOU_info = None
+ self.policy_id = "p-examplepolicyid111"
self.fail = False
+ self.required_aws_actions = [
+ # "organizations:AcceptHandshake",
+ "organizations:AttachPolicy",
+ # "organizations:CancelHandshake",
+ # "organizations:CloseAccount",
+ "organizations:CreateAccount",
+ # "organizations:CreateGovCloudAccount",
+ "organizations:CreateOrganization",
+ "organizations:CreateOrganizationalUnit",
+ "organizations:CreatePolicy",
+ # "organizations:DeclineHandshake",
+ # "organizations:DeleteOrganization",
+ "organizations:DeleteOrganizationalUnit",
+ "organizations:DeletePolicy",
+ "organizations:DeleteResourcePolicy",
+ # "organizations:DeregisterDelegatedAdministrator",
+ "organizations:DescribeAccount",
+ "organizations:DescribeCreateAccountStatus",
+ "organizations:DescribeEffectivePolicy",
+ # "organizations:DescribeHandshake",
+ "organizations:DescribeOrganization",
+ "organizations:DescribeOrganizationalUnit",
+ "organizations:DescribePolicy",
+ "organizations:DescribeResourcePolicy",
+ "organizations:DetachPolicy",
+ # "organizations:DisableAWSServiceAccess",
+ "organizations:DisablePolicyType",
+ # "organizations:EnableAWSServiceAccess",
+ # "organizations:EnableAllFeatures",
+ "organizations:EnablePolicyType",
+ # "organizations:InviteAccountToOrganization",
+ # "organizations:LeaveOrganization",
+ # "organizations:ListAWSServiceAccessForOrganization",
+ "organizations:ListAccounts",
+ "organizations:ListAccountsForParent",
+ "organizations:ListChildren",
+ "organizations:ListCreateAccountStatus",
+ # "organizations:ListDelegatedAdministrators",
+ # "organizations:ListDelegatedServicesForAccount",
+ # "organizations:ListHandshakesForAccount",
+ # "organizations:ListHandshakesForOrganization",
+ "organizations:ListOrganizationalUnitsForParent",
+ "organizations:ListParents",
+ "organizations:ListPolicies",
+ "organizations:ListPoliciesForTarget",
+ "organizations:ListRoots",
+ "organizations:ListTagsForResource",
+ "organizations:ListTargetsForPolicy",
+ "organizations:MoveAccount",
+ "organizations:PutResourcePolicy",
+ # "organizations:RegisterDelegatedAdministrator",
+ # "organizations:RemoveAccountFromOrganization",
+ "organizations:TagResource",
+ "organizations:UntagResource",
+ "organizations:UpdateOrganizationalUnit",
+ "organizations:UpdatePolicy",
+ ]
self.logger.info("Created AWSSync instance.")
def button_pressed(self):
@@ -23,8 +171,42 @@ def button_pressed(self):
:return: True if function executes successfully
"""
self.logger.info("Pressed button")
+ self.logger.debug(f"Pipeline result: {self.pipeline()}")
return True
+ def get_all_mailing_lists(self):
+ """
+ Get all mailing lists from the database.
+
+ :return: List of mailing lists
+ """
+ mailing_lists = MailingList.objects.all()
+ mailing_list_names = [ml.email_address for ml in mailing_lists]
+ return mailing_list_names
+
+ def get_emails_with_teamids(self):
+ """
+ Create a list of SyncData structs containing email, slug and semester.
+
+ Together, slug and semester form a uniqueness constraint.
+
+ :return: list of SyncData structs with email, slug and semester
+ """
+ email_ids = []
+
+ for project in (
+ Project.objects.filter(mailinglist__isnull=False)
+ .filter(semester=Semester.objects.get_or_create_current_semester())
+ .values("slug", "semester", "mailinglist")
+ ):
+ project_slug = project["slug"]
+ project_semester = str(Semester.objects.get(pk=project["semester"]))
+ project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address
+
+ sync_data = SyncData(project_email, project_slug, project_semester)
+ email_ids.append(sync_data)
+ return email_ids
+
def create_aws_organization(self):
"""Create an AWS organization with the current user as the management account."""
client = boto3.client("organizations")
@@ -37,3 +219,499 @@ def create_aws_organization(self):
self.logger.error("Something went wrong creating an AWS organization.")
self.logger.debug(f"{error}")
self.logger.debug(f"{error.response}")
+
+ def create_course_iteration_OU(self, iteration_name):
+ """
+ Create an OU for the course iteration.
+
+ :param iteration_name: The name of the course iteration OU
+
+ :return: The ID of the OU
+ """
+ client = boto3.client("organizations")
+ if self.org_info is None:
+ self.logger.info("No organization info found. Creating an AWS organization.")
+ self.fail = True
+ else:
+ try:
+ root_id = client.list_roots()["Roots"][0]["Id"]
+ response = client.create_organizational_unit(
+ ParentId=root_id,
+ Name=iteration_name,
+ )
+ self.logger.info(f"Created an OU for course iteration {iteration_name}.")
+ self.iterationOU_info = response["OrganizationalUnit"]
+ return response["OrganizationalUnit"]["Id"]
+ except ClientError as error:
+ self.fail = True
+ self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_name}.")
+ self.logger.debug(f"{error}")
+ self.logger.debug(f"{error.response}")
+
+ def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]):
+ """
+ Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS.
+
+ This includes their ID and email address, to be able to put users in the correct AWS organization later.
+ """
+ sync_list = [x for x in giphouse_data if x not in aws_data]
+ return sync_list
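Editorial aside: to make the set-difference behaviour of generate_aws_sync_list concrete, a small sketch (not part of the patch; assumes a configured Django environment so the module imports):

# Sketch only: accounts already present in AWS are filtered out of the sync list.
from projects.awssync import AWSSync, SyncData

gip = [SyncData("a@giphouse.nl", "a", "Spring 2023"), SyncData("b@giphouse.nl", "b", "Spring 2023")]
aws = [SyncData("a@giphouse.nl", "a", "Spring 2023")]
assert AWSSync().generate_aws_sync_list(gip, aws) == [SyncData("b@giphouse.nl", "b", "Spring 2023")]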
+
+ def create_scp_policy(self, policy_name, policy_description, policy_content):
+ """
+ Create an SCP policy.
+
+ :param policy_name: The policy name.
+ :param policy_description: The policy description.
+ :param policy_content: The policy configuration as a dictionary.
+ The policy is automatically converted to JSON format, including escaped quotation marks.
+ :return: Details of newly created policy as a dict on success and NoneType object otherwise.
+ """
+ client = boto3.client("organizations")
+ try:
+ response = client.create_policy(
+ Content=json.dumps(policy_content),
+ Description=policy_description,
+ Name=policy_name,
+ Type="SERVICE_CONTROL_POLICY",
+ )
+ except ClientError as error:
+ self.fail = True
+ self.logger.error("Something went wrong creating an SCP policy.")
+ self.logger.error(error)
+ else:
+ return response["Policy"]
+
+ def attach_scp_policy(self, policy_id, target_id):
+ """
+ Attaches an SCP policy to a target (root, OU, or member account).
+
+ :param policy_id: The ID of the policy to be attached.
+ :param target_id: The ID of the target root, OU, or member account.
+ """
+ client = boto3.client("organizations")
+ try:
+ client.attach_policy(PolicyId=policy_id, TargetId=target_id)
+ except ClientError as error:
+ if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException":
+ self.fail = True
+ self.logger.error("Something went wrong attaching an SCP policy to a target.")
+ self.logger.debug(f"{error}")
+ self.logger.debug(f"{error.response}")
+
+ def check_aws_api_connection(self):
+ """
+ Check whether boto3 can connect to AWS API with current credentials.
+
+ :returns: First tuple element always exists and indicates success.
+ Second tuple element contains information about the entity
+ that made the API call on success, and is None otherwise.
+ """
+ client_sts = boto3.client("sts")
+ try:
+ caller_identity_info = client_sts.get_caller_identity()
+ except (NoCredentialsError, ClientError) as error:
+ self.logger.info("Establishing AWS API connection failed.")
+ self.logger.debug(error)
+ return False, None
+ else:
+ self.logger.info("Establishing AWS API connection succeeded.")
+
+ return True, caller_identity_info
+
+ def check_iam_policy(self, iam_user_arn, desired_actions):
+ """
+ Check for the specified IAM user ARN whether the actions in list \
+ desired_actions are allowed according to its IAM policy.
+
+ :param iam_user_arn: ARN of the IAM user being checked.
+ :param desired_actions: List of AWS API actions to check.
+ :returns: True iff all actions in desired_actions are allowed.
+ """
+ client_iam = boto3.client("iam")
+
+ try:
+ response = client_iam.simulate_principal_policy(PolicySourceArn=iam_user_arn, ActionNames=desired_actions)
+ except ClientError as error:
+ self.logger.info("AWS API actions check failed.")
+ self.logger.debug(error)
+ return False
+
+ success = True
+ for evaluation_result in response["EvaluationResults"]:
+ action_name = evaluation_result["EvalActionName"]
+ if evaluation_result["EvalDecision"] != "allowed":
+ self.logger.debug(f"The AWS API action {action_name} is denied for IAM user {iam_user_arn}.")
+ success = False
+
+ if success:
+ self.logger.info("AWS API actions check succeeded.")
+
+ return success
+
+ def check_organization_existence(self):
+ """
+ Check whether an AWS organization exists for the AWS API caller's account.
+
+ :returns: First tuple element always exists and indicates success.
+ Second tuple element describes properties of the organization on success, and is None otherwise.
+ """
+ client_organizations = boto3.client("organizations")
+
+ try:
+ response_org = client_organizations.describe_organization()
+ except ClientError as error:
+ self.logger.info("AWS organization existence check failed.")
+ self.logger.debug(error)
+ return False, None
+ else:
+ self.logger.info("AWS organization existence check succeeded.")
+
+ return True, response_org["Organization"]
+
+ def check_is_management_account(self, api_caller_info, organization_info):
+ """
+ Check whether the AWS API caller's account ID matches the organization's management account ID.
+
+ :returns: True iff the current organization's management account ID equals the AWS API caller's account ID.
+ """
+ management_account_id = organization_info["MasterAccountId"]
+ api_caller_account_id = api_caller_info["Account"]
+ is_management_account = management_account_id == api_caller_account_id
+
+ if is_management_account:
+ self.logger.info("Management account check succeeded.")
+ else:
+ self.logger.info("Management account check failed.")
+ self.logger.debug(f"The organization's management account ID is: '{management_account_id}'.")
+ self.logger.debug(f"The AWS API caller account ID is: '{api_caller_account_id}'.")
+
+ return is_management_account
+
+ def check_scp_enabled(self, organization_info):
+ """
+ Check whether the SCP policy type is an enabled feature for the AWS organization.
+
+ :returns: True iff the SCP policy type feature is enabled for the organization.
+ """
+ scp_is_enabled = False
+ for policy in organization_info["AvailablePolicyTypes"]:
+ if policy["Type"] == "SERVICE_CONTROL_POLICY" and policy["Status"] == "ENABLED":
+ scp_is_enabled = True
+ break
+
+ if not scp_is_enabled:
+ self.logger.info("The SCP policy type is disabled for the organization.")
+ self.logger.debug(organization_info["AvailablePolicyTypes"])
+ else:
+ self.logger.info("Organization SCP policy status check succeeded.")
+
+ return scp_is_enabled
+
+ def pipeline_preconditions(self):
+ """
+ Check all crucial pipeline preconditions.
+
+ 1. Locatable boto3 credentials and successful AWS API connection
+ 2. Check allowed AWS API actions based on IAM policy of caller
+ 3. Existing organization for AWS API caller
+ 4. AWS API caller acts under same account ID as organization's management account ID
+ 5. SCP policy type feature enabled for organization
+
+ :return: True iff all pipeline preconditions are met.
+ """
+ check_api_connection, api_caller_info = self.check_aws_api_connection()
+ if not check_api_connection:
+ return False
+
+ check_api_actions = self.check_iam_policy(api_caller_info["Arn"], self.required_aws_actions)
+ if not check_api_actions:
+ return False
+
+ check_org_existence, organization_info = self.check_organization_existence()
+ self.org_info = organization_info
+ if not check_org_existence:
+ return False
+
+ check_acc_management = self.check_is_management_account(api_caller_info, organization_info)
+ if not check_acc_management:
+ return False
+
+ check_scp_enabled = self.check_scp_enabled(organization_info)
+ if not check_scp_enabled:
+ return False
+
+ return True
+
+ def pipeline_policy(self, ou_id):
+ """
+ Verify that the configured SCP policy exists and attach it to the organizational unit of the current semester.
+
+ :param ou_id: ID of the organizational unit for the current semester.
+ :return: True iff the policy to be attached to the OU already exists and is successfully attached.
+ """
+ client = boto3.client("organizations")
+ try:
+ client.describe_policy(PolicyId=self.policy_id)
+ except ClientError as error:
+ self.logger.debug(error)
+ return False
+
+ self.attach_scp_policy(self.policy_id, ou_id)
+ if self.fail:
+ return False
+ return True
+
+ def pipeline_create_account(self, sync_data):
+ """
+ Create a single new AWS member account in the organization of the API caller.
+
+ The status of the member account request is repeatedly checked based on the class' attributes:
+ self.ACCOUNT_REQUEST_INTERVAL_SECONDS: thread sleeping time before each status check
+ self.ACCOUNT_REQUEST_MAX_ATTEMPTS: maximum number of times to thread sleep and check
+
+ :param sync_data: SyncData object containing the e-mail address, slug and semester of the new member account.
+ :returns: (True, account_id) on success and otherwise (False, failure_reason).
+ """
+ client = boto3.client("organizations")
+
+ # Request new member account.
+ try:
+ response_create = client.create_account(
+ Email=sync_data.project_email,
+ AccountName=sync_data.project_slug,
+ IamUserAccessToBilling="DENY",
+ Tags=[
+ {"Key": "project_slug", "Value": sync_data.project_slug},
+ {"Key": "project_semester", "Value": sync_data.project_semester},
+ ],
+ )
+ except ClientError as error:
+ self.logger.debug(error)
+ return False, "CLIENTERROR_CREATE_ACCOUNT"
+
+ # Repeatedly check status of new member account request.
+ request_id = response_create["CreateAccountStatus"]["Id"]
+ for _ in range(1, self.ACCOUNT_REQUEST_MAX_ATTEMPTS + 1):
+ time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS)
+
+ try:
+ response_status = client.describe_create_account_status(CreateAccountRequestId=request_id)
+ except ClientError as error:
+ self.logger.debug(error)
+ return False, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS"
+
+ request_state = response_status["CreateAccountStatus"]["State"]
+ if request_state == "FAILED":
+ return False, response_status["CreateAccountStatus"]["FailureReason"]
+ elif request_state == "SUCCEEDED":
+ return True, response_status["CreateAccountStatus"]["AccountId"]
+
+ return False, "STILL_IN_PROGRESS"
+
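Editorial aside: because the status-polling loop above sleeps ACCOUNT_REQUEST_INTERVAL_SECONDS seconds up to ACCOUNT_REQUEST_MAX_ATTEMPTS times, local experiments may want to shorten it; a sketch (not part of the patch), using the attribute names introduced above:

# Sketch only: shorten account-request polling so a stuck request surfaces
# as (False, "STILL_IN_PROGRESS") quickly instead of after ~15 seconds.
from projects.awssync import AWSSync

sync = AWSSync()
sync.ACCOUNT_REQUEST_INTERVAL_SECONDS = 0  # no sleep between status checks
sync.ACCOUNT_REQUEST_MAX_ATTEMPTS = 1      # give up after a single check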
+ def pipeline_create_and_move_accounts(self, new_member_accounts, root_id, destination_ou_id):
+ """
+ Create multiple accounts in the organization of the API caller and move them from the root to a destination OU.
+
+ :param new_member_accounts: List of SyncData objects.
+ :param root_id: The organization's root ID.
+ :param destination_ou_id: The organization's destination OU ID.
+ :returns: True iff **all** new member accounts were created and moved successfully.
+ """
+ client = boto3.client("organizations")
+ overall_success = True
+
+ for new_member in new_member_accounts:
+ success, response = self.pipeline_create_account(new_member)
+ if success:
+ account_id = response
+ try:
+ root_id = client.list_roots()["Roots"][0]["Id"]
+ client.move_account(
+ AccountId=account_id, SourceParentId=root_id, DestinationParentId=destination_ou_id
+ )
+ except ClientError as error:
+ self.logger.debug(error)
+ overall_success = False
+ else:
+ failure_reason = response
+ self.logger.debug(failure_reason)
+ overall_success = False
+
+ return overall_success
+
+ def pipeline_update_current_course_iteration_ou(self, aws_tree):
+ """
+ Ensure that an OU exists for the current course iteration, creating it if necessary.
+
+ :param aws_tree: The AWS tree to be checked.
+ :returns: (True, iteration_ou_id) on success and (False, failure_reason) otherwise.
+ """
+ is_current_iteration, iteration_ou_id = self.check_current_ou_exists(aws_tree)
+
+ if not is_current_iteration:
+ iteration_name = str(Semester.objects.get_or_create_current_semester())
+ iteration_ou_id = self.create_course_iteration_OU(iteration_name)
+
+ if not self.fail:
+ return True, iteration_ou_id
+ else:
+ return False, "ITERATION_OU_CREATION_FAILED"
+
+ def pipeline(self):
+ """
+ Single pipeline that integrates all building blocks for the AWS integration process.
+
+ :return: True iff all pipeline stages successfully executed.
+ """
+ # Check preconditions.
+ if not self.pipeline_preconditions():
+ return False
+
+ # Get synchronization data.
+ client = boto3.client("organizations")
+ try:
+ root_id = client.list_roots()["Roots"][0]["Id"]
+ except ClientError as error:
+ self.logger.debug("Failed to retrieve root ID of organization.")
+ self.logger.debug(error)
+ return False
+
+ aws_tree = self.extract_aws_setup(root_id)
+ if self.fail:
+ self.logger.debug("Extracting AWS setup failed.")
+ return False
+
+ aws_sync_data = aws_tree.awstree_to_syncdata_list()
+ giphouse_sync_data = self.get_emails_with_teamids()
+ merged_sync_data = self.generate_aws_sync_list(giphouse_sync_data, aws_sync_data)
+
+ # Check edge cases.
+ if self.check_for_double_member_email(aws_sync_data, merged_sync_data):
+ return False
+
+ success, incorrect_emails = self.check_members_in_correct_iteration(aws_tree)
+ if not success:
+ self.logger.debug(f"Got incorrectly placed AWS member accounts: {incorrect_emails}.")
+ return False
+
+ failure, double_iteration_names = self.check_double_iteration_names(aws_tree)
+ if failure:
+ self.logger.debug(f"Found double iteration names: {double_iteration_names}.")
+ return False
+
+ # Check/create course iteration OU.
+ current_course_iteration_exists, response = self.pipeline_update_current_course_iteration_ou(aws_tree)
+ if not current_course_iteration_exists:
+ failure_reason = response
+ self.logger.debug(failure_reason)
+ return False
+ course_iteration_ou_id = response
+
+ # Create and attach SCP policy to course iteration OU.
+ if not self.pipeline_policy(course_iteration_ou_id):
+ return False
+
+ # Create new member accounts and move to course iteration OU.
+ if not self.pipeline_create_and_move_accounts(merged_sync_data, root_id, course_iteration_ou_id):
+ return False
+
+ return True
+
+ def check_for_double_member_email(self, aws_list: list[SyncData], sync_list: list[SyncData]):
+ """Check if no users are assigned to multiple projects."""
+ sync_emails = [x.project_email for x in sync_list]
+ aws_emails = [x.project_email for x in aws_list]
+
+ duplicates = [email for email in sync_emails if email in aws_emails]
+
+ for duplicate in duplicates:
+ error = f"Email address {duplicate} is already in the list of members in AWS"
+ self.logger.info("An email clash occured while syncing.")
+ self.logger.debug(error)
+
+ if duplicates != []:
+ return True
+ return False
+
+ def check_current_ou_exists(self, AWSdata: AWSTree):
+ """
+ Check if the OU (organizational unit) for the current semester already exists in AWS.
+
+ The AWS data is passed as an AWSTree instance, as produced by the function that retrieves the AWS data.
+ """
+ current = str(Semester.objects.get_or_create_current_semester())
+
+ for iteration in AWSdata.iterations:
+ if current == iteration.name:
+ return (True, iteration.ou_id)
+
+ return (False, None)
+
+ def check_members_in_correct_iteration(self, AWSdata: AWSTree):
+ """Check if the data from the member tag matches the semester OU it is in."""
+ incorrect_emails = []
+ for iteration in AWSdata.iterations:
+ for member in iteration.members:
+ if member.project_semester != iteration.name:
+ incorrect_emails.append(member.project_email)
+
+ if incorrect_emails != []:
+ return (False, incorrect_emails)
+
+ return (True, None)
+
+ def check_double_iteration_names(self, AWSdata: AWSTree):
+ """Check if there are multiple OU's with the same name in AWS."""
+ names = [iteration.name for iteration in AWSdata.iterations]
+ doubles = []
+
+ for name in names:
+ if names.count(name) != 1 and name not in doubles:
+ doubles.append(name)
+
+ if doubles != []:
+ return (True, doubles)
+ return (False, None)
+
+ def extract_aws_setup(self, parent_ou_id):
+ """
+ Build an AWSTree of the course iteration OUs and member accounts under the parent OU.
+
+ :param parent_ou_id: The ID of the parent OU.
+ :return: AWSTree instance on success; on failure, self.fail is set and None is returned.
+ """
+ client = boto3.client("organizations")
+ try:
+ response = client.list_organizational_units_for_parent(ParentId=parent_ou_id)
+ aws_tree = AWSTree("root", parent_ou_id, [])
+ for iteration in response["OrganizationalUnits"]:
+ ou_id = iteration["Id"]
+ ou_name = iteration["Name"]
+ response = client.list_accounts_for_parent(ParentId=ou_id)
+ children = response["Accounts"]
+ syncData = []
+ for child in children:
+ account_id = child["Id"]
+ account_email = child["Email"]
+ response = client.list_tags_for_resource(ResourceId=account_id)
+ tags = response["Tags"]
+ merged_tags = {d["Key"]: d["Value"] for d in tags}
+ self.logger.debug(merged_tags)
+ if all(key in merged_tags for key in ["project_slug", "project_semester"]):
+ syncData.append(
+ SyncData(account_email, merged_tags["project_slug"], merged_tags["project_semester"])
+ )
+ else:
+ self.logger.error(
+ "Could not find project_slug or project_semester tag for account with ID: " + account_id
+ )
+ self.fail = True
+
+ aws_tree.iterations.append(Iteration(ou_name, ou_id, syncData))
+ return aws_tree
+ except ClientError as error:
+ self.fail = True
+ self.logger.error("Something went wrong extracting the AWS setup.")
+ self.logger.debug(f"{error}")
+ self.logger.debug(f"{error.response}")
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index 6822fc14..d2e7cec7 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -1,26 +1,939 @@
-from unittest.mock import patch
+"""Tests for awssync.py."""
+
+import json
+from unittest.mock import MagicMock, patch
import boto3
+import botocore
from botocore.exceptions import ClientError
from django.test import TestCase
-from moto import mock_organizations
+from moto import mock_organizations, mock_sts
+
+from courses.models import Semester
+
+from mailing_lists.models import MailingList
from projects import awssync
+from projects.models import Project
+
+
+class SyncDataTest(TestCase):
+ """Test SyncData class (struct)."""
+
+ def setUp(self):
+ """setup test environment."""
+ self.sync = awssync.SyncData
+
+ def test_throw_type_error_SyncData_class(self):
+ """Test Type Error when equals is called on wrong type."""
+ with self.assertRaises(TypeError) as context:
+ self.sync("", "", "") == []
+ self.assertTrue("Must compare to object of type SyncData" in str(context.exception))
class AWSSyncTest(TestCase):
"""Test AWSSync class."""
def setUp(self):
+ """Set up testing environment."""
self.sync = awssync.AWSSync()
+ self.semester = Semester.objects.create(year=2023, season=Semester.SPRING)
+ self.mailing_list = MailingList.objects.create(address="test1")
+ self.project = Project.objects.create(id=1, name="test1", semester=self.semester, slug="test1")
+ self.mailing_list.projects.add(self.project)
+ self.mock_org = mock_organizations()
+ self.mock_org.start()
+
+ def tearDown(self):
+ self.mock_org.stop()
+
+ def simulateFailure(self):
+ self.sync.fail = True
def test_button_pressed(self):
+ """Test button_pressed function."""
return_value = self.sync.button_pressed()
self.assertTrue(return_value)
+ def test_create_aws_organization(self):
+ moto_client = boto3.client("organizations")
+ org = self.sync
+ org.create_aws_organization()
+ describe_org = moto_client.describe_organization()["Organization"]
+ self.assertEqual(describe_org, org.org_info)
+
+ def test_create_aws_organization__exception(self):
+ org = self.sync
+ with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
+ org.create_aws_organization()
+ self.assertTrue(org.fail)
+ self.assertIsNone(org.org_info)
+
+ def test_create_course_iteration_OU(self):
+ moto_client = boto3.client("organizations")
+ org = self.sync
+ org.create_aws_organization()
+ org.create_course_iteration_OU("1")
+ describe_unit = moto_client.describe_organizational_unit(OrganizationalUnitId=org.iterationOU_info["Id"])[
+ "OrganizationalUnit"
+ ]
+ self.assertEqual(describe_unit, org.iterationOU_info)
+
+ def test_create_course_iteration_OU_without_organization(self):
+ org = self.sync
+ org.create_course_iteration_OU("1")
+ self.assertTrue(org.fail)
+
+ def test_create_course_iteration_OU__exception(self):
+ org = self.sync
+ org.create_aws_organization()
+ with patch("boto3.client") as mocker:
+ mocker().list_roots.side_effect = ClientError({}, "list_roots")
+ org.create_course_iteration_OU("1")
+ self.assertTrue(org.fail)
+
+ def test_get_all_mailing_lists(self):
+ """Test get_all_mailing_lists function."""
+ mailing_lists = self.sync.get_all_mailing_lists()
+ self.assertIsInstance(mailing_lists, list)
+
+ def test_get_emails_with_teamids_normal(self):
+ """Test get_emails_with_teamids function."""
+ email_id = self.sync.get_emails_with_teamids()
+
+ self.assertIsInstance(email_id, list)
+ self.assertIsInstance(email_id[0], awssync.SyncData)
+ expected_result = [awssync.SyncData("test1@giphouse.nl", "test1", "Spring 2023")]
+ self.assertEqual(email_id, expected_result)
+
+ def test_get_emails_with_teamids_no_project(self):
+ """Test get_emails_with_teamids function."""
+ MailingList.objects.all().delete()
+ self.mailing_list = MailingList.objects.create(address="test2")
+ email_id = self.sync.get_emails_with_teamids()
+ self.assertIsInstance(email_id, list)
+ self.assertEqual(email_id, [])
+
+ def test_get_emails_with_teamids_no_mailing_list(self):
+ """Test get_emails_with_teamids function."""
+ MailingList.objects.all().delete()
+ Project.objects.all().delete()
+ email_id = self.sync.get_emails_with_teamids()
+ self.assertIsInstance(email_id, list)
+ self.assertEqual(email_id, [])
+
+ def test_get_emails_with_teamids_different_semester(self):
+ """Test get_emails_with_teamids function."""
+ MailingList.objects.all().delete()
+ new_semester = Semester.objects.create(year=2022, season=Semester.FALL)
+ self.mailing_list = MailingList.objects.create(address="test2")
+ self.project = Project.objects.create(id=2, name="test2", semester=new_semester, slug="test2")
+ self.mailing_list.projects.add(self.project)
+ email_id = self.sync.get_emails_with_teamids()
+ self.assertIsInstance(email_id, list)
+ self.assertEqual(email_id, [])
+
+ def test_create_scp_policy(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+
+ self.assertFalse(self.sync.fail)
+ self.assertEqual(policy["PolicySummary"]["Name"], policy_name)
+ self.assertEqual(policy["PolicySummary"]["Description"], policy_description)
+ self.assertEqual(policy["Content"], json.dumps(policy_content))
+
+ def test_create_scp_policy__exception(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {
+ "Version": "2012-10-17",
+ "Statement": [{"Effect": "NonExistentEffect", "Action": "*", "Resource": "*"}],
+ }
+ with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+
+ self.assertTrue(self.sync.fail)
+ self.assertIsNone(policy)
+
+ def test_attach_scp_policy(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+
+ policy_id = policy["PolicySummary"]["Id"]
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+ self.sync.attach_scp_policy(policy_id, root_id)
+
+ current_scp_policies = moto_client.list_policies_for_target(TargetId=root_id, Filter="SERVICE_CONTROL_POLICY")
+ current_scp_policy_ids = [scp_policy["Id"] for scp_policy in current_scp_policies["Policies"]]
+
+ self.assertIn(policy_id, current_scp_policy_ids)
+ self.assertFalse(self.sync.fail)
+
+ def test_attach_scp_policy__exception(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+
+ policy_id = policy["PolicySummary"]["Id"]
+ root_id = self.sync.org_info["Id"] # Retrieves organization ID, not root ID, resulting in ClientError.
+ self.sync.attach_scp_policy(policy_id, root_id)
+
+ self.assertTrue(self.sync.fail)
+
+ @mock_sts
+ def test_check_aws_api_connection(self):
+ success, caller_identity_info = self.sync.check_aws_api_connection()
+
+ self.assertTrue(success)
+ self.assertIsNotNone(caller_identity_info)
+
+ @mock_sts
+ def test_check_aws_api_connection__exception(self):
+ with patch("boto3.client") as mocker:
+ mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity")
+ mocker.return_value = mocker
+ success, caller_identity_info = self.sync.check_aws_api_connection()
+
+ self.assertFalse(success)
+ self.assertIsNone(caller_identity_info)
+
+ # IAM simulate_principal_policy is not covered by moto.
+ def test_check_iam_policy(self):
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ # success == True
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ success = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+ self.assertTrue(success)
+
+ # success == False
+ mock_evaluation_results["EvaluationResults"][0]["EvalDecision"] = "implicitDeny"
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ success = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+ self.assertFalse(success)
+
+ def test_check_iam_policy__exception(self):
+ iam_user_arn = "daddy"
+ desired_actions = []
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.side_effect = ClientError({}, "simulate_principal_policy")
+ success = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ self.assertFalse(success)
+
+ def test_check_organization_existence(self):
+ moto_client = boto3.client("organizations")
+ organization_create_info = moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ success, organization_describe_info = self.sync.check_organization_existence()
+
+ self.assertTrue(success)
+ self.assertEqual(organization_create_info, organization_describe_info)
+
+ def test_check_organization_existence__exception(self):
+ with patch("boto3.client") as mocker:
+ mocker.describe_organization.side_effect = ClientError({}, "describe_organization")
+ mocker.return_value = mocker
+ success, organization_info = self.sync.check_organization_existence()
+
+ self.assertFalse(success)
+ self.assertIsNone(organization_info)
+
+ @mock_sts
+ def test_check_is_management_account(self):
+ moto_client = boto3.client("organizations")
+
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ _, caller_identity_info = self.sync.check_aws_api_connection()
+ _, organization_info = self.sync.check_organization_existence()
+
+ # is_management_account == True
+ success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info)
+ self.assertTrue(success_acc)
+
+ # is_management_account == False
+ caller_identity_info["Account"] = "daddy"
+ success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info)
+ self.assertFalse(success_acc)
+
+ def test_check_scp_enabled(self):
+ moto_client = boto3.client("organizations")
+
+ # SCP enabled.
+ organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ scp_is_enabled = self.sync.check_scp_enabled(organization_info)
+ self.assertTrue(scp_is_enabled)
+
+ # SCP semi-disabled (pending).
+ organization_info["AvailablePolicyTypes"][0]["Status"] = "PENDING_DISABLE"
+ scp_is_enabled = self.sync.check_scp_enabled(organization_info)
+ self.assertFalse(scp_is_enabled)
+
+ # SCP disabled (empty list).
+ organization_info["AvailablePolicyTypes"] = []
+ scp_is_enabled = self.sync.check_scp_enabled(organization_info)
+ self.assertFalse(scp_is_enabled)
+
+ @mock_sts
+ def test_pipeline_preconditions__all_success(self):
+ # Create organization.
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ mocker.return_value = check_iam_policy
+ success = self.sync.pipeline_preconditions()
+
+ self.assertTrue(success)
+
+ @mock_sts
+ def test_pipeline_preconditions__no_connection(self):
+ with patch("boto3.client") as mocker:
+ mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity")
+ mocker.return_value = mocker
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ def test_pipeline_preconditions__no_iam(self):
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "implicitDeny",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_api_actions = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ mocker.return_value = check_api_actions
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ @mock_sts
+ def test_pipeline_preconditions__no_organization(self):
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ mocker.return_value = check_iam_policy
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ @mock_sts
+ def test_pipeline_preconditions__no_management(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")
+
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam:
+ mocker_iam.return_value = check_iam_policy
+ with patch("projects.awssync.AWSSync.check_aws_api_connection") as mocker_api:
+ mocker_api.return_value = True, {"Account": "daddy", "Arn": "01234567890123456789"}
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ @mock_sts
+ def test_pipeline_preconditions__no_scp(self):
+ moto_client = boto3.client("organizations")
+
+ organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ # Mock return value of simulate_principal_policy.
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ # Mock return value of check_iam_policy.
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam:
+ mocker_iam.return_value = check_iam_policy
+
+ # Mock return value of check_organization_existence with no SCP policy enabled.
+ organization_info["AvailablePolicyTypes"] = []
+ with patch("projects.awssync.AWSSync.check_organization_existence") as mocker:
+ mocker.return_value = True, organization_info
+ success = self.sync.pipeline_preconditions()
+
+ self.assertFalse(success)
+
+ """
+ def test_pipeline_create_scp_policy(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+
+ policy = self.sync.pipeline_create_scp_policy()
+
+ self.assertFalse(self.sync.fail)
+ self.assertEqual(policy["PolicySummary"]["Name"], policy_name)
+ self.assertEqual(policy["PolicySummary"]["Description"], policy_description)
+ self.assertEqual(policy["Content"], json.dumps(policy_content))
+
+ def test_pipeline_create_scp_policy__exception(self):
+ self.sync.create_aws_organization()
+
+ with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
+ policy = self.sync.pipeline_create_scp_policy()
+
+ self.assertTrue(self.sync.fail)
+ self.assertIsNone(policy)
+ """
+
+ def test_pipeline_policy(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+ self.sync.policy_id = policy["PolicySummary"]["Id"]
+
+ ou_id = self.sync.create_course_iteration_OU("Test")
+
+ success = self.sync.pipeline_policy(ou_id)
+ self.assertTrue(success)
+
+ def test_pipeline_policy__exception(self):
+ self.sync.create_aws_organization()
+
+ ou_id = self.sync.create_course_iteration_OU("Test")
+
+ success = self.sync.pipeline_policy(ou_id)
+ self.assertFalse(success)
+
+ def test_pipeline_policy__failure_attach(self):
+ self.sync.create_aws_organization()
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+ self.sync.policy_id = policy["PolicySummary"]["Id"]
+
+ ou_id = self.sync.create_course_iteration_OU("Test")
+
+ self.sync.attach_scp_policy = MagicMock(side_effect=self.simulateFailure)
+
+ success = self.sync.pipeline_policy(ou_id)
+ self.assertFalse(success)
+
+ @mock_sts
+ def test_pipeline(self):
+ moto_client = boto3.client("organizations")
+
+ # pipeline_preconditions() == False
+ success = self.sync.pipeline()
+ self.assertFalse(success)
+
+ # pipeline_preconditions() == True
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
+ self.sync.policy_id = policy["PolicySummary"]["Id"]
+
+ iam_user_arn = "daddy"
+ desired_actions = []
+ mock_evaluation_results = {
+ "EvaluationResults": [
+ {
+ "EvalActionName": "organizations:CreateOrganizationalUnit",
+ "EvalDecision": "allowed",
+ "EvalResourceName": "*",
+ "MissingContextValues": [],
+ }
+ ]
+ }
+
+ with patch("boto3.client") as mocker:
+ mocker().simulate_principal_policy.return_value = mock_evaluation_results
+ check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+
+ with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ mocker.return_value = check_iam_policy
+ success = self.sync.pipeline()
+
+ self.assertTrue(success)
+
+ def test_pipeline__exception_list_roots(self):
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().list_roots.side_effect = ClientError({}, "list_roots")
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__edge_case_double_emails(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ aws_tree = awssync.AWSTree(
+ "Root",
+ "123",
+ [
+ awssync.Iteration(
+ "Spring 2023",
+ "456",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Spring 2023"),
+ ],
+ )
+ ],
+ )
+
+ gip_teams = [
+ awssync.SyncData("email1@example.com", "project1", "Spring 2023"),
+ awssync.SyncData("email1@example.com", "project2", "Spring 2023"),
+ ]
+
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
+ self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__edge_case_incorrectly_placed(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ aws_tree = awssync.AWSTree(
+ "Root",
+ "123",
+ [
+ awssync.Iteration(
+ "Fall 2023",
+ "456",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Spring 2023"),
+ ],
+ )
+ ],
+ )
+
+ gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")]
+
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
+ self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__edge_case_double_iteration_names(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ aws_tree = awssync.AWSTree(
+ "Root",
+ "123",
+ [
+ awssync.Iteration(
+ "Spring 2023", "456", [awssync.SyncData("email1@example.com", "project1", "Spring 2023")]
+ ),
+ awssync.Iteration("Spring 2023", "789", []),
+ ],
+ )
+
+ gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")]
+
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
+ self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__failed_creating_iteration_ou(self):
+ moto_client = boto3.client("organizations")
+ moto_client.create_organization(FeatureSet="ALL")["Organization"]
+
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ with patch("boto3.client") as mocker:
+ mocker().create_organizational_unit.side_effect = ClientError({}, "create_organizational_unit")
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__exception_attaching_policy(self):
+ self.sync.create_aws_organization()
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().attach_policy.side_effect = ClientError(
+ {"Error": {"Code": "PolicyTypeNotEnabledException"}}, "attach_policy"
+ )
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__already_attached_policy(self):
+ self.sync.create_aws_organization()
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().attach_policy.side_effect = ClientError(
+ {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy"
+ )
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__failed_create_and_move_account(self):
+ self.sync.create_aws_organization()
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().move_account.side_effect = ClientError({}, "move_account")
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline__exception_extract_aws_setup(self):
+ self.sync.pipeline_preconditions = MagicMock(return_value=True)
+
+ with patch("boto3.client") as mocker:
+ mocker().list_organizational_units_for_parent.side_effect = ClientError(
+ {}, "list_organizational_units_for_parent"
+ )
+ success = self.sync.pipeline()
+
+ self.assertFalse(success)
+
+ def test_pipeline_update_current_course_iteration_ou___failure_check_current_ou(self):
+
+ self.sync.check_current_ou_exists = MagicMock(return_value=(False, None))
+
+ self.sync.create_aws_organization()
+ success, id = self.sync.pipeline_update_current_course_iteration_ou(None)
+ self.assertTrue(success)
+ self.assertFalse(id is None)
+
+ def test_pipeline_update_current_course_iteration_ou___success(self):
+
+ self.sync.check_current_ou_exists = MagicMock(return_value=(True, "1234"))
+
+ self.sync.create_aws_organization()
+ success, id = self.sync.pipeline_update_current_course_iteration_ou(None)
+ self.assertTrue(success)
+ self.assertEquals(id, "1234")
+
+ def test_pipeline_update_current_course_iteration_ou___failure_create_ou(self):
+
+ self.sync.check_current_ou_exists = MagicMock(return_value=(False, None))
+ self.sync.create_course_iteration_OU = MagicMock(side_effect=self.simulateFailure())
+
+ self.sync.create_aws_organization()
+ success, failure_reason = self.sync.pipeline_update_current_course_iteration_ou(None)
+
+ self.assertFalse(success)
+ self.assertEquals(failure_reason, "ITERATION_OU_CREATION_FAILED")
+ self.assertTrue(self.sync.fail)
+
+ def test_pipeline_create_account(self):
+ self.sync.create_aws_organization()
+
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertTrue(success)
+ self.assertIsNotNone(response)
+
+ def test_pipeline_create_account__exception_create_account(self):
+ self.sync.create_aws_organization()
+
+ with patch("boto3.client") as mocker:
+ mocker().create_account.side_effect = ClientError({}, "create_account")
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertFalse(success)
+ self.assertEquals(response, "CLIENTERROR_CREATE_ACCOUNT")
+
+ def test_pipeline_create_account__exception_describe_account_status(self):
+ self.sync.create_aws_organization()
+
+ with patch("boto3.client") as mocker:
+ mocker().describe_create_account_status.side_effect = ClientError({}, "describe_create_account_status")
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertFalse(success)
+ self.assertEquals(response, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS")
+
+ def test_pipeline_create_account__state_failed(self):
+ self.sync.create_aws_organization()
+
+ with patch("boto3.client") as mocker:
+ response = {"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}}
+ mocker().describe_create_account_status.return_value = response
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertFalse(success)
+ self.assertEquals(response, "EMAIL_ALREADY_EXISTS")
+
+ def test_pipeline_create_account__state_in_progress(self):
+ self.sync.create_aws_organization()
+
+ with patch("boto3.client") as mocker:
+ response = {
+ "CreateAccountStatus": {
+ "State": "IN_PROGRESS",
+ }
+ }
+ mocker().describe_create_account_status.return_value = response
+ success, response = self.sync.pipeline_create_account(
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ )
+
+ self.assertFalse(success)
+ self.assertEquals(response, "STILL_IN_PROGRESS")
+
+ def test_pipeline_create_and_move_accounts(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+
+ new_member_accounts = [
+ awssync.SyncData("alice@example.com", "alice", "Spring 2023"),
+ awssync.SyncData("bob@example.com", "bob", "Spring 2023"),
+ ]
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+ course_iteration_id = self.sync.create_course_iteration_OU("Spring 2023")
+
+ success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
+ self.assertTrue(success)
+
+ def test_pipeline_create_and_move_accounts__email_exists(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+
+ new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")]
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+ course_iteration_id = self.sync.create_course_iteration_OU("2023Fall")
+
+ with patch("projects.awssync.AWSSync.pipeline_create_account") as mocker:
+ mocker.return_value = False, "EMAIL_ALREADY_EXISTS"
+ success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
+
+ self.assertFalse(success)
+
+ def test_pipeline_create_and_move_accounts__exception_move_account(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+
+ new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")]
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+ course_iteration_id = self.sync.create_course_iteration_OU("2023Fall")
+
+ self.sync.pipeline_create_account = MagicMock(return_value=(True, 1234))
+ with patch("boto3.client") as mocker:
+ mocker().move_account.side_effect = ClientError({}, "move_account")
+ success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
+
+ self.assertFalse(success)
+
+ @mock_organizations
+ def test_get_aws_data(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+
+ response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1")
+ OU_1_id = response_OU_1["OrganizationalUnit"]["Id"]
+ response_account_1 = moto_client.create_account(
+ Email="account_1@gmail.com",
+ AccountName="account_1",
+ Tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}],
+ )
+ account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"]
+ moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id)
+
+ aws_tree = self.sync.extract_aws_setup(root_id)
+ iteration_test = awssync.Iteration("OU_1", OU_1_id, [awssync.SyncData("account_1@gmail.com", "test1", "2021")])
+ aws_tree_test = awssync.AWSTree("root", root_id, [iteration_test])
+ self.assertEquals(aws_tree, aws_tree_test)
+
+ @mock_organizations
+ def test_get_aws_data_no_root(self):
+ boto3.client("organizations")
+ self.sync.create_aws_organization()
+ self.sync.extract_aws_setup("NonExistentRootID")
+ self.assertTrue(self.sync.fail)
+
+ @mock_organizations
+ def test_get_aws_data_no_slugs(self):
+ moto_client = boto3.client("organizations")
+ self.sync.create_aws_organization()
+ root_id = moto_client.list_roots()["Roots"][0]["Id"]
+
+ response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1")
+ OU_1_id = response_OU_1["OrganizationalUnit"]["Id"]
+ response_account_1 = moto_client.create_account(
+ Email="account_1@gmail.com",
+ AccountName="account_1",
+ Tags=[],
+ )
+ account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"]
+ moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id)
+ self.sync.extract_aws_setup(root_id)
+ self.assertTrue(self.sync.fail)
+
+
+class AWSSyncListTest(TestCase):
+ """Test AWSSyncList class."""
+
+ def setUp(self):
+ self.sync = awssync.AWSSync()
+ self.syncData = awssync.SyncData
+
+ self.test1 = self.syncData("test1@test1.test1", "test1", "test1")
+ self.test2 = self.syncData("test2@test2.test2", "test2", "test2")
+ self.test3 = self.syncData("test3@test3.test3", "test3", "test3")
+
+ def test_AWS_sync_list_both_empty(self):
+ gip_list = []
+ aws_list = []
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_empty_AWS(self):
+ gip_list = [self.test1, self.test2]
+ aws_list = []
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list)
+
+ def test_AWS_sync_list_empty_GiP(self):
+ gip_list = []
+ aws_list = [self.test1, self.test2]
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_both_full(self):
+ gip_list = [self.test1, self.test2]
+ aws_list = [self.test2, self.test3]
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1])
+
+
+class AWSAPITalkerTest(TestCase):
def mock_api(self, operation_name, kwarg):
if operation_name == "CreateOrganization":
raise ClientError(
@@ -45,18 +958,214 @@ def mock_api(self, operation_name, kwarg):
},
"create_organization",
)
+ if operation_name == "CreateOrganizationalUnit":
+ raise ClientError(
+ {
+ "Error": {
+ "Message": "The OU already exists.",
+ "Code": "ParentNotFoundException",
+ },
+ "ResponseMetadata": {
+ "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "HTTPStatusCode": 400,
+ "HTTPHeaders": {
+ "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "content-type": "application/x-amz-json-1.1",
+ "content-length": "111",
+ "date": "Sun, 01 Jan 2023 00:00:00 GMT",
+ "connection": "close",
+ },
+ "RetryAttempts": 0,
+ },
+ "Message": "The OU already exists.",
+ },
+ "create_organizational_unit",
+ )
+ if operation_name == "CreatePolicy":
+ raise ClientError(
+ {
+ "Error": {
+ "Message": """The provided policy document does not meet the
+ requirements of the specified policy type.""",
+ "Code": "MalformedPolicyDocumentException",
+ },
+ "ResponseMetadata": {
+ "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "HTTPStatusCode": 400,
+ "HTTPHeaders": {
+ "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
+ "content-type": "application/x-amz-json-1.1",
+ "content-length": "147",
+ "date": "Sun, 01 Jan 2023 00:00:00 GMT",
+ "connection": "close",
+ },
+ "RetryAttempts": 0,
+ },
+ "Message": """The provided policy document does not meet the
+ requirements of the specified policy type.""",
+ },
+ "create_policy",
+ )
+ return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg)
- @mock_organizations
- def test_create_aws_organization(self):
- moto_client = boto3.client("organizations")
- org = self.sync
- org.create_aws_organization()
- describe_org = moto_client.describe_organization()["Organization"]
- self.assertEqual(describe_org, org.org_info)
- @patch("botocore.client.BaseClient._make_api_call", mock_api)
- def test_create_aws_organization__exception(self):
- org = self.sync
- org.create_aws_organization()
- self.assertTrue(org.fail)
- self.assertIsNone(org.org_info)
+class AWSTreeChecksTest(TestCase):
+ """Test checks done on AWSTree data struncture."""
+
+ def setUp(self):
+ self.sync = awssync.AWSSync()
+ self.awstree = awssync.AWSTree("Name", "1234", [])
+ self.iteration = awssync.Iteration("Name", "1234", [])
+ self.sync_data = awssync.SyncData("email@example.com", "Project X", "Spring 2020")
+
+ self.sync_list = [
+ awssync.SyncData("email1@example.com", "Spring 2022", "Project A"),
+ awssync.SyncData("email2@example.com", "Fall 2022", "Project B"),
+ awssync.SyncData("email3@example.com", "Spring 2022", "Project C"),
+ ]
+ self.aws_list = [
+ awssync.SyncData("email4@example.com", "Fall 2021", "Project D"),
+ awssync.SyncData("email5@example.com", "Spring 2022", "Project E"),
+ awssync.SyncData("email6@example.com", "Fall 2022", "Project F"),
+ ]
+
+ self.treelist = [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ awssync.SyncData("email3@example.com", "project3", "Spring 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ]
+
+ self.aws_tree1 = awssync.AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ awssync.Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ awssync.Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ awssync.SyncData("email3@example.com", "project3", "Spring 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree2 = awssync.AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ awssync.Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ awssync.Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ awssync.SyncData("email3@example.com", "project3", "Fall 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree3 = awssync.AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ awssync.Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ awssync.Iteration(
+ "Fall 2020",
+ "98765",
+ [
+ awssync.SyncData("email3@example.com", "project3", "Fall 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ def test_repr_AWSTree(self):
+ self.assertEquals(str(self.awstree), "AWSTree('Name', '1234', [])")
+
+ def test_repr_Iteration(self):
+ self.assertEquals(str(self.iteration), "Iteration('Name', '1234', [])")
+
+ def test_repr_SyncData(self):
+ self.assertEquals(str(self.sync_data), "SyncData('email@example.com', 'Project X', 'Spring 2020')")
+
+ def test_awstree_to_syncdata_list(self):
+ self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist)
+
+ def test_check_for_double_member_email(self):
+ # Test when there are no duplicate emails
+ self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
+
+ # Test when there is a duplicate email
+ self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G"))
+ self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
+
+ def test_check_current_ou_exists(self):
+ # Test when current semester OU does not exist
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"):
+ self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022")
+ val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
+ self.assertEqual((val1, val2), (False, None))
+
+ # Test when current semester OU exists
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"):
+ self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021")
+ val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
+ self.assertEqual((val1, val2), (True, "98765"))
+
+ def test_check_members_in_correct_iteration(self):
+ # Test when correct
+ val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree1)
+ self.assertEqual((val1, val2), (True, None))
+
+ # Test when incorrect
+ val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2)
+ self.assertEqual((val1, val2), (False, ["email3@example.com"]))
+
+ def test_check_double_iteration_names(self):
+ # Test when correct
+ val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1)
+ self.assertEqual((val1, val2), (False, None))
+
+ # Test when double
+ val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3)
+ self.assertEqual((val1, val2), (True, ["Fall 2020"]))
+
+ def test_AWSTree_equals(self):
+ self.assertEqual(self.aws_tree1, self.aws_tree1)
+ self.assertNotEqual(self.aws_tree1, self.aws_tree2)
+ with self.assertRaises(TypeError):
+ awssync.AWSTree("", "", []) == []
+ self.assertRaises(TypeError)
+
+ def test_Iteration_equals(self):
+ self.assertEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[0])
+ self.assertNotEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[1])
+ with self.assertRaises(TypeError):
+ awssync.Iteration("", "", []) == []
+ self.assertRaises(TypeError)
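The checks exercised above all return a (flag, details) tuple; the tests pin down that contract without showing the implementation. For orientation, a minimal sketch of a duplicate-iteration-name check that is consistent with test_check_double_iteration_names, written against the same tree structure used in these tests (the function name is illustrative; the real method in awssync.py may differ):

from collections import Counter


def check_double_iteration_names_sketch(aws_tree):
    """Return (True, [duplicated names]) when two iterations share a name, else (False, None)."""
    counts = Counter(iteration.name for iteration in aws_tree.iterations)
    duplicates = [name for name, count in counts.items() if count > 1]
    return (True, duplicates) if duplicates else (False, None)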
diff --git a/website/room_reservation/views.py b/website/room_reservation/views.py
index eeef5b36..f617d2ee 100644
--- a/website/room_reservation/views.py
+++ b/website/room_reservation/views.py
@@ -134,7 +134,7 @@ def get_context_data(self, **kwargs):
}
for reservation in Reservation.objects.filter(
start_time__date__gte=timezone.now() - self.time_window_past,
- start_time__date__lte=timezone.now() + self.time_window_future,
+ start_time__date__lt=timezone.now() + self.time_window_future,
)
]
)
From 9f6738fb73e9a996f4f134fe66da8890659a710d Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Tue, 25 Apr 2023 10:23:18 +0000
Subject: [PATCH 18/32] Dedicated module for AWS helper data structures (#47)
* Separate AWS helper data structures into a dedicated module
* Fix asserts for exceptions in tests
* Add missing type hinting
* Replace str calls with repr
---
website/projects/awssync.py | 76 +------
website/projects/awssync_structs.py | 69 ++++++
website/projects/tests/test_awssync.py | 208 -----------------
.../projects/tests/test_awssync_structs.py | 211 ++++++++++++++++++
4 files changed, 281 insertions(+), 283 deletions(-)
create mode 100644 website/projects/awssync_structs.py
create mode 100644 website/projects/tests/test_awssync_structs.py
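For orientation, the two test-related bullets above boil down to the following pattern; this snippet is illustrative only (the class and test names are made up) and is not part of the diff:

from django.test import TestCase

from projects.awssync_structs import SyncData


class SyncDataPatternExample(TestCase):
    def test_eq_wrong_type(self):
        # assertRaises with a callable replaces the old `with self.assertRaises(...):` block.
        self.assertRaises(TypeError, SyncData("a", "b", "c").__eq__, 123)

    def test_repr(self):
        # The structs define __repr__, so comparisons now go through repr() rather than str().
        self.assertEqual(repr(SyncData("a", "b", "c")), "SyncData('a', 'b', 'c')")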
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index 96e71327..95562452 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -1,5 +1,4 @@
"""Framework for synchronisation with Amazon Web Services (AWS)."""
-from __future__ import annotations
import json
import logging
@@ -14,83 +13,10 @@
from mailing_lists.models import MailingList
+from projects.awssync_structs import AWSTree, Iteration, SyncData
from projects.models import Project
-class SyncData:
- """Structure for AWS giphouse sync data."""
-
- def __init__(self, project_email, project_slug, project_semester):
- """Create SyncData instance."""
- self.project_email = project_email
- self.project_slug = project_slug
- self.project_semester = project_semester
-
- def __eq__(self, other):
- """Overload equals for SyncData type."""
- if not isinstance(other, SyncData):
- raise TypeError("Must compare to object of type SyncData")
- return (
- self.project_email == other.project_email
- and self.project_slug == other.project_slug
- and self.project_semester == other.project_semester
- )
-
- def __repr__(self):
- """Overload to string function for SyncData type."""
- return f"SyncData('{self.project_email}', '{self.project_slug}', '{self.project_semester}')"
-
-
-class Iteration:
- """Datatype for AWS data in the Course iteration OU."""
-
- def __init__(self, name, ou_id, members: list[SyncData]):
- """Initialize Iteration object."""
- self.name = name
- self.ou_id = ou_id
- self.members = members
-
- def __repr__(self):
- """Overload to string function for Iteration datatype."""
- return f"Iteration('{self.name}', '{self.ou_id}', {self.members})"
-
- def __eq__(self, other: Iteration) -> bool:
- """Overload equals operator for Iteration objects."""
- if not isinstance(other, Iteration):
- raise TypeError("Must compare to object of type Iteration")
- return self.name == other.name and self.ou_id == other.ou_id and self.members == other.members
-
-
-class AWSTree:
- """Tree structure for AWS data."""
-
- def __init__(self, name, ou_id, iterations: list[Iteration]):
- """Initialize AWSTree object."""
- self.name = name
- self.ou_id = ou_id
- self.iterations = iterations
-
- def __repr__(self):
- """Overload to string function for AWSTree object."""
- return f"AWSTree('{self.name}', '{self.ou_id}', {self.iterations})"
-
- def __eq__(self, other: AWSTree) -> bool:
- """Overload equals operator for AWSTree objects."""
- if not isinstance(other, AWSTree):
- raise TypeError("Must compare to object of type AWSTree")
- return self.name == other.name and self.ou_id == other.ou_id and self.iterations == other.iterations
-
- def awstree_to_syncdata_list(self):
- """Convert AWSTree to list of SyncData elements."""
- awslist = []
-
- for iteration in self.iterations:
- for member in iteration.members:
- awslist.append(member)
-
- return awslist
-
-
class AWSSync:
"""Synchronise with Amazon Web Services."""
diff --git a/website/projects/awssync_structs.py b/website/projects/awssync_structs.py
new file mode 100644
index 00000000..e0d8b934
--- /dev/null
+++ b/website/projects/awssync_structs.py
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+
+class SyncData:
+ """Structure for AWS giphouse sync data."""
+
+ def __init__(self, project_email: str, project_slug: str, project_semester: str) -> None:
+ """Create SyncData instance."""
+ self.project_email = project_email
+ self.project_slug = project_slug
+ self.project_semester = project_semester
+
+ def __eq__(self, other: SyncData) -> bool:
+ """Overload equals for SyncData type."""
+ if not isinstance(other, SyncData):
+ raise TypeError("Must compare to object of type SyncData")
+ return (
+ self.project_email == other.project_email
+ and self.project_slug == other.project_slug
+ and self.project_semester == other.project_semester
+ )
+
+ def __repr__(self) -> str:
+ """Overload to repr function for SyncData type."""
+ return f"SyncData('{self.project_email}', '{self.project_slug}', '{self.project_semester}')"
+
+
+class Iteration:
+ """Datatype for AWS data in the Course iteration OU."""
+
+ def __init__(self, name: str, ou_id: str, members: list[SyncData]) -> None:
+ """Initialize Iteration object."""
+ self.name = name
+ self.ou_id = ou_id
+ self.members = members
+
+ def __repr__(self) -> str:
+ """Overload to repr function for Iteration datatype."""
+ return f"Iteration('{self.name}', '{self.ou_id}', {self.members})"
+
+ def __eq__(self, other: Iteration) -> bool:
+ """Overload equals operator for Iteration objects."""
+ if not isinstance(other, Iteration):
+ raise TypeError("Must compare to object of type Iteration")
+ return self.name == other.name and self.ou_id == other.ou_id and self.members == other.members
+
+
+class AWSTree:
+ """Tree structure for AWS data."""
+
+ def __init__(self, name: str, ou_id: str, iterations: list[Iteration]) -> None:
+ """Initialize AWSTree object."""
+ self.name = name
+ self.ou_id = ou_id
+ self.iterations = iterations
+
+ def __repr__(self) -> str:
+ """Overload to repr function for AWSTree object."""
+ return f"AWSTree('{self.name}', '{self.ou_id}', {self.iterations})"
+
+ def __eq__(self, other: AWSTree) -> bool:
+ """Overload equals operator for AWSTree objects."""
+ if not isinstance(other, AWSTree):
+ raise TypeError("Must compare to object of type AWSTree")
+ return self.name == other.name and self.ou_id == other.ou_id and self.iterations == other.iterations
+
+ def awstree_to_syncdata_list(self) -> list[SyncData]:
+ """Convert AWSTree to list of SyncData elements."""
+ return [member for iteration in self.iterations for member in iteration.members]
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index d2e7cec7..6c49494f 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -20,20 +20,6 @@
from projects.models import Project
-class SyncDataTest(TestCase):
- """Test SyncData class (struct)."""
-
- def setUp(self):
- """setup test environment."""
- self.sync = awssync.SyncData
-
- def test_throw_type_error_SyncData_class(self):
- """Test Type Error when equals is called on wrong type."""
- with self.assertRaises(TypeError) as context:
- self.sync("", "", "") == []
- self.assertTrue("Must compare to object of type SyncData" in str(context.exception))
-
-
class AWSSyncTest(TestCase):
"""Test AWSSync class."""
@@ -901,38 +887,6 @@ def test_get_aws_data_no_slugs(self):
self.assertTrue(self.sync.fail)
-class AWSSyncListTest(TestCase):
- """Test AWSSyncList class."""
-
- def setUp(self):
- self.sync = awssync.AWSSync()
- self.syncData = awssync.SyncData
-
- self.test1 = self.syncData("test1@test1.test1", "test1", "test1")
- self.test2 = self.syncData("test2@test2.test2", "test2", "test2")
- self.test3 = self.syncData("test3@test3.test3", "test3", "test3")
-
- def test_AWS_sync_list_both_empty(self):
- gip_list = []
- aws_list = []
- self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
-
- def test_AWS_sync_list_empty_AWS(self):
- gip_list = [self.test1, self.test2]
- aws_list = []
- self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list)
-
- def test_AWS_sync_list_empty_GiP(self):
- gip_list = []
- aws_list = [self.test1, self.test2]
- self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
-
- def test_AWS_sync_list_both_full(self):
- gip_list = [self.test1, self.test2]
- aws_list = [self.test2, self.test3]
- self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1])
-
-
class AWSAPITalkerTest(TestCase):
def mock_api(self, operation_name, kwarg):
if operation_name == "CreateOrganization":
@@ -1007,165 +961,3 @@ def mock_api(self, operation_name, kwarg):
"create_policy",
)
return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg)
-
-
-class AWSTreeChecksTest(TestCase):
- """Test checks done on AWSTree data struncture."""
-
- def setUp(self):
- self.sync = awssync.AWSSync()
- self.awstree = awssync.AWSTree("Name", "1234", [])
- self.iteration = awssync.Iteration("Name", "1234", [])
- self.sync_data = awssync.SyncData("email@example.com", "Project X", "Spring 2020")
-
- self.sync_list = [
- awssync.SyncData("email1@example.com", "Spring 2022", "Project A"),
- awssync.SyncData("email2@example.com", "Fall 2022", "Project B"),
- awssync.SyncData("email3@example.com", "Spring 2022", "Project C"),
- ]
- self.aws_list = [
- awssync.SyncData("email4@example.com", "Fall 2021", "Project D"),
- awssync.SyncData("email5@example.com", "Spring 2022", "Project E"),
- awssync.SyncData("email6@example.com", "Fall 2022", "Project F"),
- ]
-
- self.treelist = [
- awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
- awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
- awssync.SyncData("email3@example.com", "project3", "Spring 2021"),
- awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
- ]
-
- self.aws_tree1 = awssync.AWSTree(
- "AWS Tree",
- "12345",
- [
- awssync.Iteration(
- "Fall 2020",
- "54321",
- [
- awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
- awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
- ],
- ),
- awssync.Iteration(
- "Spring 2021",
- "98765",
- [
- awssync.SyncData("email3@example.com", "project3", "Spring 2021"),
- awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
- ],
- ),
- ],
- )
-
- self.aws_tree2 = awssync.AWSTree(
- "AWS Tree",
- "12345",
- [
- awssync.Iteration(
- "Fall 2020",
- "54321",
- [
- awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
- awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
- ],
- ),
- awssync.Iteration(
- "Spring 2021",
- "98765",
- [
- awssync.SyncData("email3@example.com", "project3", "Fall 2021"),
- awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
- ],
- ),
- ],
- )
-
- self.aws_tree3 = awssync.AWSTree(
- "AWS Tree",
- "12345",
- [
- awssync.Iteration(
- "Fall 2020",
- "54321",
- [
- awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
- awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
- ],
- ),
- awssync.Iteration(
- "Fall 2020",
- "98765",
- [
- awssync.SyncData("email3@example.com", "project3", "Fall 2021"),
- awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
- ],
- ),
- ],
- )
-
- def test_repr_AWSTree(self):
- self.assertEquals(str(self.awstree), "AWSTree('Name', '1234', [])")
-
- def test_repr_Iteration(self):
- self.assertEquals(str(self.iteration), "Iteration('Name', '1234', [])")
-
- def test_repr_SyncData(self):
- self.assertEquals(str(self.sync_data), "SyncData('email@example.com', 'Project X', 'Spring 2020')")
-
- def test_awstree_to_syncdata_list(self):
- self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist)
-
- def test_check_for_double_member_email(self):
- # Test when there are no duplicate emails
- self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
-
- # Test when there is a duplicate email
- self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G"))
- self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
-
- def test_check_current_ou_exists(self):
- # Test when current semester OU does not exist
- with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"):
- self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022")
- val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
- self.assertEqual((val1, val2), (False, None))
-
- # Test when current semester OU exists
- with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"):
- self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021")
- val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
- self.assertEqual((val1, val2), (True, "98765"))
-
- def test_check_members_in_correct_iteration(self):
- # Test when correct
- val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree1)
- self.assertEqual((val1, val2), (True, None))
-
- # Test when incorrect
- val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2)
- self.assertEqual((val1, val2), (False, ["email3@example.com"]))
-
- def test_check_double_iteration_names(self):
- # Test when correct
- val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1)
- self.assertEqual((val1, val2), (False, None))
-
- # Test when double
- val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3)
- self.assertEqual((val1, val2), (True, ["Fall 2020"]))
-
- def test_AWSTree_equals(self):
- self.assertEqual(self.aws_tree1, self.aws_tree1)
- self.assertNotEqual(self.aws_tree1, self.aws_tree2)
- with self.assertRaises(TypeError):
- awssync.AWSTree("", "", []) == []
- self.assertRaises(TypeError)
-
- def test_Iteration_equals(self):
- self.assertEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[0])
- self.assertNotEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[1])
- with self.assertRaises(TypeError):
- awssync.Iteration("", "", []) == []
- self.assertRaises(TypeError)
diff --git a/website/projects/tests/test_awssync_structs.py b/website/projects/tests/test_awssync_structs.py
new file mode 100644
index 00000000..6f6b4fff
--- /dev/null
+++ b/website/projects/tests/test_awssync_structs.py
@@ -0,0 +1,211 @@
+"""Tests for awssync_structs.py."""
+
+from unittest.mock import patch
+
+from django.test import TestCase
+
+from courses.models import Semester
+
+from projects import awssync
+
+
+class SyncDataTest(TestCase):
+ """Test SyncData class (struct)."""
+
+ def setUp(self):
+ """setup test environment."""
+ self.sync = awssync.SyncData
+
+ def test_throw_type_error_SyncData_class(self):
+ """Test Type Error when equals is called on wrong type."""
+ self.assertRaises(TypeError, self.sync("a", "b", "c").__eq__, 123)
+
+
+class AWSSyncListTest(TestCase):
+ """Test AWSSyncList class."""
+
+ def setUp(self):
+ self.sync = awssync.AWSSync()
+ self.syncData = awssync.SyncData
+
+ self.test1 = self.syncData("test1@test1.test1", "test1", "test1")
+ self.test2 = self.syncData("test2@test2.test2", "test2", "test2")
+ self.test3 = self.syncData("test3@test3.test3", "test3", "test3")
+
+ def test_AWS_sync_list_both_empty(self):
+ gip_list = []
+ aws_list = []
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_empty_AWS(self):
+ gip_list = [self.test1, self.test2]
+ aws_list = []
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list)
+
+ def test_AWS_sync_list_empty_GiP(self):
+ gip_list = []
+ aws_list = [self.test1, self.test2]
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_both_full(self):
+ gip_list = [self.test1, self.test2]
+ aws_list = [self.test2, self.test3]
+ self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [self.test1])
+
+
+class AWSTreeChecksTest(TestCase):
+ """Test checks done on AWSTree data struncture."""
+
+ def setUp(self):
+ self.sync = awssync.AWSSync()
+ self.awstree = awssync.AWSTree("Name", "1234", [])
+ self.iteration = awssync.Iteration("Name", "1234", [])
+ self.sync_data = awssync.SyncData("email@example.com", "Project X", "Spring 2020")
+
+ self.sync_list = [
+ awssync.SyncData("email1@example.com", "Spring 2022", "Project A"),
+ awssync.SyncData("email2@example.com", "Fall 2022", "Project B"),
+ awssync.SyncData("email3@example.com", "Spring 2022", "Project C"),
+ ]
+ self.aws_list = [
+ awssync.SyncData("email4@example.com", "Fall 2021", "Project D"),
+ awssync.SyncData("email5@example.com", "Spring 2022", "Project E"),
+ awssync.SyncData("email6@example.com", "Fall 2022", "Project F"),
+ ]
+
+ self.treelist = [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ awssync.SyncData("email3@example.com", "project3", "Spring 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ]
+
+ self.aws_tree1 = awssync.AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ awssync.Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ awssync.Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ awssync.SyncData("email3@example.com", "project3", "Spring 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree2 = awssync.AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ awssync.Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ awssync.Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ awssync.SyncData("email3@example.com", "project3", "Fall 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree3 = awssync.AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ awssync.Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ awssync.SyncData("email1@example.com", "project1", "Fall 2020"),
+ awssync.SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ awssync.Iteration(
+ "Fall 2020",
+ "98765",
+ [
+ awssync.SyncData("email3@example.com", "project3", "Fall 2021"),
+ awssync.SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ def test_repr_AWSTree(self):
+ self.assertEquals(repr(self.awstree), "AWSTree('Name', '1234', [])")
+
+ def test_repr_Iteration(self):
+ self.assertEquals(repr(self.iteration), "Iteration('Name', '1234', [])")
+
+ def test_repr_SyncData(self):
+ self.assertEquals(repr(self.sync_data), "SyncData('email@example.com', 'Project X', 'Spring 2020')")
+
+ def test_awstree_to_syncdata_list(self):
+ self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist)
+
+ def test_check_for_double_member_email(self):
+ # Test when there are no duplicate emails
+ self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
+
+ # Test when there is a duplicate email
+ self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G"))
+ self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
+
+ def test_check_current_ou_exists(self):
+ # Test when current semester OU does not exist
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"):
+ self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022")
+ val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
+ self.assertEqual((val1, val2), (False, None))
+
+ # Test when current semester OU exists
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"):
+ self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021")
+ val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
+ self.assertEqual((val1, val2), (True, "98765"))
+
+ def test_check_members_in_correct_iteration(self):
+ # Test when correct
+ val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree1)
+ self.assertEqual((val1, val2), (True, None))
+
+ # Test when incorrect
+ val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2)
+ self.assertEqual((val1, val2), (False, ["email3@example.com"]))
+
+ def test_check_double_iteration_names(self):
+ # Test when correct
+ val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1)
+ self.assertEqual((val1, val2), (False, None))
+
+ # Test when double
+ val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3)
+ self.assertEqual((val1, val2), (True, ["Fall 2020"]))
+
+ def test_AWSTree_equals(self):
+ self.assertEqual(self.aws_tree1, self.aws_tree1)
+ self.assertNotEqual(self.aws_tree1, self.aws_tree2)
+ self.assertRaises(TypeError, awssync.AWSTree("", "", []).__eq__, [])
+
+ def test_Iteration_equals(self):
+ self.assertEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[0])
+ self.assertNotEqual(self.aws_tree1.iterations[0], self.aws_tree1.iterations[1])
+ self.assertRaises(TypeError, awssync.Iteration("", "", []).__eq__, [])
From 0acac89f62ad7022dec8b648d6326d10e666c648 Mon Sep 17 00:00:00 2001
From: Henk Berendsen <61596108+hb140502@users.noreply.github.com>
Date: Tue, 9 May 2023 10:17:35 +0200
Subject: [PATCH 19/32] 44 class for handling all aws api calls (#50)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* Updated deliverable sprint 1 (#22)
* AWS synchronisation button (#8)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Removed unnecessary docstrings.
* Add boto3 and moto dependencies (#11)
* Add logger and replace prints with logs
* Add function to create AWS organization
* Add unit tests for creating AWS organization
* bugfix (#619)
Co-authored-by: nvoers
* Added logger setlevel (#20)
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com>
Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com>
Co-authored-by: nvoers
Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com>
* Sprint 2 deliverable (#43)
* AWS synchronisation button (#8)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Removed unnecessary docstrings.
* Add boto3 and moto dependencies (#11)
* Add logger and replace prints with logs
* Add function to create AWS organization
* Add unit tests for creating AWS organization
* bugfix (#619)
Co-authored-by: nvoers
* Added logger setlevel (#20)
* Db sync (#16)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Added get mailinglist to the awssync file
* Added first version of get_all_managers
* Added test case for mailing lists
* Removed some prints
* reformatted using black
* flake8 indentation added
* flake8 indentation correction
* Removed get manager
* Linting added
* unused import removed
* Added get_teamid_from_email
* Changed function email with teamid
* Updated get_emails_with_teamids, working now
* Added test for get_emails_with_ids
* Added linting
* linting
* Added more test
* Linting in awssync and its test file
* Moved the imports around
* moved the imports around
* Black linting
* switched imports around
* Switched imports around part 2
* Switched imports around part 3
* Switched imports around part 4
* Fixed when no project exists for mailing list
* Added some more tests
* Removed exception try/except
* Black linting
* Changed get_email_with_teamid to new format
* Changed get_emails_with_teamids to go over project
* Added tests for get_emails_with_teamids
* changed info for get_emails_with_teamids
---------
Co-authored-by: Henk
* Db sync (#25)
* Added button to /admin/projects/project page on website
* Framework for AWS synchronisation button functionality
* Linked synchronisation button to framework
* Fixed import order to comply with flake8
* Added docstring to synchronise_to_AWS
* Changed redirect in synchronise_to_AWS function so that it redirects to the page from which the AWS sync button was pressed.
* Added tests and changed awssync.py to allow for testing the button_pressed function.
* Added get mailinglist to the awssync file
* Added first version of get_all_managers
* Added test case for mailing lists
* Removed some prints
* reformatted using black
* flake8 indentation added
* flake8 indentation correction
* Removed get manager
* Linting added
* unused import removed
* Added get_teamid_from_email
* Changed function email with teamid
* Updated get_emails_with_teamids, working now
* Added test for get_emails_with_ids
* Added linting
* linting
* Added more test
* Linting in awssync and its test file
* Moved the imports around
* moved the imports around
* Black linting
* switched imports around
* Switched imports around part 2
* Switched imports around part 3
* Switched imports around part 4
* Fixed when no project exists for mailing list
* Added some more tests
* Removed exception try/except
* Black linting
* Changed get_email_with_teamid to new format
* Changed get_emails_with_teamids to go over project
* Added tests for get_emails_with_teamids
* changed info for get_emails_with_teamids
* Changed email data dict to struct
* added test for TypeError exception for eq operator
* resolved linting errors
* changed comment to correct datatype
* dramatically improved test class name
---------
Co-authored-by: Henk
Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com>
Co-authored-by: Jer111
* Added function to generate which users have to be invited after the sync button is pressed (#23)
* Added 'generate_aws_sync_list' function and tests
* solved black errors
* changed generate_aws_sync_list to use SyncData structure
* Create and attach SCP policies (#29)
* Add functions for creating and attaching SCP policies
* Improve test cases
* 12 moto helper (#36)
* merged with development and added create_c_i_OU
* Added some tests for create_c_i_OU
* Added some tests for create_c_i_ou
* Linting
* Changed the mock_api call back to original
* Added create_team_ou with tests
* Fix problems with moto testing
* Worked on tests and added apitalkerclass
* Make test asserts more meaningful
* black
* Added tests for create_ou's without parts
* Added one test that gets all children under OU
* Fix linting
* Changed return of response
create team ou did not save the name of the team OU
* Fix test create team OU
* Resolved linting issues
* Fix flake8
* remove create_team_ou
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
Co-authored-by: Fouad Lamsettef
* Add checks for edge cases between AWS and Giphouse databases (#37)
* added double user check (partly)
* added some checks and made two new fancy classes for the storage of AWS tree dictionaries
* added tests
* added equals for AWSTree and Iteration objects
* test stupid error
* does it work now?
* resolved merge conflicts with rebasing on development
* cleaned up code based on pull request comments
* Extraction of AWS data
* Added function for extracting data
* Added mock format
* finished function (testing needed)
* Linting fix
* fix no return of tree
* Fix AttributeError
* Unwrap tuple instead of accessing by element to increase code readability
* Fixed for new struct
* Implementation bug fixes
* added tests (not done)
* Removed classes for merge
* Added function for extracting data
* Added mock format
* finished function (testing needed)
* Linting fix
* fix no return of tree
* Fix AttributeError
* Unwrap tuple instead of accessing by element to increase code readability
* Fixed for new struct
* Implementation bug fixes
* added tests (not done)
* Linting fix
* git fixes
* Black fix
* pydocstyle fix
* Black fix again
* removed flake8 'fix'
* Final flake8 fix
* Final final flake8 fix
* spelling error fix
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
* AWS synchronization pipeline (and integration bug fixes) (#42)
* Create and attach SCP policies (#29)
* Add functions for creating and attaching SCP policies
* Improve test cases
* Add function template
* Fix 'a/an' grammar mistake
* Add pipeline preconditions
* Add pipeline preconditions tests
* Add checks for required API actions
* Add test cases for checking required API actions
* Added implementation of creating and attaching policy in the pipeline
* Remove double API actions
* Added implementation of creating and moving accounts in the pipeline. This should still be divided into smaller functions.
* Increase code readability
* Stop checking account request status after known failure
* Fixed small typos and added account details to a debug message about account creation failure
* Added tests for pipeline policy and fixed typos in debug messages.
* Split creating and moving accounts into multiple functions, and handle exceptions
* added update_course_iteration_ou with tests
* updated pipeline_update_current_course_iteration
* Add test cases for creating and moving member accounts
* Bug fixes for pipeline dependencies that arose from integration sprint 2 tasks
* Revised pipeline policy function and corresponding tests so that it should not fail after first pipeline run
* Change duplicate policy attachment to soft-fail; replace organization ID with root ID
---------
Co-authored-by: Henk
Co-authored-by: Jer111
---------
Co-authored-by: Henk Berendsen <61596108+hb140502@users.noreply.github.com>
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com>
Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com>
Co-authored-by: nvoers
Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com>
Co-authored-by: Henk
Co-authored-by: mitchellboes <49476235+mitchellboes@users.noreply.github.com>
Co-authored-by: Jer111
Co-authored-by: Fouad Lamsettef
* Added class for API calls and preliminary version of the tests
* Added final test for API talker class
* Changed folder structure to organize AWS-related files
* Fixed tests which failed due to not being mocked correctly
* Removed aws files which have been moved to different folders
* Fixed bug which caused mocking to not work
* Added version of the api talker test which attempts to fix the error in test_simulate_principal_policy
* Fix mocking issue
* Added three remaining API calls and a test for one of them (list_tags_for_resource). Also added a function in the tests which creates an organization and returns its ID to clean up the other tests a bit.
* Added function and test for list_roots API call
* Fixed review comments
* Fixed formatting issue in test_awssync.py and resolved review comment in test_awsapitalker.py
---------
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
Co-authored-by: BrentHag <123667942+BrentHag@users.noreply.github.com>
Co-authored-by: Nick van Oers <39125833+nvoers@users.noreply.github.com>
Co-authored-by: nvoers
Co-authored-by: Jer111 <82157107+Jer111@users.noreply.github.com>
Co-authored-by: Filip Łysak <92109241+FilipLysak001@users.noreply.github.com>
Co-authored-by: mitchellboes <49476235+mitchellboes@users.noreply.github.com>
Co-authored-by: Jer111
Co-authored-by: Fouad Lamsettef
---
website/projects/aws/__init__.py | 0
website/projects/aws/awsapitalker.py | 162 ++++++++++++++
website/projects/tests/tests_aws/__init__.py | 0
.../tests/tests_aws/test_awsapitalker.py | 197 ++++++++++++++++++
4 files changed, 359 insertions(+)
create mode 100644 website/projects/aws/__init__.py
create mode 100644 website/projects/aws/awsapitalker.py
create mode 100644 website/projects/tests/tests_aws/__init__.py
create mode 100644 website/projects/tests/tests_aws/test_awsapitalker.py
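Before the diff itself: AWSAPITalker wraps the boto3 organizations, IAM and STS clients behind plain Python methods, and the tests drive it under moto. Roughly, assuming the usual moto test configuration (fake credentials and a default region), it is used like this; the snippet is a sketch based on the code below, not an excerpt from it:

from moto import mock_organizations

from projects.aws.awsapitalker import AWSAPITalker


@mock_organizations
def sketch_basic_flow():
    talker = AWSAPITalker()
    org_id = talker.create_organization("ALL")["Organization"]["Id"]
    root_id = talker.list_roots()[0]["Id"]
    ou = talker.create_organizational_unit(root_id, "Spring 2023")
    return org_id, root_id, ou["OrganizationalUnit"]["Id"]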
diff --git a/website/projects/aws/__init__.py b/website/projects/aws/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/website/projects/aws/awsapitalker.py b/website/projects/aws/awsapitalker.py
new file mode 100644
index 00000000..308a07da
--- /dev/null
+++ b/website/projects/aws/awsapitalker.py
@@ -0,0 +1,162 @@
+import boto3
+
+import botocore
+
+
+class AWSAPITalker:
+ """Communicate with AWS API using boto3."""
+
+ def __init__(self):
+ """
+ Initialize in order to communicate with the AWS API.
+
+ First, initializes the boto3 clients that communicate with AWS.
+ Second, sets the maximum number of elements that fit on one page of an AWS response.
+ """
+ self.iam_client = boto3.client("iam")
+ self.org_client = boto3.client("organizations")
+ self.sts_client = boto3.client("sts")
+
+ self.max_results = 20
+
+ def create_organization(self, feature_set: str) -> dict:
+ """
+ Create an AWS organization.
+
+ :param feature_set: enabled features in the organization (either 'ALL' or 'CONSOLIDATED_BILLING').
+ :return: dictionary containing information about the organization.
+ """
+ return self.org_client.create_organization(FeatureSet=feature_set)
+
+ def create_organizational_unit(self, parent_id: str, ou_name: str, tags: list[dict] = []) -> dict:
+ """
+ Create an organizational unit.
+
+ :param parent_id: the root/OU below which the new OU will be created.
+ :param ou_name: the name of the new OU.
+ :param tags: tags (list of dictionaries containing the keys 'Key' and 'Value') to be attached to the organizational unit.
+ :return: dictionary containing information about the organizational unit.
+ """
+ return self.org_client.create_organizational_unit(ParentId=parent_id, Name=ou_name, Tags=tags)
+
+ def attach_policy(self, target_id: str, policy_id: str):
+ """
+ Attach the specified policy to the specified target.
+
+ :param target_id: ID of the target to which the policy should be attached.
+ :param policy_id: ID of the policy to attach.
+ """
+ self.org_client.attach_policy(TargetId=target_id, PolicyId=policy_id)
+
+ def get_caller_identity(self) -> dict:
+ """Get the identity of the caller of the API actions."""
+ return self.sts_client.get_caller_identity()
+
+ def simulate_principal_policy(self, policy_source_arn: str, action_names: list[str]) -> dict:
+ """
+ Determine the effective permissions of the policies of an IAM entity by simulating API actions.
+
+ :param policy_source_arn: ARN of the IAM entity.
+ :param action_names: list of AWS API actions to simulate.
+ :return: dictionary containing information about the simulation's outcome.
+ """
+ return self.iam_client.simulate_principal_policy(PolicySourceArn=policy_source_arn, ActionNames=action_names)
+
+ def describe_organization(self) -> dict:
+ """Describe the AWS organization."""
+ return self.org_client.describe_organization()
+
+ def describe_policy(self, policy_id: str) -> dict:
+ """Describe the policy with the specified ID."""
+ return self.org_client.describe_policy(PolicyId=policy_id)
+
+ def create_account(self, email: str, account_name: str, tags: list[dict] = []) -> dict:
+ """
+ Move an AWS account in the organization.
+
+ :param email: email address of the account.
+ :param account_name: name of the account.
+ :param tags: tags (list of dictionaries containing the keys 'Key' and 'Value') to be attached to the account.
+ :return: dictionary containing information about the account creation status.
+ """
+ return self.org_client.create_account(
+ Email=email, AccountName=account_name, IamUserAccessToBilling="DENY", Tags=tags
+ )
+
+ def move_account(self, account_id: str, source_parent_id: str, dest_parent_id: str):
+ """
+ Move an AWS account in the organization.
+
+ :param account_id: ID of the account.
+ :param source_parent_id: ID of the root/OU containing the account.
+ :param dest_parent_id: ID of the root/OU which the account should be moved to.
+ """
+ self.org_client.move_account(
+ AccountId=account_id, SourceParentId=source_parent_id, DestinationParentId=dest_parent_id
+ )
+
+ def combine_pages(self, page_iterator: botocore.paginate.PageIterator, key: str) -> list[dict]:
+ """
+ Combine the information on each page of an AWS API response into a list.
+
+ This function is only used for AWS API operations which can return multiple pages as a response.
+
+ :param page_iterator: boto3 feature which iterates over all pages.
+ :param key: the key corresponding to the list of values to be retrieved from each page.
+ :return: a list that combines the values from all pages.
+ """
+ items = []
+
+ for page in page_iterator:
+ items += page[key]
+
+ return items
+
+ def list_organizational_units_for_parent(self, parent_id: str) -> list[dict]:
+ """
+ List all organizational units below the specified parent.
+
+ :param parent_id: ID of the parent.
+ :return: list of dictionaries containing organizational unit information.
+ """
+ paginator = self.org_client.get_paginator("list_organizational_units_for_parent")
+ page_iterator = paginator.paginate(ParentId=parent_id, MaxResults=self.max_results)
+
+ return self.combine_pages(page_iterator, "OrganizationalUnits")
+
+ def list_accounts_for_parent(self, parent_id: str) -> list[dict]:
+ """
+ List all accounts below the specified parent.
+
+ :param parent_id: ID of the parent.
+ :return: list of dictionaries containing account information
+ """
+ paginator = self.org_client.get_paginator("list_accounts_for_parent")
+ page_iterator = paginator.paginate(ParentId=parent_id, MaxResults=self.max_results)
+
+ return self.combine_pages(page_iterator, "Accounts")
+
+ def list_tags_for_resource(self, resource_id: str) -> list[dict]:
+ """
+ List all tags belonging to the specified resource.
+
+ :param resource_id: ID of the resource.
+ :return: list of dictionaries containing tag information
+ """
+ paginator = self.org_client.get_paginator("list_tags_for_resource")
+ page_iterator = paginator.paginate(
+ ResourceId=resource_id,
+ )
+
+ return self.combine_pages(page_iterator, "Tags")
+
+ def list_roots(self) -> list[dict]:
+ """
+ List all roots in the organization.
+
+ :return: list of dictionaries containing root information.
+ """
+ paginator = self.org_client.get_paginator("list_roots")
+ page_iterator = paginator.paginate()
+
+ return self.combine_pages(page_iterator, "Roots")
diff --git a/website/projects/tests/tests_aws/__init__.py b/website/projects/tests/tests_aws/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/website/projects/tests/tests_aws/test_awsapitalker.py b/website/projects/tests/tests_aws/test_awsapitalker.py
new file mode 100644
index 00000000..5d6a1c9e
--- /dev/null
+++ b/website/projects/tests/tests_aws/test_awsapitalker.py
@@ -0,0 +1,197 @@
+import json
+from unittest.mock import MagicMock, patch
+
+import boto3
+
+from django.test import TestCase
+
+from moto import mock_organizations, mock_sts
+
+from projects.aws import awsapitalker
+
+
+class AWSAPITalkerTest(TestCase):
+ """Test AWSAPITalker class."""
+
+ def setUp(self):
+ """Set up testing environment."""
+ self.mock_org = mock_organizations()
+ self.mock_sts = mock_sts()
+ self.mock_org.start()
+ self.mock_sts.start()
+ self.api_talker = awsapitalker.AWSAPITalker()
+
+ def tearDown(self):
+ self.mock_org.stop()
+ self.mock_sts.stop()
+
+ def create_organization(self):
+ """Returns the ID of the organization created for testing"""
+ org_info = self.api_talker.create_organization("ALL")
+ return org_info["Organization"]["Id"]
+
+ def create_dummy_policy_content(self):
+ """Returns a string containing the content of a policy used for testing."""
+ return json.dumps({"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]})
+
+ def create_dummy_policy(self):
+ """
+ Creates a policy used for testing.
+
+ :return: ID of the created policy.
+ """
+ moto_client = boto3.client("organizations")
+
+ policy_content = self.create_dummy_policy_content()
+
+ return moto_client.create_policy(
+ Name="Test policy",
+ Content=policy_content,
+ Type="SERVICE_CONTROL_POLICY",
+ Description="Policy for testing purposes",
+ )["Policy"]["PolicySummary"]["Id"]
+
+ def test_create_organization(self):
+ response = self.api_talker.create_organization("ALL")
+
+ self.assertEquals(response["Organization"]["FeatureSet"], "ALL")
+
+ def test_create_organizational_unit(self):
+ org_id = self.create_organization()
+
+ response = self.api_talker.create_organizational_unit(org_id, "Test OU")
+
+ self.assertEqual(response["OrganizationalUnit"]["Name"], "Test OU")
+
+ def test_attach_policy(self):
+ moto_client = boto3.client("organizations")
+
+ org_id = self.create_organization()
+
+ policy_id = self.create_dummy_policy()
+
+ ou_info = self.api_talker.create_organizational_unit(org_id, "Test OU")
+ ou_id = ou_info["OrganizationalUnit"]["Id"]
+
+ self.api_talker.attach_policy(ou_id, policy_id)
+
+ response = moto_client.list_policies_for_target(TargetId=ou_id, Filter="SERVICE_CONTROL_POLICY")
+ self.assertIn(policy_id, [p["Id"] for p in response["Policies"]])
+
+ def test_get_caller_identity(self):
+ response = self.api_talker.get_caller_identity()
+ self.assertIsNotNone(response)
+
+ def test_simulate_principal_policy(self):
+ arn = self.api_talker.get_caller_identity()["Arn"]
+
+ with patch.object(
+ self.api_talker.iam_client,
+ "simulate_principal_policy",
+ MagicMock(return_value={"EvaluationResults": [{"EvalDecision": "allowed"}]}),
+ ):
+ eval_results = self.api_talker.simulate_principal_policy(arn, ["sts:SimulatePrincipalPolicy"])[
+ "EvaluationResults"
+ ]
+
+ self.assertEquals(eval_results[0]["EvalDecision"], "allowed")
+
+ def test_describe_organization(self):
+ self.create_organization()
+
+ response = self.api_talker.describe_organization()
+
+ self.assertIn("Organization", response)
+ self.assertIn("MasterAccountId", response["Organization"])
+ self.assertIn("MasterAccountEmail", response["Organization"])
+
+ def test_describe_policy(self):
+ self.create_organization()
+
+ policy_id = self.create_dummy_policy()
+
+ policy = self.api_talker.describe_policy(policy_id)["Policy"]
+ policy_summary = policy["PolicySummary"]
+ policy_content = self.create_dummy_policy_content()
+
+ self.assertEquals(policy_summary["Name"], "Test policy")
+ self.assertEquals(policy_summary["Description"], "Policy for testing purposes")
+ self.assertEquals(policy_content, policy["Content"])
+
+ def test_create_account(self):
+ moto_client = boto3.client("organizations")
+
+ self.create_organization()
+
+ response = self.api_talker.create_account("test@example.com", "Test")
+
+ accounts = moto_client.list_accounts()["Accounts"]
+
+ self.assertEquals(response["CreateAccountStatus"]["AccountName"], "Test")
+ self.assertIn(("Test", "test@example.com"), [(account["Name"], account["Email"]) for account in accounts])
+
+ def test_move_account(self):
+ moto_client = boto3.client("organizations")
+
+ org_id = self.create_organization()
+
+ account_status = self.api_talker.create_account("test@example.com", "Test")
+ account_id = account_status["CreateAccountStatus"]["AccountId"]
+
+ source_ou_info = self.api_talker.create_organizational_unit(org_id, "Source OU")
+ source_ou_id = source_ou_info["OrganizationalUnit"]["Id"]
+ dest_ou_info = self.api_talker.create_organizational_unit(org_id, "Destination OU")
+ dest_ou_id = dest_ou_info["OrganizationalUnit"]["Id"]
+
+ self.api_talker.move_account(account_id, source_ou_id, dest_ou_id)
+
+ accounts_under_source = moto_client.list_children(ParentId=source_ou_id, ChildType="ACCOUNT")["Children"]
+ accounts_under_dest = moto_client.list_children(ParentId=dest_ou_id, ChildType="ACCOUNT")["Children"]
+ self.assertNotIn(account_id, [account["Id"] for account in accounts_under_source])
+ self.assertIn(account_id, [account["Id"] for account in accounts_under_dest])
+
+ def test_list_organizational_units_for_parent(self):
+ self.create_organization()
+
+ root_id = self.api_talker.list_roots()[0]["Id"]
+
+ ou_1 = self.api_talker.create_organizational_unit(root_id, "Test OU 1")["OrganizationalUnit"]
+ ou_2 = self.api_talker.create_organizational_unit(root_id, "Test OU 2")["OrganizationalUnit"]
+
+ received_ou_list = self.api_talker.list_organizational_units_for_parent(root_id)
+
+ self.assertCountEqual([ou_1, ou_2], received_ou_list)
+
+ def test_list_accounts_for_parent(self):
+ self.create_organization()
+
+ self.api_talker.create_account("test1@example.com", "Test Account 1")
+ self.api_talker.create_account("test2@example.com", "Test Account 2")
+
+ root_id = self.api_talker.list_roots()[0]["Id"]
+
+ received_accounts = self.api_talker.list_accounts_for_parent(root_id)
+ received_emails = [account["Email"] for account in received_accounts]
+
+ expected_emails = ["master@example.com", "test1@example.com", "test2@example.com"]
+
+ self.assertEqual(expected_emails, received_emails)
+
+ def test_list_tags_for_resource(self):
+ org_id = self.create_organization()
+
+ specified_tags = [{"Key": "key1", "Value": "val1"}, {"Key": "key2", "Value": "val2"}]
+
+ response = self.api_talker.create_organizational_unit(org_id, "Test OU", specified_tags)
+ ou_id = response["OrganizationalUnit"]["Id"]
+
+ received_tags = self.api_talker.list_tags_for_resource(ou_id)
+
+ self.assertEqual(specified_tags, received_tags)
+
+ def test_list_roots(self):
+ self.create_organization()
+
+ roots = self.api_talker.list_roots()
+
+ self.assertEqual(len(roots), 1)
From d87c28855530f2026fa369f4291861cfafd9b3b9 Mon Sep 17 00:00:00 2001
From: mitchellboes <49476235+mitchellboes@users.noreply.github.com>
Date: Tue, 9 May 2023 11:39:53 +0200
Subject: [PATCH 20/32] Merge #41 (check double iteration names, members in
correct iteration) into development
* refactored only the necessary checks to other files, as well as their tests.
* made changes based on pull request
---
website/projects/awssync.py | 40 +-------
website/projects/awssync_checks.py | 32 +++++++
website/projects/tests/test_awssync.py | 8 +-
website/projects/tests/test_awssync_checks.py | 93 +++++++++++++++++++
.../projects/tests/test_awssync_structs.py | 18 ----
5 files changed, 132 insertions(+), 59 deletions(-)
create mode 100644 website/projects/awssync_checks.py
create mode 100644 website/projects/tests/test_awssync_checks.py
diff --git a/website/projects/awssync.py b/website/projects/awssync.py
index 95562452..7221cf02 100644
--- a/website/projects/awssync.py
+++ b/website/projects/awssync.py
@@ -1,4 +1,5 @@
"""Framework for synchronisation with Amazon Web Services (AWS)."""
+from __future__ import annotations
import json
import logging
@@ -13,6 +14,7 @@
from mailing_lists.models import MailingList
+from projects.awssync_checks import Checks
from projects.awssync_structs import AWSTree, Iteration, SyncData
from projects.models import Project
@@ -517,15 +519,9 @@ def pipeline(self):
if self.check_for_double_member_email(aws_sync_data, merged_sync_data):
return False
- success, incorrect_emails = self.check_members_in_correct_iteration(aws_tree)
- if not success:
- self.logger.debug(f"Got incorrectly placed AWS member accounts: {incorrect_emails}.")
- return False
-
- failure, double_iteration_names = self.check_double_iteration_names(aws_tree)
- if failure:
- self.logger.debug(f"Found double iteration names: {double_iteration_names}.")
- return False
+ checker = Checks()
+ checker.check_members_in_correct_iteration(aws_tree)
+ checker.check_double_iteration_names(aws_tree)
# Check/create course iteration OU.
current_course_iteration_exists, response = self.pipeline_update_current_course_iteration_ou(aws_tree)
@@ -575,32 +571,6 @@ def check_current_ou_exists(self, AWSdata: AWSTree):
return (False, None)
- def check_members_in_correct_iteration(self, AWSdata: AWSTree):
- """Check if the data from the member tag matches the semester OU it is in."""
- incorrect_emails = []
- for iteration in AWSdata.iterations:
- for member in iteration.members:
- if member.project_semester != iteration.name:
- incorrect_emails.append(member.project_email)
-
- if incorrect_emails != []:
- return (False, incorrect_emails)
-
- return (True, None)
-
- def check_double_iteration_names(self, AWSdata: AWSTree):
- """Check if there are multiple OU's with the same name in AWS."""
- names = [iteration.name for iteration in AWSdata.iterations]
- doubles = []
-
- for name in names:
- if names.count(name) != 1 and name not in doubles:
- doubles.append(name)
-
- if doubles != []:
- return (True, doubles)
- return (False, None)
-
def extract_aws_setup(self, parent_ou_id):
"""
Give a list of all the children of the parent OU.
diff --git a/website/projects/awssync_checks.py b/website/projects/awssync_checks.py
new file mode 100644
index 00000000..7c28f02d
--- /dev/null
+++ b/website/projects/awssync_checks.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from projects.awssync_structs import AWSTree
+
+
+class Checks:
+ """Class for pipeline checks."""
+
+ def check_members_in_correct_iteration(self, AWSdata: AWSTree) -> None:
+ """Check if the data from the member tag matches the semester OU it is in."""
+ emails_inconsistent_accounts = [
+ member.project_email
+ for iteration in AWSdata.iterations
+ for member in iteration.members
+ if member.project_semester != iteration.name
+ ]
+
+ if emails_inconsistent_accounts:
+ raise Exception(
+ f"There are members in a course iteration OU with an inconsistent course iteration tag.\
+ Inconsistent names are {emails_inconsistent_accounts}"
+ )
+
+ def check_double_iteration_names(self, AWSdata: AWSTree) -> None:
+ """Check if there are multiple OU's with the same name in AWS."""
+ names = [iteration.name for iteration in AWSdata.iterations]
+ duplicates = [iteration_name for iteration_name in set(names) if names.count(iteration_name) > 1]
+
+ if duplicates:
+ raise Exception(
+ f"There are multiple course iteration OUs with the same name. Duplicates are: {duplicates}"
+ )
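The refactored checks signal failure by raising an exception whose message names the offending emails or OU names, instead of returning a (success, payload) tuple. A sketch of how a caller such as the pipeline might consume them, assuming aws_tree holds an AWSTree built elsewhere:

    from projects.awssync_checks import Checks

    checker = Checks()
    try:
        checker.check_members_in_correct_iteration(aws_tree)
        checker.check_double_iteration_names(aws_tree)
    except Exception as error:
        # The exception message lists the inconsistent member emails or duplicate OU names.
        print(f"AWS tree check failed: {error}")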
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/test_awssync.py
index 6c49494f..0f493b77 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/test_awssync.py
@@ -618,9 +618,7 @@ def test_pipeline__edge_case_incorrectly_placed(self):
self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
- success = self.sync.pipeline()
-
- self.assertFalse(success)
+ self.assertRaises(Exception, self.sync.pipeline)
def test_pipeline__edge_case_double_iteration_names(self):
moto_client = boto3.client("organizations")
@@ -643,9 +641,7 @@ def test_pipeline__edge_case_double_iteration_names(self):
self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
- success = self.sync.pipeline()
-
- self.assertFalse(success)
+ self.assertRaises(Exception, self.sync.pipeline)
def test_pipeline__failed_creating_iteration_ou(self):
moto_client = boto3.client("organizations")
diff --git a/website/projects/tests/test_awssync_checks.py b/website/projects/tests/test_awssync_checks.py
new file mode 100644
index 00000000..6e40fc70
--- /dev/null
+++ b/website/projects/tests/test_awssync_checks.py
@@ -0,0 +1,93 @@
+"""Tests for awssync/checks.py."""
+
+from django.test import TestCase
+
+from projects.awssync_checks import Checks
+from projects.awssync_structs import AWSTree, Iteration, SyncData
+
+
+class ChecksTest(TestCase):
+ def setUp(self):
+ self.checks = Checks()
+ self.aws_tree1 = AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ SyncData("email1@example.com", "project1", "Fall 2020"),
+ SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ SyncData("email3@example.com", "project3", "Spring 2021"),
+ SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree2 = AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ SyncData("email1@example.com", "project1", "Fall 2020"),
+ SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ SyncData("email3@example.com", "project3", "Fall 2021"),
+ SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree3 = AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ SyncData("email1@example.com", "project1", "Fall 2020"),
+ SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ Iteration(
+ "Fall 2020",
+ "98765",
+ [
+ SyncData("email3@example.com", "project3", "Fall 2021"),
+ SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ def test_check_members_in_correct_iteration(self):
+ # Test when correct
+ self.assertIsNone(self.checks.check_members_in_correct_iteration(self.aws_tree1))
+
+ # Test when incorrect
+ self.assertRaises(Exception, self.checks.check_members_in_correct_iteration, self.aws_tree2)
+
+ def test_check_double_iteration_names(self):
+ # Test when correct
+ self.assertIsNone(self.checks.check_double_iteration_names(self.aws_tree1))
+
+ # Test when double
+ self.assertRaises(Exception, self.checks.check_double_iteration_names, self.aws_tree3)
diff --git a/website/projects/tests/test_awssync_structs.py b/website/projects/tests/test_awssync_structs.py
index 6f6b4fff..8b27840a 100644
--- a/website/projects/tests/test_awssync_structs.py
+++ b/website/projects/tests/test_awssync_structs.py
@@ -182,24 +182,6 @@ def test_check_current_ou_exists(self):
val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
self.assertEqual((val1, val2), (True, "98765"))
- def test_check_members_in_correct_iteration(self):
- # Test when correct
- val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree1)
- self.assertEqual((val1, val2), (True, None))
-
- # Test when incorrect
- val1, val2 = self.sync.check_members_in_correct_iteration(self.aws_tree2)
- self.assertEqual((val1, val2), (False, ["email3@example.com"]))
-
- def test_check_double_iteration_names(self):
- # Test when correct
- val1, val2 = self.sync.check_double_iteration_names(self.aws_tree1)
- self.assertEqual((val1, val2), (False, None))
-
- # Test when double
- val1, val2 = self.sync.check_double_iteration_names(self.aws_tree3)
- self.assertEqual((val1, val2), (True, ["Fall 2020"]))
-
def test_AWSTree_equals(self):
self.assertEqual(self.aws_tree1, self.aws_tree1)
self.assertNotEqual(self.aws_tree1, self.aws_tree2)
From a4dfc69fa6400bf12c6d6bc037a5bfdba055274e Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Wed, 10 May 2023 12:18:51 +0000
Subject: [PATCH 21/32] Refactor pipeline preconditions (#54)
* Move all AWS files to dedicated AWS directory
* Add refactored pipeline preconditions
* Add coverage unit tests
---
website/projects/admin.py | 2 +-
website/projects/{ => aws}/awssync.py | 4 +-
website/projects/aws/awssync_checks.py | 110 +++++++++++
.../aws/awssync_checks_permissions.py | 57 ++++++
website/projects/{ => aws}/awssync_structs.py | 0
website/projects/awssync_checks.py | 32 ---
website/projects/tests/test_awssync_checks.py | 93 ---------
.../tests/{ => tests_aws}/test_awssync.py | 20 +-
.../tests/tests_aws/test_awssync_checks.py | 183 ++++++++++++++++++
.../{ => tests_aws}/test_awssync_structs.py | 2 +-
10 files changed, 364 insertions(+), 139 deletions(-)
rename website/projects/{ => aws}/awssync.py (99%)
create mode 100644 website/projects/aws/awssync_checks.py
create mode 100644 website/projects/aws/awssync_checks_permissions.py
rename website/projects/{ => aws}/awssync_structs.py (100%)
delete mode 100644 website/projects/awssync_checks.py
delete mode 100644 website/projects/tests/test_awssync_checks.py
rename website/projects/tests/{ => tests_aws}/test_awssync.py (98%)
create mode 100644 website/projects/tests/tests_aws/test_awssync_checks.py
rename website/projects/tests/{ => tests_aws}/test_awssync_structs.py (99%)
diff --git a/website/projects/admin.py b/website/projects/admin.py
index 7fae8a6d..1cc2e032 100644
--- a/website/projects/admin.py
+++ b/website/projects/admin.py
@@ -12,7 +12,7 @@
from mailing_lists.models import MailingList
-from projects.awssync import AWSSync
+from projects.aws.awssync import AWSSync
from projects.forms import ProjectAdminForm, RepositoryInlineForm
from projects.githubsync import GitHubSync
from projects.models import Client, Project, Repository
diff --git a/website/projects/awssync.py b/website/projects/aws/awssync.py
similarity index 99%
rename from website/projects/awssync.py
rename to website/projects/aws/awssync.py
index 7221cf02..45a2a301 100644
--- a/website/projects/awssync.py
+++ b/website/projects/aws/awssync.py
@@ -14,8 +14,8 @@
from mailing_lists.models import MailingList
-from projects.awssync_checks import Checks
-from projects.awssync_structs import AWSTree, Iteration, SyncData
+from projects.aws.awssync_checks import Checks
+from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
from projects.models import Project
diff --git a/website/projects/aws/awssync_checks.py b/website/projects/aws/awssync_checks.py
new file mode 100644
index 00000000..3c7c0e4b
--- /dev/null
+++ b/website/projects/aws/awssync_checks.py
@@ -0,0 +1,110 @@
+from __future__ import annotations
+
+import logging
+
+from projects.aws.awsapitalker import AWSAPITalker
+from projects.aws.awssync_structs import AWSTree
+
+
+class Checks:
+ """Class for pipeline checks."""
+
+ def __init__(self):
+ """Initialize an instance with an AWSAPITalker and a logger."""
+ self.api_talker = AWSAPITalker()
+ self.logger = logging.getLogger("django.aws")
+
+ def check_members_in_correct_iteration(self, AWSdata: AWSTree) -> None:
+ """Check if the data from the member tag matches the semester OU it is in."""
+ emails_inconsistent_accounts = [
+ member.project_email
+ for iteration in AWSdata.iterations
+ for member in iteration.members
+ if member.project_semester != iteration.name
+ ]
+
+ if emails_inconsistent_accounts:
+ raise Exception(
+ f"There are members in a course iteration OU with an inconsistent course iteration tag.\
+ Inconsistent names are {emails_inconsistent_accounts}"
+ )
+
+ def check_double_iteration_names(self, AWSdata: AWSTree) -> None:
+ """Check if there are multiple OU's with the same name in AWS."""
+ names = [iteration.name for iteration in AWSdata.iterations]
+ duplicates = [iteration_name for iteration_name in set(names) if names.count(iteration_name) > 1]
+
+ if duplicates:
+ raise Exception(
+ f"There are multiple course iteration OUs with the same name. Duplicates are: {duplicates}"
+ )
+
+ def check_aws_api_connection(self) -> None:
+ """Check AWS API connection establishment with current boto3 credentials."""
+ self.api_talker.get_caller_identity()
+
+ def check_iam_policy(self, desired_actions: list[str]) -> None:
+ """Check permissions for list of AWS API actions."""
+ iam_user_arn = self.api_talker.get_caller_identity()["Arn"]
+ policy_evaluations = self.api_talker.simulate_principal_policy(iam_user_arn, desired_actions)
+
+ denied_api_actions = [
+ evaluation_result["EvalActionName"]
+ for evaluation_result in policy_evaluations["EvaluationResults"]
+ if evaluation_result["EvalDecision"] != "allowed"
+ ]
+
+ if denied_api_actions:
+ raise Exception(f"Some AWS API actions have been denied: {denied_api_actions}.")
+
+ def check_organization_existence(self) -> None:
+ """Check existence AWS organization."""
+ self.api_talker.describe_organization()
+
+ def check_is_management_account(self) -> None:
+ """Check if AWS API caller has same effective account ID as the organization's management account."""
+ organization_info = self.api_talker.describe_organization()
+ iam_user_info = self.api_talker.get_caller_identity()
+
+ management_account_id = organization_info["Organization"]["MasterAccountId"]
+ api_caller_account_id = iam_user_info["Account"]
+ is_management_account = management_account_id == api_caller_account_id
+
+ if not is_management_account:
+ raise Exception("AWS API caller and organization's management account have different account IDs.")
+
+ def check_scp_enabled(self) -> None:
+ """Check if SCP policy type feature is enabled for the AWS organization."""
+ organization_info = self.api_talker.describe_organization()
+ available_policy_types = organization_info["Organization"]["AvailablePolicyTypes"]
+
+ scp_is_enabled = any(
+ policy["Type"] == "SERVICE_CONTROL_POLICY" and policy["Status"] == "ENABLED"
+ for policy in available_policy_types
+ )
+
+ if not scp_is_enabled:
+ raise Exception("The SCP policy type is disabled for the organization.")
+
+ def pipeline_preconditions(self, api_permissions: list[str]) -> None:
+ """
+ Check all crucial pipeline preconditions. Raise an exception on the first failure.
+
+ Preconditions:
+ 1. Locatable boto3 credentials and successful AWS API connection
+ 2. Check allowed AWS API actions based on IAM policy of caller
+ 3. Existing organization for AWS API caller
+ 4. AWS API caller acts under same account ID as organization's management account ID
+ 5. SCP policy type feature enabled for organization
+ """
+ preconditions = [
+ (self.check_aws_api_connection, (), "AWS API connection established"),
+ (self.check_iam_policy, (api_permissions,), "AWS API actions permissions"),
+ (self.check_organization_existence, (), "AWS organization existence"),
+ (self.check_is_management_account, (), "AWS API caller is management account"),
+ (self.check_scp_enabled, (), "SCP enabled"),
+ ]
+
+ for precondition, args, description in preconditions:
+ precondition(*args)
+ self.logger.info(f"Pipeline precondition success: {description}.")
diff --git a/website/projects/aws/awssync_checks_permissions.py b/website/projects/aws/awssync_checks_permissions.py
new file mode 100644
index 00000000..6028a04a
--- /dev/null
+++ b/website/projects/aws/awssync_checks_permissions.py
@@ -0,0 +1,57 @@
+api_permissions = [
+ # "organizations:AcceptHandshake",
+ "organizations:AttachPolicy",
+ # "organizations:CancelHandshake",
+ # "organizations:CloseAccount",
+ "organizations:CreateAccount",
+ # "organizations:CreateGovCloudAccount",
+ "organizations:CreateOrganization",
+ "organizations:CreateOrganizationalUnit",
+ "organizations:CreatePolicy",
+ # "organizations:DeclineHandshake",
+ # "organizations:DeleteOrganization",
+ "organizations:DeleteOrganizationalUnit",
+ "organizations:DeletePolicy",
+ "organizations:DeleteResourcePolicy",
+ # "organizations:DeregisterDelegatedAdministrator",
+ "organizations:DescribeAccount",
+ "organizations:DescribeCreateAccountStatus",
+ "organizations:DescribeEffectivePolicy",
+ # "organizations:DescribeHandshake",
+ "organizations:DescribeOrganization",
+ "organizations:DescribeOrganizationalUnit",
+ "organizations:DescribePolicy",
+ "organizations:DescribeResourcePolicy",
+ "organizations:DetachPolicy",
+ # "organizations:DisableAWSServiceAccess",
+ "organizations:DisablePolicyType",
+ # "organizations:EnableAWSServiceAccess",
+ # "organizations:EnableAllFeatures",
+ "organizations:EnablePolicyType",
+ # "organizations:InviteAccountToOrganization",
+ # "organizations:LeaveOrganization",
+ # "organizations:ListAWSServiceAccessForOrganization",
+ "organizations:ListAccounts",
+ "organizations:ListAccountsForParent",
+ "organizations:ListChildren",
+ "organizations:ListCreateAccountStatus",
+ # "organizations:ListDelegatedAdministrators",
+ # "organizations:ListDelegatedServicesForAccount",
+ # "organizations:ListHandshakesForAccount",
+ # "organizations:ListHandshakesForOrganization",
+ "organizations:ListOrganizationalUnitsForParent",
+ "organizations:ListParents",
+ "organizations:ListPolicies",
+ "organizations:ListPoliciesForTarget",
+ "organizations:ListRoots",
+ "organizations:ListTagsForResource",
+ "organizations:ListTargetsForPolicy",
+ "organizations:MoveAccount",
+ "organizations:PutResourcePolicy",
+ # "organizations:RegisterDelegatedAdministrator",
+ # "organizations:RemoveAccountFromOrganization",
+ "organizations:TagResource",
+ "organizations:UntagResource",
+ "organizations:UpdateOrganizationalUnit",
+ "organizations:UpdatePolicy",
+]
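This list is what the tests pass to Checks.pipeline_preconditions, which runs the five preconditions in order and logs each success. A sketch of the call site, assuming boto3 credentials for the organization's management account are configured:

    from projects.aws.awssync_checks import Checks
    from projects.aws.awssync_checks_permissions import api_permissions

    checks = Checks()
    try:
        checks.pipeline_preconditions(api_permissions)
    except Exception as error:
        # The first failing precondition aborts the run before any sync work starts.
        print(f"Pipeline precondition failed: {error}")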
diff --git a/website/projects/awssync_structs.py b/website/projects/aws/awssync_structs.py
similarity index 100%
rename from website/projects/awssync_structs.py
rename to website/projects/aws/awssync_structs.py
diff --git a/website/projects/awssync_checks.py b/website/projects/awssync_checks.py
deleted file mode 100644
index 7c28f02d..00000000
--- a/website/projects/awssync_checks.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from __future__ import annotations
-
-from projects.awssync_structs import AWSTree
-
-
-class Checks:
- """Class for pipeline checks."""
-
- def check_members_in_correct_iteration(self, AWSdata: AWSTree) -> None:
- """Check if the data from the member tag matches the semester OU it is in."""
- emails_inconsistent_accounts = [
- member.project_email
- for iteration in AWSdata.iterations
- for member in iteration.members
- if member.project_semester != iteration.name
- ]
-
- if emails_inconsistent_accounts:
- raise Exception(
- f"There are members in a course iteration OU with an inconsistent course iteration tag.\
- Inconsistent names are {emails_inconsistent_accounts}"
- )
-
- def check_double_iteration_names(self, AWSdata: AWSTree) -> None:
- """Check if there are multiple OU's with the same name in AWS."""
- names = [iteration.name for iteration in AWSdata.iterations]
- duplicates = [iteration_name for iteration_name in set(names) if names.count(iteration_name) > 1]
-
- if duplicates:
- raise Exception(
- f"There are multiple course iteration OUs with the same name. Duplicates are: {duplicates}"
- )
diff --git a/website/projects/tests/test_awssync_checks.py b/website/projects/tests/test_awssync_checks.py
deleted file mode 100644
index 6e40fc70..00000000
--- a/website/projects/tests/test_awssync_checks.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""Tests for awssync/checks.py."""
-
-from django.test import TestCase
-
-from projects.awssync_checks import Checks
-from projects.awssync_structs import AWSTree, Iteration, SyncData
-
-
-class ChecksTest(TestCase):
- def setUp(self):
- self.checks = Checks()
- self.aws_tree1 = AWSTree(
- "AWS Tree",
- "12345",
- [
- Iteration(
- "Fall 2020",
- "54321",
- [
- SyncData("email1@example.com", "project1", "Fall 2020"),
- SyncData("email2@example.com", "project2", "Fall 2020"),
- ],
- ),
- Iteration(
- "Spring 2021",
- "98765",
- [
- SyncData("email3@example.com", "project3", "Spring 2021"),
- SyncData("email4@example.com", "project4", "Spring 2021"),
- ],
- ),
- ],
- )
-
- self.aws_tree2 = AWSTree(
- "AWS Tree",
- "12345",
- [
- Iteration(
- "Fall 2020",
- "54321",
- [
- SyncData("email1@example.com", "project1", "Fall 2020"),
- SyncData("email2@example.com", "project2", "Fall 2020"),
- ],
- ),
- Iteration(
- "Spring 2021",
- "98765",
- [
- SyncData("email3@example.com", "project3", "Fall 2021"),
- SyncData("email4@example.com", "project4", "Spring 2021"),
- ],
- ),
- ],
- )
-
- self.aws_tree3 = AWSTree(
- "AWS Tree",
- "12345",
- [
- Iteration(
- "Fall 2020",
- "54321",
- [
- SyncData("email1@example.com", "project1", "Fall 2020"),
- SyncData("email2@example.com", "project2", "Fall 2020"),
- ],
- ),
- Iteration(
- "Fall 2020",
- "98765",
- [
- SyncData("email3@example.com", "project3", "Fall 2021"),
- SyncData("email4@example.com", "project4", "Spring 2021"),
- ],
- ),
- ],
- )
-
- def test_check_members_in_correct_iteration(self):
- # Test when correct
- self.assertIsNone(self.checks.check_members_in_correct_iteration(self.aws_tree1))
-
- # Test when incorrect
- self.assertRaises(Exception, self.checks.check_members_in_correct_iteration, self.aws_tree2)
-
- def test_check_double_iteration_names(self):
- # Test when correct
- self.assertIsNone(self.checks.check_double_iteration_names(self.aws_tree1))
-
- # Test when double
- self.assertRaises(Exception, self.checks.check_double_iteration_names, self.aws_tree3)
diff --git a/website/projects/tests/test_awssync.py b/website/projects/tests/tests_aws/test_awssync.py
similarity index 98%
rename from website/projects/tests/test_awssync.py
rename to website/projects/tests/tests_aws/test_awssync.py
index 0f493b77..64f6938f 100644
--- a/website/projects/tests/test_awssync.py
+++ b/website/projects/tests/tests_aws/test_awssync.py
@@ -16,7 +16,7 @@
from mailing_lists.models import MailingList
-from projects import awssync
+from projects.aws import awssync
from projects.models import Project
@@ -315,7 +315,7 @@ def test_pipeline_preconditions__all_success(self):
check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
# Mock return value of check_iam_policy.
- with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker:
mocker.return_value = check_iam_policy
success = self.sync.pipeline_preconditions()
@@ -350,7 +350,7 @@ def test_pipeline_preconditions__no_iam(self):
check_api_actions = self.sync.check_iam_policy(iam_user_arn, desired_actions)
# Mock return value of check_iam_policy.
- with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker:
mocker.return_value = check_api_actions
success = self.sync.pipeline_preconditions()
@@ -377,7 +377,7 @@ def test_pipeline_preconditions__no_organization(self):
check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
# Mock return value of check_iam_policy.
- with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker:
mocker.return_value = check_iam_policy
success = self.sync.pipeline_preconditions()
@@ -407,9 +407,9 @@ def test_pipeline_preconditions__no_management(self):
check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
# Mock return value of check_iam_policy.
- with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam:
+ with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker_iam:
mocker_iam.return_value = check_iam_policy
- with patch("projects.awssync.AWSSync.check_aws_api_connection") as mocker_api:
+ with patch("projects.aws.awssync.AWSSync.check_aws_api_connection") as mocker_api:
mocker_api.return_value = True, {"Account": "daddy", "Arn": "01234567890123456789"}
success = self.sync.pipeline_preconditions()
@@ -440,12 +440,12 @@ def test_pipeline_preconditions__no_scp(self):
check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
# Mock return value of check_iam_policy.
- with patch("projects.awssync.AWSSync.check_iam_policy") as mocker_iam:
+ with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker_iam:
mocker_iam.return_value = check_iam_policy
# Mock return value of check_organization_existence with no SCP policy enabled.
organization_info["AvailablePolicyTypes"] = []
- with patch("projects.awssync.AWSSync.check_organization_existence") as mocker:
+ with patch("projects.aws.awssync.AWSSync.check_organization_existence") as mocker:
mocker.return_value = True, organization_info
success = self.sync.pipeline_preconditions()
@@ -548,7 +548,7 @@ def test_pipeline(self):
mocker().simulate_principal_policy.return_value = mock_evaluation_results
check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
- with patch("projects.awssync.AWSSync.check_iam_policy") as mocker:
+ with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker:
mocker.return_value = check_iam_policy
success = self.sync.pipeline()
@@ -815,7 +815,7 @@ def test_pipeline_create_and_move_accounts__email_exists(self):
root_id = moto_client.list_roots()["Roots"][0]["Id"]
course_iteration_id = self.sync.create_course_iteration_OU("2023Fall")
- with patch("projects.awssync.AWSSync.pipeline_create_account") as mocker:
+ with patch("projects.aws.awssync.AWSSync.pipeline_create_account") as mocker:
mocker.return_value = False, "EMAIL_ALREADY_EXISTS"
success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
diff --git a/website/projects/tests/tests_aws/test_awssync_checks.py b/website/projects/tests/tests_aws/test_awssync_checks.py
new file mode 100644
index 00000000..6a141759
--- /dev/null
+++ b/website/projects/tests/tests_aws/test_awssync_checks.py
@@ -0,0 +1,183 @@
+"""Tests for awssync/checks.py."""
+from unittest.mock import MagicMock
+
+from botocore.exceptions import ClientError
+
+from django.test import TestCase
+
+from moto import mock_iam, mock_organizations, mock_sts
+
+from projects.aws.awssync_checks import Checks
+from projects.aws.awssync_checks_permissions import api_permissions
+from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
+
+
+@mock_sts
+@mock_organizations
+@mock_iam
+class ChecksTest(TestCase):
+ def setUp(self):
+ self.checks = Checks()
+ self.aws_tree1 = AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ SyncData("email1@example.com", "project1", "Fall 2020"),
+ SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ SyncData("email3@example.com", "project3", "Spring 2021"),
+ SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree2 = AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ SyncData("email1@example.com", "project1", "Fall 2020"),
+ SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ Iteration(
+ "Spring 2021",
+ "98765",
+ [
+ SyncData("email3@example.com", "project3", "Fall 2021"),
+ SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ self.aws_tree3 = AWSTree(
+ "AWS Tree",
+ "12345",
+ [
+ Iteration(
+ "Fall 2020",
+ "54321",
+ [
+ SyncData("email1@example.com", "project1", "Fall 2020"),
+ SyncData("email2@example.com", "project2", "Fall 2020"),
+ ],
+ ),
+ Iteration(
+ "Fall 2020",
+ "98765",
+ [
+ SyncData("email3@example.com", "project3", "Fall 2021"),
+ SyncData("email4@example.com", "project4", "Spring 2021"),
+ ],
+ ),
+ ],
+ )
+
+ def test_check_members_in_correct_iteration(self):
+ # Test when correct
+ self.assertIsNone(self.checks.check_members_in_correct_iteration(self.aws_tree1))
+
+ # Test when incorrect
+ self.assertRaises(Exception, self.checks.check_members_in_correct_iteration, self.aws_tree2)
+
+ def test_check_double_iteration_names(self):
+ # Test when correct
+ self.assertIsNone(self.checks.check_double_iteration_names(self.aws_tree1))
+
+ # Test when double
+ self.assertRaises(Exception, self.checks.check_double_iteration_names, self.aws_tree3)
+
+ def mock_simulate_principal_policy(self, allow: bool, api_operations: list[str]):
+ return MagicMock(
+ return_value={
+ "EvaluationResults": [
+ {"EvalActionName": api_operation_name, "EvalDecision": "allowed" if allow else "implicitDeny"}
+ for api_operation_name in api_operations
+ ]
+ }
+ )
+
+ def test_check_aws_api_connection(self):
+ self.checks.check_aws_api_connection()
+
+ def test_check_iam_policy(self):
+ self.checks.api_talker.iam_client.simulate_principal_policy = self.mock_simulate_principal_policy(
+ True, api_permissions
+ )
+ self.checks.check_iam_policy(api_permissions)
+
+ def test_check_iam_policy__exception(self):
+ self.checks.api_talker.iam_client.simulate_principal_policy = self.mock_simulate_principal_policy(
+ False, api_permissions
+ )
+ self.assertRaises(Exception, self.checks.check_iam_policy, api_permissions)
+
+ def test_check_organization_existence(self):
+ self.checks.api_talker.create_organization("ALL")
+ self.checks.check_organization_existence()
+
+ def test_check_organization_existence__exception(self):
+ self.assertRaises(ClientError, self.checks.check_organization_existence)
+
+ def test_check_is_management_account(self):
+ self.checks.api_talker.create_organization("ALL")
+ self.checks.check_is_management_account()
+
+ def test_check_is_management_account__exception(self):
+ self.checks.api_talker.create_organization("ALL")
+
+ mock_identity = self.checks.api_talker.sts_client.get_caller_identity()
+ mock_identity["Account"] = "alice123"
+ self.checks.api_talker.sts_client.get_caller_identity = MagicMock(return_value=mock_identity)
+
+ self.assertRaises(Exception, self.checks.check_is_management_account)
+
+ def test_check_scp_enabled(self):
+ self.checks.api_talker.create_organization("ALL")
+
+ self.checks.api_talker.org_client.enable_policy_type(
+ RootId=self.checks.api_talker.list_roots()[0]["Id"],
+ PolicyType="SERVICE_CONTROL_POLICY",
+ )
+
+ self.checks.check_scp_enabled()
+
+ def test_check_scp_enabled__exception(self):
+ self.checks.api_talker.create_organization("ALL")
+
+ args = {
+ "RootId": self.checks.api_talker.list_roots()[0]["Id"],
+ "PolicyType": "SERVICE_CONTROL_POLICY",
+ }
+
+ self.checks.api_talker.org_client.enable_policy_type(**args)
+ response = self.checks.api_talker.org_client.disable_policy_type(**args)
+
+ mock_describe_organization = self.checks.api_talker.describe_organization()
+ mock_describe_organization["Organization"]["AvailablePolicyTypes"] = response["Root"]["PolicyTypes"]
+ self.checks.api_talker.org_client.describe_organization = MagicMock(return_value=mock_describe_organization)
+
+ self.assertRaises(Exception, self.checks.check_scp_enabled)
+
+ def test_pipeline_preconditions(self):
+ self.checks.api_talker.create_organization("ALL")
+
+ self.checks.api_talker.iam_client.simulate_principal_policy = self.mock_simulate_principal_policy(
+ True, api_permissions
+ )
+
+ self.checks.pipeline_preconditions(api_permissions)
diff --git a/website/projects/tests/test_awssync_structs.py b/website/projects/tests/tests_aws/test_awssync_structs.py
similarity index 99%
rename from website/projects/tests/test_awssync_structs.py
rename to website/projects/tests/tests_aws/test_awssync_structs.py
index 8b27840a..3ecb722c 100644
--- a/website/projects/tests/test_awssync_structs.py
+++ b/website/projects/tests/tests_aws/test_awssync_structs.py
@@ -6,7 +6,7 @@
from courses.models import Semester
-from projects import awssync
+from projects.aws import awssync
class SyncDataTest(TestCase):
From f9187c7567fda2ec117aadebd2540085cf8651ea Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Thu, 11 May 2023 10:40:51 +0000
Subject: [PATCH 22/32] Refactor creating course OU and attaching policy (#57)
* Add new refactored AWSSync class
* Refactored creating course OU and attaching policy
* Add coverage unit tests
* Add additional OU name check to unit test
* Replace deprecated unittest method alias
* Improve naming coverage unit tests
* Fix test to check name and ID for single OU instead of over possibly multiple OUs
* Remove unused logger
* Rename function to be more accurate in what it does
---
website/projects/aws/awssync_refactored.py | 36 ++++++++
.../tests_aws/test_awssync_refactored.py | 84 +++++++++++++++++++
2 files changed, 120 insertions(+)
create mode 100644 website/projects/aws/awssync_refactored.py
create mode 100644 website/projects/tests/tests_aws/test_awssync_refactored.py
diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py
new file mode 100644
index 00000000..2f843eff
--- /dev/null
+++ b/website/projects/aws/awssync_refactored.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+from botocore.exceptions import ClientError
+
+from courses.models import Semester
+
+from projects.aws.awsapitalker import AWSAPITalker
+from projects.aws.awssync_structs import AWSTree
+
+
+class AWSSyncRefactored:
+ """Synchronise with Amazon Web Services."""
+
+ def __init__(self):
+ """Create an AWSSync instance."""
+ self.api_talker = AWSAPITalker()
+
+ def get_or_create_course_ou(self, tree: AWSTree) -> str:
+ """Create organizational unit under root with name of current semester."""
+ root_id = tree.ou_id
+ course_ou_name = str(Semester.objects.get_or_create_current_semester())
+ course_ou_id = next((ou.ou_id for ou in tree.iterations if ou.name == course_ou_name), None)
+
+ if not course_ou_id:
+ course_ou = self.api_talker.create_organizational_unit(root_id, course_ou_name)
+ course_ou_id = course_ou["OrganizationalUnit"]["Id"]
+
+ return course_ou_id
+
+ def attach_policy(self, target_id: str, policy_id: str) -> None:
+ """Attach policy to target resource."""
+ try:
+ self.api_talker.attach_policy(target_id, policy_id)
+ except ClientError as error:
+ if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException":
+ raise
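A usage sketch for the two methods above, with placeholder root and policy IDs and assuming the Django environment provides a current Semester. attach_policy swallows a duplicate attachment and re-raises any other ClientError:

    from projects.aws.awssync_refactored import AWSSyncRefactored
    from projects.aws.awssync_structs import AWSTree

    sync = AWSSyncRefactored()
    tree = AWSTree("root", "r-123", [])                 # illustrative: an empty tree rooted at r-123
    course_ou_id = sync.get_or_create_course_ou(tree)   # creates the OU, since the tree has no iterations
    sync.attach_policy(course_ou_id, "p-123")           # illustrative SCP policy ID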
diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py
new file mode 100644
index 00000000..abf026b0
--- /dev/null
+++ b/website/projects/tests/tests_aws/test_awssync_refactored.py
@@ -0,0 +1,84 @@
+"""Tests for awssync_refactored.py."""
+import json
+from unittest.mock import patch
+
+from botocore.exceptions import ClientError
+
+from django.test import TestCase
+
+from moto import mock_organizations
+
+from courses.models import Semester
+
+from projects.aws.awssync_refactored import AWSSyncRefactored
+from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
+
+
+@mock_organizations
+class AWSSyncRefactoredTest(TestCase):
+ def setUp(self):
+ self.sync = AWSSyncRefactored()
+
+ def test_get_or_create_course_ou__new(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
+ tree = AWSTree("root", root_id, [])
+ current_semester_name = "Spring 2023"
+
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value=current_semester_name):
+ course_ou_id = self.sync.get_or_create_course_ou(tree)
+
+ course_ou_exists = any(
+ ou["Id"] == course_ou_id and ou["Name"] == current_semester_name
+ for ou in self.sync.api_talker.list_organizational_units_for_parent(root_id)
+ )
+
+ self.assertTrue(course_ou_exists)
+
+ def test_get_or_create_course_ou__already_exists(self):
+ tree = AWSTree(
+ "root",
+ "r-123",
+ [
+ Iteration("Spring 2023", "ou-456", [SyncData("alice@giphouse.nl", "alices-project", "Spring 2023")]),
+ Iteration("Fall 2023", "ou-789", [SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023")]),
+ ],
+ )
+
+ with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
+ course_ou_id = self.sync.get_or_create_course_ou(tree)
+ self.assertEqual("ou-456", course_ou_id)
+
+ def test_attach_policy__not_attached(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
+
+ new_policy_content = json.dumps(
+ {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ )
+ new_policy_id = self.sync.api_talker.org_client.create_policy(
+ Content=new_policy_content, Description="Deny all access.", Name="DenyAll", Type="SERVICE_CONTROL_POLICY"
+ )["Policy"]["PolicySummary"]["Id"]
+
+ self.sync.attach_policy(root_id, new_policy_id)
+ attached_policies = self.sync.api_talker.org_client.list_policies_for_target(
+ TargetId=root_id, Filter="SERVICE_CONTROL_POLICY"
+ )["Policies"]
+ attached_policy_ids = [policy["Id"] for policy in attached_policies]
+
+ self.assertIn(new_policy_id, attached_policy_ids)
+
+ def test_attach_policy__caught_exception(self):
+ # Error code "DuplicatePolicyAttachmentException" cannot be simulated by moto, so it is mocked.
+ attach_policy_hard_side_effect = ClientError(
+ {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy"
+ )
+ with patch.object(
+ self.sync.api_talker.org_client, "attach_policy", side_effect=attach_policy_hard_side_effect
+ ):
+ return_value = self.sync.attach_policy("r-123", "p-123")
+
+ self.assertIsNone(return_value)
+
+ def test_attach_policy__reraised_exception(self):
+ self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123")
From 32d823012a09023c8864f846e0d611e9f293ac91 Mon Sep 17 00:00:00 2001
From: Henk Berendsen <61596108+hb140502@users.noreply.github.com>
Date: Fri, 12 May 2023 13:25:46 +0200
Subject: [PATCH 23/32] Remove unnecessary moto_client variables in
test_awsapitalker.py (#56)
* Changed moto_client into self.api_talker.org_client
* Use class decorators for mocking instead of start and stop functions
---
.../tests/tests_aws/test_awsapitalker.py | 34 ++++++-------------
1 file changed, 11 insertions(+), 23 deletions(-)
diff --git a/website/projects/tests/tests_aws/test_awsapitalker.py b/website/projects/tests/tests_aws/test_awsapitalker.py
index 5d6a1c9e..9cf3301e 100644
--- a/website/projects/tests/tests_aws/test_awsapitalker.py
+++ b/website/projects/tests/tests_aws/test_awsapitalker.py
@@ -1,8 +1,6 @@
import json
from unittest.mock import MagicMock, patch
-import boto3
-
from django.test import TestCase
from moto import mock_organizations, mock_sts
@@ -10,21 +8,15 @@
from projects.aws import awsapitalker
+@mock_organizations
+@mock_sts
class AWSAPITalkerTest(TestCase):
"""Test AWSAPITalker class."""
def setUp(self):
"""Set up testing environment."""
- self.mock_org = mock_organizations()
- self.mock_sts = mock_sts()
- self.mock_org.start()
- self.mock_sts.start()
self.api_talker = awsapitalker.AWSAPITalker()
- def tearDown(self):
- self.mock_org.stop()
- self.mock_sts.stop()
-
def create_organization(self):
"""Returns the ID of the organization created for testing"""
org_info = self.api_talker.create_organization("ALL")
@@ -40,11 +32,9 @@ def create_dummy_policy(self):
:return: ID of the created policy.
"""
- moto_client = boto3.client("organizations")
-
policy_content = self.create_dummy_policy_content()
- return moto_client.create_policy(
+ return self.api_talker.org_client.create_policy(
Name="Test policy",
Content=policy_content,
Type="SERVICE_CONTROL_POLICY",
@@ -64,8 +54,6 @@ def test_create_organizational_unit(self):
self.assertEqual(response["OrganizationalUnit"]["Name"], "Test OU")
def test_attach_policy(self):
- moto_client = boto3.client("organizations")
-
org_id = self.create_organization()
policy_id = self.create_dummy_policy()
@@ -75,7 +63,7 @@ def test_attach_policy(self):
self.api_talker.attach_policy(ou_id, policy_id)
- response = moto_client.list_policies_for_target(TargetId=ou_id, Filter="SERVICE_CONTROL_POLICY")
+ response = self.api_talker.org_client.list_policies_for_target(TargetId=ou_id, Filter="SERVICE_CONTROL_POLICY")
self.assertIn(policy_id, [p["Id"] for p in response["Policies"]])
def test_get_caller_identity(self):
@@ -119,20 +107,16 @@ def test_describe_policy(self):
self.assertEquals(policy_content, policy["Content"])
def test_create_account(self):
- moto_client = boto3.client("organizations")
-
self.create_organization()
response = self.api_talker.create_account("test@example.com", "Test")
- accounts = moto_client.list_accounts()["Accounts"]
+ accounts = self.api_talker.org_client.list_accounts()["Accounts"]
self.assertEquals(response["CreateAccountStatus"]["AccountName"], "Test")
self.assertIn(("Test", "test@example.com"), [(account["Name"], account["Email"]) for account in accounts])
def test_move_account(self):
- moto_client = boto3.client("organizations")
-
org_id = self.create_organization()
account_status = self.api_talker.create_account("test@example.com", "Test")
@@ -145,8 +129,12 @@ def test_move_account(self):
self.api_talker.move_account(account_id, source_ou_id, dest_ou_id)
- accounts_under_source = moto_client.list_children(ParentId=source_ou_id, ChildType="ACCOUNT")["Children"]
- accounts_under_dest = moto_client.list_children(ParentId=dest_ou_id, ChildType="ACCOUNT")["Children"]
+ accounts_under_source = self.api_talker.org_client.list_children(ParentId=source_ou_id, ChildType="ACCOUNT")[
+ "Children"
+ ]
+ accounts_under_dest = self.api_talker.org_client.list_children(ParentId=dest_ou_id, ChildType="ACCOUNT")[
+ "Children"
+ ]
self.assertNotIn(account_id, [account["Id"] for account in accounts_under_source])
self.assertIn(account_id, [account["Id"] for account in accounts_under_dest])
From 54e11dab072ae31c29ccaa147478b980aa135dd5 Mon Sep 17 00:00:00 2001
From: Jer111 <82157107+Jer111@users.noreply.github.com>
Date: Tue, 16 May 2023 10:01:55 +0200
Subject: [PATCH 24/32] Refactor generate synchronization list and extract AWS
tree
* refactored files
* removed create_aws_org from refactored
* flake8
* Correct refactored functions
* Changed extract_aws_setup to be more elegant
* rm list_all_mailing_lists and update extract_aws
* Changed extract_aws
* black
* Changed extract_aws to get_values
* pydocstring
* pydocstring
* Black
---
website/projects/aws/awssync_refactored.py | 84 +++++++++++-
.../tests_aws/test_awssync_refactored.py | 129 ++++++++++++++++++
2 files changed, 212 insertions(+), 1 deletion(-)
diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py
index 2f843eff..91358645 100644
--- a/website/projects/aws/awssync_refactored.py
+++ b/website/projects/aws/awssync_refactored.py
@@ -1,11 +1,16 @@
from __future__ import annotations
+import logging
+
from botocore.exceptions import ClientError
from courses.models import Semester
+from mailing_lists.models import MailingList
+
from projects.aws.awsapitalker import AWSAPITalker
-from projects.aws.awssync_structs import AWSTree
+from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
+from projects.models import Project
class AWSSyncRefactored:
@@ -14,6 +19,83 @@ class AWSSyncRefactored:
def __init__(self):
"""Create an AWSSync instance."""
self.api_talker = AWSAPITalker()
+ self.logger = logging.getLogger("django.aws")
+ self.logger.setLevel(logging.DEBUG)
+ self.fail = False
+
+ def get_syncdata_from_giphouse(self) -> list[SyncData]:
+ """
+ Create a list of SyncData structs containing email, slug and semester.
+
+ Together, the slug and semester form a uniqueness constraint.
+
+ :return: list of SyncData structs with email, slug and semester.
+ """
+ sync_data_list = []
+ current_semester = Semester.objects.get_or_create_current_semester()
+
+ for project in Project.objects.filter(mailinglist__isnull=False, semester=current_semester).values(
+ "slug", "semester", "mailinglist"
+ ):
+ project_slug = project["slug"]
+ project_semester = str(Semester.objects.get(pk=project["semester"]))
+ project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address
+
+ sync_data = SyncData(project_email, project_slug, project_semester)
+ sync_data_list.append(sync_data)
+ return sync_data_list
+
+ def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]) -> list[SyncData]:
+ """
+ Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS.
+
+ This includes their ID and email address, to be able to put users in the correct AWS organization later.
+ """
+ return [project for project in giphouse_data if project not in aws_data]
+
+ def get_tag_value(self, tags: list[dict[str, str]], key: str) -> str:
+ """Return the value of the tag with the given key, or None if no such tag exists."""
+ for tag in tags:
+ if tag["Key"] == key:
+ return tag["Value"]
+ return None
+
+ def extract_aws_setup(self, parent_ou_id: str) -> AWSTree:
+ """
+ Build an AWSTree describing all the children of the parent OU.
+
+ :param parent_ou_id: The ID of the parent OU.
+ :return: An AWSTree object containing all the children of the parent OU.
+ """
+ aws_tree = AWSTree(
+ "root",
+ parent_ou_id,
+ [
+ Iteration(
+ ou["Name"],
+ ou["Id"],
+ member_accounts := [
+ SyncData(
+ account["Email"],
+ self.get_tag_value(tags, "project_slug"),
+ self.get_tag_value(tags, "project_semester"),
+ )
+ for account in self.api_talker.list_accounts_for_parent(parent_id=ou["Id"])
+ for tags in [self.api_talker.list_tags_for_resource(resource_id=account["Id"])]
+ ],
+ )
+ for ou in self.api_talker.list_organizational_units_for_parent(parent_id=parent_ou_id)
+ ],
+ )
+
+ incomplete_accounts = [
+ account for account in member_accounts if not (account.project_slug and account.project_semester)
+ ]
+
+ if incomplete_accounts:
+ raise Exception(f"Found incomplete accounts in AWS: {incomplete_accounts}.")
+
+ return aws_tree
def get_or_create_course_ou(self, tree: AWSTree) -> str:
"""Create organizational unit under root with name of current semester."""
diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py
index abf026b0..0346dfb6 100644
--- a/website/projects/tests/tests_aws/test_awssync_refactored.py
+++ b/website/projects/tests/tests_aws/test_awssync_refactored.py
@@ -2,6 +2,7 @@
import json
from unittest.mock import patch
+
from botocore.exceptions import ClientError
from django.test import TestCase
@@ -10,14 +11,142 @@
from courses.models import Semester
+from mailing_lists.models import MailingList
+
from projects.aws.awssync_refactored import AWSSyncRefactored
from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
+from projects.models import Project
@mock_organizations
class AWSSyncRefactoredTest(TestCase):
def setUp(self):
+ """Set up testing environment."""
self.sync = AWSSyncRefactored()
+ self.api_talker = self.sync.api_talker
+
+ def test_get_syncdata_from_giphouse_normal(self):
+ """Test get_emails_with_teamids function in optimal conditions."""
+ self.semester = Semester.objects.create(year=2023, season=Semester.SPRING)
+ for i in range(3):
+ self.mailing_list = MailingList.objects.create(address="test" + str(i))
+ self.project = Project.objects.create(
+ id=i, name="test" + str(i), semester=self.semester, slug="test" + str(i)
+ )
+ self.mailing_list.projects.add(self.project)
+
+ email_id = self.sync.get_syncdata_from_giphouse()
+
+ self.assertIsInstance(email_id, list)
+ self.assertIsInstance(email_id[0], SyncData)
+ expected_result = [
+ SyncData("test0@giphouse.nl", "test0", "Spring 2023"),
+ SyncData("test1@giphouse.nl", "test1", "Spring 2023"),
+ SyncData("test2@giphouse.nl", "test2", "Spring 2023"),
+ ]
+ self.assertEqual(email_id, expected_result)
+
+ def test_get_syncdata_from_giphouse_no_project(self):
+ """Test get_emails_with_teamids function where the mailinglist is not assigned to a project"""
+ MailingList.objects.all().delete()
+ self.mailing_list = MailingList.objects.create(address="test2")
+ email_id = self.sync.get_syncdata_from_giphouse()
+ self.assertIsInstance(email_id, list)
+ self.assertEqual(email_id, [])
+
+ def test_get_syncdata_from_giphouse_no_mailing_list(self):
+ """Test get_emails_with_teamids function where no mailinglists exist"""
+ MailingList.objects.all().delete()
+ Project.objects.all().delete()
+ email_id = self.sync.get_syncdata_from_giphouse()
+ self.assertIsInstance(email_id, list)
+ self.assertEqual(email_id, [])
+
+ def test_get_syncdata_from_giphouse_different_semester(self):
+ """Test get_emails_with_teamids function where the semester is not equal to the current semester"""
+ MailingList.objects.all().delete()
+ new_semester = Semester.objects.create(year=2022, season=Semester.FALL)
+ self.mailing_list = MailingList.objects.create(address="test4")
+ self.project = Project.objects.create(id=4, name="test4", semester=new_semester, slug="test4")
+ self.mailing_list.projects.add(self.project)
+ email_id = self.sync.get_syncdata_from_giphouse()
+ self.assertIsInstance(email_id, list)
+ self.assertEqual(email_id, [])
+
+ def test_AWS_sync_list_both_empty(self):
+ gip_list = []
+ aws_list = []
+ self.assertEqual(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_empty_AWS(self):
+ test1 = SyncData("test1@test1.test1", "test1", "test1")
+ test2 = SyncData("test2@test2.test2", "test2", "test2")
+ gip_list = [test1, test2]
+ aws_list = []
+ self.assertEqual(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list)
+
+ def test_AWS_sync_list_empty_GiP(self):
+ test1 = SyncData("test1@test1.test1", "test1", "test1")
+ test2 = SyncData("test2@test2.test2", "test2", "test2")
+ gip_list = []
+ aws_list = [test1, test2]
+ self.assertEqual(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_both_full(self):
+ test1 = SyncData("test1@test1.test1", "test1", "test1")
+ test2 = SyncData("test2@test2.test2", "test2", "test2")
+ test3 = SyncData("test3@test3.test3", "test3", "test3")
+ gip_list = [test1, test2]
+ aws_list = [test2, test3]
+ self.assertEqual(self.sync.generate_aws_sync_list(gip_list, aws_list), [test1])
+
+ def test_get_tag_value(self):
+ tags = [{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}]
+ self.assertEqual(self.sync.get_tag_value(tags, "project_semester"), "2021")
+ self.assertEqual(self.sync.get_tag_value(tags, "project_slug"), "test1")
+ self.assertIsNone(self.sync.get_tag_value(tags, "project_name"))
+
+ def test_extract_aws_setup(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.api_talker.list_roots()[0]["Id"]
+
+ ou_response = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1")
+ ou_id = ou_response["OrganizationalUnit"]["Id"]
+
+ account_response = self.api_talker.create_account(
+ email="account_1@gmail.com",
+ account_name="account_1",
+ tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}],
+ )
+ account_id = account_response["CreateAccountStatus"]["AccountId"]
+ self.api_talker.move_account(account_id=account_id, source_parent_id=root_id, dest_parent_id=ou_id)
+
+ aws_tree = self.sync.extract_aws_setup(root_id)
+
+ expected_sync_data = [SyncData("account_1@gmail.com", "test1", "2021")]
+ expected_iteration = Iteration("OU_1", ou_id, expected_sync_data)
+ expected_tree = AWSTree("root", root_id, [expected_iteration])
+
+ self.assertEqual(aws_tree, expected_tree)
+
+ def test_extract_aws_setup_no_slugs(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.api_talker.list_roots()[0]["Id"]
+
+ response_OU_1 = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1")
+ OU_1_id = response_OU_1["OrganizationalUnit"]["Id"]
+ response_account_1 = self.api_talker.create_account(
+ email="account_1@gmail.com",
+ account_name="account_1",
+ tags=[],
+ )
+ account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"]
+
+ self.api_talker.move_account(account_id=account_id_1, source_parent_id=root_id, dest_parent_id=OU_1_id)
+
+ with self.assertRaises(Exception) as context:
+ self.sync.extract_aws_setup(root_id)
+ self.assertIn("Found incomplete accounts in AWS", str(context.exception))
def test_get_or_create_course_ou__new(self):
self.sync.api_talker.create_organization(feature_set="ALL")
From 497abc990b9e01ac1cff61f5f5a333f966a861cc Mon Sep 17 00:00:00 2001
From: flam123
Date: Tue, 23 May 2023 13:10:24 +0200
Subject: [PATCH 25/32] Refactor create and move accounts function
* Changed comments
* Refactored create move
* Refactored create_account
* Changed test
* Added my functions
* Add api talker function for describe_create_account_status
* Deleted create_account function
* Added test
* Fix describe_create_account_status docstring
* Changed describe function
* Finished basic test
* Finished refactor create move
* Black formatting
* Duplicate code
* Finished refactor
---------
Co-authored-by: Henk
---
website/projects/aws/awsapitalker.py | 9 ++
website/projects/aws/awssync_refactored.py | 66 ++++++++++++++
.../tests/tests_aws/test_awsapitalker.py | 11 +++
.../tests_aws/test_awssync_refactored.py | 90 +++++++++++++++++++
4 files changed, 176 insertions(+)
diff --git a/website/projects/aws/awsapitalker.py b/website/projects/aws/awsapitalker.py
index 308a07da..edff72ed 100644
--- a/website/projects/aws/awsapitalker.py
+++ b/website/projects/aws/awsapitalker.py
@@ -160,3 +160,12 @@ def list_roots(self) -> list[dict]:
page_iterator = paginator.paginate()
return self.combine_pages(page_iterator, "Roots")
+
+ def describe_create_account_status(self, create_account_request_id: str) -> dict:
+ """
+ Describe the status of the given account creation request.
+
+ :param create_account_request_id: ID of the account creation request to be described.
+ :return: dictionary containing account creation status information.
+ """
+ return self.org_client.describe_create_account_status(CreateAccountRequestId=create_account_request_id)
diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py
index 91358645..fdbf7588 100644
--- a/website/projects/aws/awssync_refactored.py
+++ b/website/projects/aws/awssync_refactored.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import logging
+import time
from botocore.exceptions import ClientError
@@ -23,6 +24,12 @@ def __init__(self):
self.logger.setLevel(logging.DEBUG)
self.fail = False
+ self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 2
+ self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3
+
+ self.accounts_created = 0
+ self.accounts_moved = 0
+
def get_syncdata_from_giphouse(self) -> list[SyncData]:
"""
Create a list of SyncData struct containing email, slug and semester.
@@ -116,3 +123,62 @@ def attach_policy(self, target_id: str, policy_id: str) -> None:
except ClientError as error:
if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException":
raise
+
+ def create_and_move_accounts(
+ self, new_member_accounts: list[SyncData], root_id: str, destination_ou_id: str
+ ) -> bool:
+ """
+ Create multiple accounts in the organization of the API caller and move them from the root to a destination OU.
+
+ :param new_member_accounts: List of SyncData objects.
+ :param root_id: The organization's root ID.
+ :param destination_ou_id: The organization's destination OU ID.
+ :returns: True iff **all** new member accounts were created and moved successfully.
+ """
+ for new_member in new_member_accounts:
+ # Create member account
+ response = self.api_talker.create_account(
+ new_member.project_email,
+ new_member.project_slug,
+ [
+ {"Key": "project_slug", "Value": new_member.project_slug},
+ {"Key": "project_semester", "Value": new_member.project_semester},
+ ],
+ )
+ # Repeatedly check status of new member account request.
+ request_id = response["CreateAccountStatus"]["Id"]
+
+ for _ in range(self.ACCOUNT_REQUEST_MAX_ATTEMPTS):
+ time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS)
+
+ try:
+ response_status = self.api_talker.describe_create_account_status(request_id)
+ except ClientError as error:
+ self.logger.debug(error)
+ self.logger.debug(f"Failed to get status of account with e-mail: '{new_member.project_email}'.")
+ break
+
+ request_state = response_status["CreateAccountStatus"]["State"]
+ if request_state == "SUCCEEDED":
+ account_id = response_status["CreateAccountStatus"]["AccountId"]
+
+ self.accounts_created += 1
+ try:
+ self.api_talker.move_account(account_id, root_id, destination_ou_id)
+ self.accounts_moved += 1
+ except ClientError as error:
+ self.logger.debug(error)
+ self.logger.debug(f"Failed to move account with e-mail: {new_member.project_email}.")
+ break
+
+ elif request_state == "FAILED":
+ failure_reason = response_status["CreateAccountStatus"]["FailureReason"]
+ self.logger.debug(
+ f"Failed to create account with e-mail: {new_member.project_email}. "
+ f"Failure reason: {failure_reason}"
+ )
+ break
+
+ accounts_to_create = len(new_member_accounts)
+ success = accounts_to_create == self.accounts_created == self.accounts_moved
+ return success
diff --git a/website/projects/tests/tests_aws/test_awsapitalker.py b/website/projects/tests/tests_aws/test_awsapitalker.py
index 9cf3301e..dd7747ea 100644
--- a/website/projects/tests/tests_aws/test_awsapitalker.py
+++ b/website/projects/tests/tests_aws/test_awsapitalker.py
@@ -183,3 +183,14 @@ def test_list_roots(self):
roots = self.api_talker.list_roots()
self.assertTrue(len(roots) == 1)
+
+ def test_describe_create_account_status(self):
+ self.create_organization()
+
+ account = self.api_talker.create_account("test@example.com", "Test")
+ account_id = account["CreateAccountStatus"]["Id"]
+
+ request = self.api_talker.describe_create_account_status(account_id)
+ request_state = request["CreateAccountStatus"]["State"]
+
+ self.assertEqual(request_state, "SUCCEEDED")
diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py
index 0346dfb6..75157013 100644
--- a/website/projects/tests/tests_aws/test_awssync_refactored.py
+++ b/website/projects/tests/tests_aws/test_awssync_refactored.py
@@ -211,3 +211,93 @@ def test_attach_policy__caught_exception(self):
def test_attach_policy__reraised_exception(self):
self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123")
+
+ def test_create_move_account(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
+
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
+
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
+ self.assertTrue(success)
+
+ def test_create_move_account__exception_failure(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
+
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
+
+ with patch.object(self.sync.api_talker, "move_account", side_effect=ClientError({}, "move_account")):
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
+
+ self.assertFalse(success)
+
+ def test_create_move_account__no_move(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
+
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
+
+ with patch.object(
+ self.sync.api_talker,
+ "describe_create_account_status",
+ side_effect=ClientError({}, "describe_create_account_status"),
+ ):
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
+
+ self.assertFalse(success)
+
+ def test_create_move_account__failed(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
+
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("alice@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
+
+ with patch.object(
+ self.sync.api_talker.org_client,
+ "describe_create_account_status",
+ return_value={"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}},
+ ):
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
+
+ self.assertFalse(success)
+
+ def test_create_move_account__in_progress(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
+
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
+
+ with patch.object(
+ self.sync.api_talker.org_client,
+ "describe_create_account_status",
+ return_value={"CreateAccountStatus": {"State": "IN_PROGRESS"}},
+ ):
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
+
+ self.assertFalse(success)
From 301e711535c4c81626088e293130458019d56af0 Mon Sep 17 00:00:00 2001
From: Jer111 <82157107+Jer111@users.noreply.github.com>
Date: Tue, 30 May 2023 10:35:07 +0200
Subject: [PATCH 26/32] 62 policy id and tag fields on frontend panel (#65)
* Added aws policy field on admin page
* Added view parameters for AWS policies
* Added a '.' to satisfy linting
* Updated the AWS policy model
* Added get_current_policy_id function
* Satisfy docstring
* Added test for save function in AWSPolicy model
* Added test case for the save AWSPolicy model
* Added tags values in aws policies
* Updated test_get_policy_id
* Changed GiPHouse name and get_policy name
---
website/projects/admin.py | 10 ++++++-
website/projects/aws/awssync_refactored.py | 9 +++++-
website/projects/migrations/0007_awspolicy.py | 25 ++++++++++++++++
.../0008_awspolicy_is_current_policy.py | 18 ++++++++++++
...ename_name_awspolicy_policy_id_and_more.py | 27 +++++++++++++++++
...er_awspolicy_is_current_policy_and_more.py | 23 +++++++++++++++
.../0011_alter_awspolicy_is_current_policy.py | 21 ++++++++++++++
.../0012_alter_awspolicy_is_current_policy.py | 21 ++++++++++++++
...spolicy_no_permissions_at_root_and_more.py | 27 +++++++++++++++++
...icy_tags_key_alter_awspolicy_tags_value.py | 23 +++++++++++++++
.../0015_alter_awspolicy_tags_key.py | 18 ++++++++++++
website/projects/models.py | 29 +++++++++++++++++++
website/projects/tests/test_models.py | 22 +++++++++++++-
.../tests_aws/test_awssync_refactored.py | 19 +++++++++++-
14 files changed, 288 insertions(+), 4 deletions(-)
create mode 100644 website/projects/migrations/0007_awspolicy.py
create mode 100644 website/projects/migrations/0008_awspolicy_is_current_policy.py
create mode 100644 website/projects/migrations/0009_rename_name_awspolicy_policy_id_and_more.py
create mode 100644 website/projects/migrations/0010_alter_awspolicy_is_current_policy_and_more.py
create mode 100644 website/projects/migrations/0011_alter_awspolicy_is_current_policy.py
create mode 100644 website/projects/migrations/0012_alter_awspolicy_is_current_policy.py
create mode 100644 website/projects/migrations/0013_remove_awspolicy_no_permissions_at_root_and_more.py
create mode 100644 website/projects/migrations/0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value.py
create mode 100644 website/projects/migrations/0015_alter_awspolicy_tags_key.py
diff --git a/website/projects/admin.py b/website/projects/admin.py
index 1cc2e032..207d9df1 100644
--- a/website/projects/admin.py
+++ b/website/projects/admin.py
@@ -15,7 +15,7 @@
from projects.aws.awssync import AWSSync
from projects.forms import ProjectAdminForm, RepositoryInlineForm
from projects.githubsync import GitHubSync
-from projects.models import Client, Project, Repository
+from projects.models import AWSPolicy, Client, Project, Repository
from registrations.models import Employee
@@ -197,3 +197,11 @@ class ClientAdmin(admin.ModelAdmin):
"""Custom admin for clients."""
search_fields = ("name",)
+
+
+@admin.register(AWSPolicy)
+class AWSPolicyAdmin(admin.ModelAdmin):
+ """Custom admin for AWS Policies."""
+
+ list_display = ["policy_id", "tags_key", "tags_value", "is_current_policy"]
+ search_fields = ("policy_id",)
diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py
index fdbf7588..0bbcc1d4 100644
--- a/website/projects/aws/awssync_refactored.py
+++ b/website/projects/aws/awssync_refactored.py
@@ -11,7 +11,7 @@
from projects.aws.awsapitalker import AWSAPITalker
from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
-from projects.models import Project
+from projects.models import AWSPolicy, Project
class AWSSyncRefactored:
@@ -124,6 +124,13 @@ def attach_policy(self, target_id: str, policy_id: str) -> None:
if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException":
raise
+ def get_current_policy_id(self) -> str:
+ """Get the currrent policy stored on the GiPHouse website."""
+ for policy in AWSPolicy.objects.all():
+ if policy.is_current_policy:
+ return policy.policy_id
+ raise Exception("No current policy found")
+
def create_and_move_accounts(
self, new_member_accounts: list[SyncData], root_id: str, destination_ou_id: str
) -> bool:
diff --git a/website/projects/migrations/0007_awspolicy.py b/website/projects/migrations/0007_awspolicy.py
new file mode 100644
index 00000000..61ed77b2
--- /dev/null
+++ b/website/projects/migrations/0007_awspolicy.py
@@ -0,0 +1,25 @@
+# Generated by Django 4.1.3 on 2023-05-25 14:40
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("projects", "0006_alter_project_unique_together_project_slug_and_more"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="AWSPolicy",
+ fields=[
+ ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ ("name", models.CharField(max_length=50)),
+ ("tags", models.TextField()),
+ ],
+ options={
+ "verbose_name": "AWS Policy",
+ "verbose_name_plural": "AWS Policies",
+ },
+ ),
+ ]
diff --git a/website/projects/migrations/0008_awspolicy_is_current_policy.py b/website/projects/migrations/0008_awspolicy_is_current_policy.py
new file mode 100644
index 00000000..8a9f67e9
--- /dev/null
+++ b/website/projects/migrations/0008_awspolicy_is_current_policy.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.3 on 2023-05-25 14:42
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("projects", "0007_awspolicy"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="awspolicy",
+ name="is_current_policy",
+ field=models.BooleanField(default=False, unique=True),
+ ),
+ ]
diff --git a/website/projects/migrations/0009_rename_name_awspolicy_policy_id_and_more.py b/website/projects/migrations/0009_rename_name_awspolicy_policy_id_and_more.py
new file mode 100644
index 00000000..4bd848a9
--- /dev/null
+++ b/website/projects/migrations/0009_rename_name_awspolicy_policy_id_and_more.py
@@ -0,0 +1,27 @@
+# Generated by Django 4.1.3 on 2023-05-26 09:19
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("projects", "0008_awspolicy_is_current_policy"),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name="awspolicy",
+ old_name="name",
+ new_name="policy_id",
+ ),
+ migrations.RemoveField(
+ model_name="awspolicy",
+ name="tags",
+ ),
+ migrations.AddField(
+ model_name="awspolicy",
+ name="no_permissions_at_root",
+ field=models.CharField(default="", max_length=50),
+ ),
+ ]
diff --git a/website/projects/migrations/0010_alter_awspolicy_is_current_policy_and_more.py b/website/projects/migrations/0010_alter_awspolicy_is_current_policy_and_more.py
new file mode 100644
index 00000000..1f4a57a7
--- /dev/null
+++ b/website/projects/migrations/0010_alter_awspolicy_is_current_policy_and_more.py
@@ -0,0 +1,23 @@
+# Generated by Django 4.1.3 on 2023-05-26 09:29
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("projects", "0009_rename_name_awspolicy_policy_id_and_more"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="awspolicy",
+ name="is_current_policy",
+ field=models.BooleanField(default=False),
+ ),
+ migrations.AlterField(
+ model_name="awspolicy",
+ name="no_permissions_at_root",
+ field=models.CharField(max_length=50),
+ ),
+ ]
diff --git a/website/projects/migrations/0011_alter_awspolicy_is_current_policy.py b/website/projects/migrations/0011_alter_awspolicy_is_current_policy.py
new file mode 100644
index 00000000..6ce5667f
--- /dev/null
+++ b/website/projects/migrations/0011_alter_awspolicy_is_current_policy.py
@@ -0,0 +1,21 @@
+# Generated by Django 4.1.3 on 2023-05-26 09:31
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("projects", "0010_alter_awspolicy_is_current_policy_and_more"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="awspolicy",
+ name="is_current_policy",
+ field=models.BooleanField(
+ default=False,
+ help_text="Attention: When saving this policy, all other policies will be set to 'not current'!",
+ ),
+ ),
+ ]
diff --git a/website/projects/migrations/0012_alter_awspolicy_is_current_policy.py b/website/projects/migrations/0012_alter_awspolicy_is_current_policy.py
new file mode 100644
index 00000000..539fb0f9
--- /dev/null
+++ b/website/projects/migrations/0012_alter_awspolicy_is_current_policy.py
@@ -0,0 +1,21 @@
+# Generated by Django 4.1.3 on 2023-05-26 09:32
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("projects", "0011_alter_awspolicy_is_current_policy"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="awspolicy",
+ name="is_current_policy",
+ field=models.BooleanField(
+ default=False,
+ help_text="Attention: When saving this policy with 'is current policy' checked, all other policies will be set to 'not current'!",
+ ),
+ ),
+ ]
diff --git a/website/projects/migrations/0013_remove_awspolicy_no_permissions_at_root_and_more.py b/website/projects/migrations/0013_remove_awspolicy_no_permissions_at_root_and_more.py
new file mode 100644
index 00000000..b4fdcdb5
--- /dev/null
+++ b/website/projects/migrations/0013_remove_awspolicy_no_permissions_at_root_and_more.py
@@ -0,0 +1,27 @@
+# Generated by Django 4.1.3 on 2023-05-26 11:39
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("projects", "0012_alter_awspolicy_is_current_policy"),
+ ]
+
+ operations = [
+ migrations.RemoveField(
+ model_name="awspolicy",
+ name="no_permissions_at_root",
+ ),
+ migrations.AddField(
+ model_name="awspolicy",
+ name="tags_key",
+ field=models.CharField(default="", max_length=50),
+ ),
+ migrations.AddField(
+ model_name="awspolicy",
+ name="tags_value",
+ field=models.CharField(default="", max_length=50),
+ ),
+ ]
diff --git a/website/projects/migrations/0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value.py b/website/projects/migrations/0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value.py
new file mode 100644
index 00000000..6bb5be98
--- /dev/null
+++ b/website/projects/migrations/0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value.py
@@ -0,0 +1,23 @@
+# Generated by Django 4.1.3 on 2023-05-26 11:42
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("projects", "0013_remove_awspolicy_no_permissions_at_root_and_more"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="awspolicy",
+ name="tags_key",
+ field=models.CharField(blank=True, default="", max_length=50),
+ ),
+ migrations.AlterField(
+ model_name="awspolicy",
+ name="tags_value",
+ field=models.CharField(blank=True, default="", max_length=50),
+ ),
+ ]
diff --git a/website/projects/migrations/0015_alter_awspolicy_tags_key.py b/website/projects/migrations/0015_alter_awspolicy_tags_key.py
new file mode 100644
index 00000000..d029a7a2
--- /dev/null
+++ b/website/projects/migrations/0015_alter_awspolicy_tags_key.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.3 on 2023-05-26 11:51
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("projects", "0014_alter_awspolicy_tags_key_alter_awspolicy_tags_value"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="awspolicy",
+ name="tags_key",
+ field=models.CharField(default="", max_length=50),
+ ),
+ ]
diff --git a/website/projects/models.py b/website/projects/models.py
index c5195529..62645b57 100644
--- a/website/projects/models.py
+++ b/website/projects/models.py
@@ -8,6 +8,35 @@
from registrations.models import Employee
+class AWSPolicy(models.Model):
+ """AWS global policy id and tags submission fields."""
+
+ class Meta:
+ """Meta class for AWSPolicy model."""
+
+ verbose_name = "AWS Policy"
+ verbose_name_plural = "AWS Policies"
+
+ policy_id = models.CharField(max_length=50, unique=False, null=False, blank=False)
+ tags_key = models.CharField(max_length=50, unique=False, default="", null=False, blank=False)
+ tags_value = models.CharField(max_length=50, unique=False, default="", null=False, blank=True)
+ is_current_policy = models.BooleanField(
+ default=False,
+ help_text="Attention: When saving this policy with 'is current policy' checked"
+ + ", all other policies will be set to 'not current'!",
+ )
+
+ def save(self, *args, **kwargs):
+ """Save method for AWSPolicy model."""
+ if self.is_current_policy:
+ AWSPolicy.objects.all().update(**{"is_current_policy": False})
+ super(AWSPolicy, self).save(*args, **kwargs)
+
+ def __str__(self):
+ """Return policy id."""
+ return f"{self.policy_id}"
+
+
class Client(models.Model):
"""Project client with logo."""
diff --git a/website/projects/tests/test_models.py b/website/projects/tests/test_models.py
index 550878b9..80675726 100644
--- a/website/projects/tests/test_models.py
+++ b/website/projects/tests/test_models.py
@@ -5,7 +5,7 @@
from courses.models import Course, Semester
from projects import githubsync
-from projects.models import Project, ProjectToBeDeleted, Repository, RepositoryToBeDeleted
+from projects.models import AWSPolicy, Project, ProjectToBeDeleted, Repository, RepositoryToBeDeleted
from registrations.models import Employee, Registration
@@ -112,3 +112,23 @@ def test_number_of_repos(self):
Repository.objects.create(name="testrepository1", project=project)
Repository.objects.create(name="testrepository2", project=project)
self.assertEqual(project.number_of_repos, 2)
+
+
+class AWSPolicySaveTest(TestCase):
+ def test_save_method_with_existing_current_policy(self):
+ existing_policy = AWSPolicy.objects.create(is_current_policy=True)
+ new_policy = AWSPolicy(is_current_policy=True)
+ new_policy.save()
+ existing_policy.refresh_from_db()
+ self.assertFalse(existing_policy.is_current_policy)
+ self.assertTrue(new_policy.is_current_policy)
+
+ def test_save_method_without_existing_current_policy_false(self):
+ policy = AWSPolicy(is_current_policy=False)
+ policy.save()
+ self.assertFalse(policy.is_current_policy)
+
+ def test_save_method_without_existing_current_policy_true(self):
+ policy = AWSPolicy(is_current_policy=True)
+ policy.save()
+ self.assertTrue(policy.is_current_policy)
diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py
index 75157013..52619820 100644
--- a/website/projects/tests/tests_aws/test_awssync_refactored.py
+++ b/website/projects/tests/tests_aws/test_awssync_refactored.py
@@ -15,7 +15,7 @@
from projects.aws.awssync_refactored import AWSSyncRefactored
from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
-from projects.models import Project
+from projects.models import AWSPolicy, Project
@mock_organizations
@@ -212,6 +212,23 @@ def test_attach_policy__caught_exception(self):
def test_attach_policy__reraised_exception(self):
self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123")
+ def test_get_current_policy_id(self):
+ self.policy_id1 = AWSPolicy.objects.create(
+ policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False
+ )
+ self.policy_id2 = AWSPolicy.objects.create(
+ policy_id="Test-Policy2", tags_key="Test-Policy-Id2", is_current_policy=True
+ )
+ current_policy_id = self.sync.get_current_policy_id()
+ self.assertIsInstance(current_policy_id, str)
+ self.assertEqual(current_policy_id, self.policy_id2.policy_id)
+
+ def test_get_current_policy__no_current_policy_id(self):
+ self.policy_id1 = AWSPolicy.objects.create(
+ policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False
+ )
+ self.assertRaises(Exception, self.sync.get_current_policy_id)
+
def test_create_move_account(self):
self.sync.api_talker.create_organization(feature_set="ALL")
root_id = self.sync.api_talker.list_roots()[0]["Id"]
From 4ddf785d35f171d2ba53a7e19e9c6b9768da1c90 Mon Sep 17 00:00:00 2001
From: Jer111 <82157107+Jer111@users.noreply.github.com>
Date: Fri, 2 Jun 2023 14:13:38 +0200
Subject: [PATCH 27/32] Refactor AWSSync pipeline (#67)
* Changed comments
* Refactored create move
* Refactored create_account
* Changed test
* Added my functions
* Add api talker function for describe_create_account_status
* Deleted create_account function
* Added test
* Fix describe_create_account_status docstring
* Changed describe function
* Finished basic test
* Create structure of refactored pipeline
* Finished refactor create move
* Black formatting
* Duplicate code
* Reorganize files and complete merge
* Refactor according to finished create_and_move_account, aws data extraction
* First attempt at printing an error box when something fails in the pipeline (functional but not too pretty).
* Hotfixed the pipeline for a working sprint 3 demo
* Add pipeline test cases
* Fix 100% code coverage
* Fix tests for error box in pipeline
* Added the current policy to the pipeline
* Add API talker function and test for untag_resource AWS API call
* Remove most debug messages when testing
* Rename awssync_refactored to awssync
* Increase sleep time between requesting account info
* Implement pull request feedback
---------
Co-authored-by: Fouad Lamsettef
Co-authored-by: Henk
Co-authored-by: Mitchell
Co-authored-by: 1058274 <70607431+1058274@users.noreply.github.com>
---
website/projects/admin.py | 2 +-
website/projects/aws/awsapitalker.py | 9 +
website/projects/aws/awssync.py | 675 +++-------
website/projects/aws/awssync_refactored.py | 191 ---
.../tests/tests_aws/test_awsapitalker.py | 17 +
.../projects/tests/tests_aws/test_awssync.py | 1178 +++++------------
.../tests/tests_aws/test_awssync_checks.py | 3 +
.../tests_aws/test_awssync_refactored.py | 320 -----
.../tests/tests_aws/test_awssync_structs.py | 25 -
9 files changed, 513 insertions(+), 1907 deletions(-)
delete mode 100644 website/projects/aws/awssync_refactored.py
delete mode 100644 website/projects/tests/tests_aws/test_awssync_refactored.py
diff --git a/website/projects/admin.py b/website/projects/admin.py
index 207d9df1..74ea5802 100644
--- a/website/projects/admin.py
+++ b/website/projects/admin.py
@@ -175,7 +175,7 @@ def synchronise_current_projects_to_GitHub(self, request):
def synchronise_to_AWS(self, request):
"""Synchronise to Amazon Web Services."""
sync = AWSSync()
- sync.button_pressed()
+ sync.synchronise(request)
return redirect("admin:projects_project_changelist")
def get_urls(self):
diff --git a/website/projects/aws/awsapitalker.py b/website/projects/aws/awsapitalker.py
index edff72ed..32d6ad21 100644
--- a/website/projects/aws/awsapitalker.py
+++ b/website/projects/aws/awsapitalker.py
@@ -169,3 +169,12 @@ def describe_create_account_status(self, create_account_request_id: str) -> dict
:return: dictionary containing account creation status information.
"""
return self.org_client.describe_create_account_status(CreateAccountRequestId=create_account_request_id)
+
+ def untag_resource(self, resource_id: str, tag_keys: list[str]):
+ """
+ Remove tags with specified keys from the resource with the specified ID.
+
+ :param resource_id: the resource from which tags should be removed.
+ :param tag_keys: the keys of the tags to be removed.
+ """
+ return self.org_client.untag_resource(ResourceId=resource_id, TagKeys=tag_keys)
diff --git a/website/projects/aws/awssync.py b/website/projects/aws/awssync.py
index 45a2a301..6939751c 100644
--- a/website/projects/aws/awssync.py
+++ b/website/projects/aws/awssync.py
@@ -1,22 +1,21 @@
-"""Framework for synchronisation with Amazon Web Services (AWS)."""
from __future__ import annotations
-import json
import logging
import time
-import boto3
-
from botocore.exceptions import ClientError
-from botocore.exceptions import NoCredentialsError
+
+from django.contrib import messages
from courses.models import Semester
from mailing_lists.models import MailingList
+from projects.aws.awsapitalker import AWSAPITalker
from projects.aws.awssync_checks import Checks
+from projects.aws.awssync_checks_permissions import api_permissions
from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
-from projects.models import Project
+from projects.models import AWSPolicy, Project
class AWSSync:
@@ -24,95 +23,26 @@ class AWSSync:
def __init__(self):
"""Create an AWSSync instance."""
- self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 5
- self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3
-
+ self.api_talker = AWSAPITalker()
+ self.checker = Checks()
self.logger = logging.getLogger("django.aws")
self.logger.setLevel(logging.DEBUG)
- self.org_info = None
- self.iterationOU_info = None
- self.policy_id = "p-examplepolicyid111"
- self.fail = False
- self.required_aws_actions = [
- # "organizations:AcceptHandshake",
- "organizations:AttachPolicy",
- # "organizations:CancelHandshake",
- # "organizations:CloseAccount",
- "organizations:CreateAccount",
- # "organizations:CreateGovCloudAccount",
- "organizations:CreateOrganization",
- "organizations:CreateOrganizationalUnit",
- "organizations:CreatePolicy",
- # "organizations:DeclineHandshake",
- # "organizations:DeleteOrganization",
- "organizations:DeleteOrganizationalUnit",
- "organizations:DeletePolicy",
- "organizations:DeleteResourcePolicy",
- # "organizations:DeregisterDelegatedAdministrator",
- "organizations:DescribeAccount",
- "organizations:DescribeCreateAccountStatus",
- "organizations:DescribeEffectivePolicy",
- # "organizations:DescribeHandshake",
- "organizations:DescribeOrganization",
- "organizations:DescribeOrganizationalUnit",
- "organizations:DescribePolicy",
- "organizations:DescribeResourcePolicy",
- "organizations:DetachPolicy",
- # "organizations:DisableAWSServiceAccess",
- "organizations:DisablePolicyType",
- # "organizations:EnableAWSServiceAccess",
- # "organizations:EnableAllFeatures",
- "organizations:EnablePolicyType",
- # "organizations:InviteAccountToOrganization",
- # "organizations:LeaveOrganization",
- # "organizations:ListAWSServiceAccessForOrganization",
- "organizations:ListAccounts",
- "organizations:ListAccountsForParent",
- "organizations:ListChildren",
- "organizations:ListCreateAccountStatus",
- # "organizations:ListDelegatedAdministrators",
- # "organizations:ListDelegatedServicesForAccount",
- # "organizations:ListHandshakesForAccount",
- # "organizations:ListHandshakesForOrganization",
- "organizations:ListOrganizationalUnitsForParent",
- "organizations:ListParents",
- "organizations:ListPolicies",
- "organizations:ListPoliciesForTarget",
- "organizations:ListRoots",
- "organizations:ListTagsForResource",
- "organizations:ListTargetsForPolicy",
- "organizations:MoveAccount",
- "organizations:PutResourcePolicy",
- # "organizations:RegisterDelegatedAdministrator",
- # "organizations:RemoveAccountFromOrganization",
- "organizations:TagResource",
- "organizations:UntagResource",
- "organizations:UpdateOrganizationalUnit",
- "organizations:UpdatePolicy",
- ]
- self.logger.info("Created AWSSync instance.")
-
- def button_pressed(self):
- """
- Print debug message to show that the button has been pressed.
- :return: True if function executes successfully
- """
- self.logger.info("Pressed button")
- self.logger.debug(f"Pipeline result: {self.pipeline()}")
- return True
+ self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 5
+ self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3
- def get_all_mailing_lists(self):
- """
- Get all mailing lists from the database.
+ self.accounts_created = 0
+ self.accounts_moved = 0
+ self.accounts_to_create = 0
- :return: List of mailing lists
- """
- mailing_lists = MailingList.objects.all()
- mailing_list_names = [ml.email_address for ml in mailing_lists]
- return mailing_list_names
+ self.SUCCESS_MSG = "Successfully synchronized all projects to AWS."
+ self.FAIL_MSG = "Not all accounts were created and moved successfully. Check the console for more information."
+ self.API_ERROR_MSG = "An error occurred while calling the AWS API. Check the console for more information."
+ self.SYNC_ERROR_MSG = (
+ "An error occurred during synchronization with AWS. Check the console for more information"
+ )
- def get_emails_with_teamids(self):
+ def get_syncdata_from_giphouse(self) -> list[SyncData]:
"""
Create a list of SyncData struct containing email, slug and semester.
@@ -120,325 +50,103 @@ def get_emails_with_teamids(self):
:return: list of SyncData structs with email, slug and semester
"""
- email_ids = []
+ sync_data_list = []
+ current_semester = Semester.objects.get_or_create_current_semester()
- for project in (
- Project.objects.filter(mailinglist__isnull=False)
- .filter(semester=Semester.objects.get_or_create_current_semester())
- .values("slug", "semester", "mailinglist")
+ for project in Project.objects.filter(mailinglist__isnull=False, semester=current_semester).values(
+ "slug", "semester", "mailinglist"
):
project_slug = project["slug"]
project_semester = str(Semester.objects.get(pk=project["semester"]))
project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address
sync_data = SyncData(project_email, project_slug, project_semester)
- email_ids.append(sync_data)
- return email_ids
+ sync_data_list.append(sync_data)
+ return sync_data_list
- def create_aws_organization(self):
- """Create an AWS organization with the current user as the management account."""
- client = boto3.client("organizations")
- try:
- response = client.create_organization(FeatureSet="ALL")
- self.org_info = response["Organization"]
- self.logger.info("Created an AWS organization and saved organization info.")
- except ClientError as error:
- self.fail = True
- self.logger.error("Something went wrong creating an AWS organization.")
- self.logger.debug(f"{error}")
- self.logger.debug(f"{error.response}")
-
- def create_course_iteration_OU(self, iteration_name):
- """
- Create an OU for the course iteration.
-
- :param iteration_name: The name of the course iteration OU
-
- :return: The ID of the OU
- """
- client = boto3.client("organizations")
- if self.org_info is None:
- self.logger.info("No organization info found. Creating an AWS organization.")
- self.fail = True
- else:
- try:
- root_id = client.list_roots()["Roots"][0]["Id"]
- response = client.create_organizational_unit(
- ParentId=root_id,
- Name=iteration_name,
- )
- self.logger.info(f"Created an OU for course iteration {iteration_name}.")
- self.iterationOU_info = response["OrganizationalUnit"]
- return response["OrganizationalUnit"]["Id"]
- except ClientError as error:
- self.fail = True
- self.logger.error(f"Something went wrong creating an OU for course iteration {iteration_name}.")
- self.logger.debug(f"{error}")
- self.logger.debug(f"{error.response}")
-
- def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]):
+ def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]) -> list[SyncData]:
"""
Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS.
This includes their ID and email address, to be able to put users in the correct AWS organization later.
"""
- sync_list = [x for x in giphouse_data if x not in aws_data]
- return sync_list
-
- def create_scp_policy(self, policy_name, policy_description, policy_content):
- """
- Create an SCP policy.
-
- :param policy_name: The policy name.
- :param policy_description: The policy description.
- :param policy_content: The policy configuration as a dictionary.
- The policy is automatically converted to JSON format, including escaped quotation marks.
- :return: Details of newly created policy as a dict on success and NoneType object otherwise.
- """
- client = boto3.client("organizations")
- try:
- response = client.create_policy(
- Content=json.dumps(policy_content),
- Description=policy_description,
- Name=policy_name,
- Type="SERVICE_CONTROL_POLICY",
- )
- except ClientError as error:
- self.fail = True
- self.logger.error("Something went wrong creating an SCP policy.")
- self.logger.error(error)
- else:
- return response["Policy"]
-
- def attach_scp_policy(self, policy_id, target_id):
- """
- Attaches an SCP policy to a target (root, OU, or member account).
-
- :param policy_id: The ID of the policy to be attached.
- :param target_id: The ID of the target root, OU, or member account.
- """
- client = boto3.client("organizations")
- try:
- client.attach_policy(PolicyId=policy_id, TargetId=target_id)
- except ClientError as error:
- if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException":
- self.fail = True
- self.logger.error("Something went wrong attaching an SCP policy to a target.")
- self.logger.debug(f"{error}")
- self.logger.debug(f"{error.response}")
-
- def check_aws_api_connection(self):
- """
- Check whether boto3 can connect to AWS API with current credentials.
+ return [project for project in giphouse_data if project not in aws_data]
- :returns: First tuple element always exists and indicates success.
- Second tuple element is contains information about the entity
- who made the successful API call and None otherwise.
- """
- client_sts = boto3.client("sts")
- try:
- caller_identity_info = client_sts.get_caller_identity()
- except (NoCredentialsError, ClientError) as error:
- self.logger.info("Establishing AWS API connection failed.")
- self.logger.debug(error)
- return False, None
- else:
- self.logger.info("Establishing AWS API connection succeeded.")
-
- return True, caller_identity_info
-
- def check_iam_policy(self, iam_user_arn, desired_actions):
- """
- Check for the specified IAM user ARN whether the actions in list \
- desired_actions are allowed according to its IAM policy.
+ def get_tag_value(self, tags: list[dict[str, str]], key: str) -> str | None:
+ """Return the value of the tag with the given key, or None if no such tag exists."""
+ for tag in tags:
+ if tag["Key"] == key:
+ return tag["Value"]
+ return None
- :param iam_user_arn: ARN of the IAM user being checked.
- :param iam_actions: List of AWS API actions to check.
- :returns: True iff all actions in desired_actions are allowed.
+ def extract_aws_setup(self, parent_ou_id: str) -> AWSTree:
"""
- client_iam = boto3.client("iam")
-
- try:
- response = client_iam.simulate_principal_policy(PolicySourceArn=iam_user_arn, ActionNames=desired_actions)
- except ClientError as error:
- self.logger.info("AWS API actions check failed.")
- self.logger.debug(error)
- return False
-
- success = True
- for evaluation_result in response["EvaluationResults"]:
- action_name = evaluation_result["EvalActionName"]
- if evaluation_result["EvalDecision"] != "allowed":
- self.logger.debug(f"The AWS API action {action_name} is denied for IAM user {iam_user_arn}.")
- success = False
-
- if success:
- self.logger.info("AWS API actions check succeeded.")
-
- return success
-
- def check_organization_existence(self):
- """
- Check whether an AWS organization exists for the AWS API caller's account.
-
- :returns: First tuple element always exists and indicates success.
- Second tuple element is describes properties of the organization and None otherwise.
- """
- client_organizations = boto3.client("organizations")
-
- try:
- response_org = client_organizations.describe_organization()
- except ClientError as error:
- self.logger.info("AWS organization existence check failed.")
- self.logger.debug(error)
- return False, None
- else:
- self.logger.info("AWS organization existence check succeeded.")
-
- return True, response_org["Organization"]
-
- def check_is_management_account(self, api_caller_info, organization_info):
- """
- Check whether caller of AWS API has organization's management account ID.
-
- :returns: True iff the current organization's management account ID equals the AWS API caller's account ID.
- """
- management_account_id = organization_info["MasterAccountId"]
- api_caller_account_id = api_caller_info["Account"]
- is_management_account = management_account_id == api_caller_account_id
-
- if is_management_account:
- self.logger.info("Management account check succeeded.")
- else:
- self.logger.info("Management account check failed.")
- self.logger.debug(f"The organization's management account ID is: '{management_account_id}'.")
- self.logger.debug(f"The AWS API caller account ID is: '{api_caller_account_id}'.")
-
- return is_management_account
-
- def check_scp_enabled(self, organization_info):
- """
- Check whether the SCP policy type is an enabled feature for the AWS organization.
-
- :returns: True iff the SCP policy type feature is enabled for the organization.
- """
- scp_is_enabled = False
- for policy in organization_info["AvailablePolicyTypes"]:
- if policy["Type"] == "SERVICE_CONTROL_POLICY" and policy["Status"] == "ENABLED":
- scp_is_enabled = True
- break
-
- if not scp_is_enabled:
- self.logger.info("The SCP policy type is disabled for the organization.")
- self.logger.debug(organization_info["AvailablePolicyTypes"])
- else:
- self.logger.info("Organization SCP policy status check succeeded.")
-
- return scp_is_enabled
-
- def pipeline_preconditions(self):
- """
- Check all crucial pipeline preconditions.
-
- 1. Locatable boto3 credentials and successful AWS API connection
- 2. Check allowed AWS API actions based on IAM policy of caller
- 3. Existing organization for AWS API caller
- 4. AWS API caller acts under same account ID as organization's management account ID
- 5. SCP policy type feature enabled for organization
-
- :return: True iff all pipeline preconditions are met.
- """
- check_api_connection, api_caller_info = self.check_aws_api_connection()
- if not check_api_connection:
- return False
-
- check_api_actions = self.check_iam_policy(api_caller_info["Arn"], self.required_aws_actions)
- if not check_api_actions:
- return False
-
- check_org_existence, organization_info = self.check_organization_existence()
- self.org_info = organization_info
- if not check_org_existence:
- return False
-
- check_acc_management = self.check_is_management_account(api_caller_info, organization_info)
- if not check_acc_management:
- return False
-
- check_scp_enabled = self.check_scp_enabled(organization_info)
- if not check_scp_enabled:
- return False
+ Return an AWSTree containing all the children of the parent OU.
- return True
+ :param parent_ou_id: The ID of the parent OU.
+ :return: An AWSTree object containing all the children of the parent OU.
+ """
+ aws_tree = AWSTree(
+ "root",
+ parent_ou_id,
+ [
+ Iteration(
+ ou["Name"],
+ ou["Id"],
+ [
+ SyncData(
+ account["Email"],
+ self.get_tag_value(tags, "project_slug"),
+ self.get_tag_value(tags, "project_semester"),
+ )
+ for account in self.api_talker.list_accounts_for_parent(parent_id=ou["Id"])
+ for tags in [self.api_talker.list_tags_for_resource(resource_id=account["Id"])]
+ ],
+ )
+ for ou in self.api_talker.list_organizational_units_for_parent(parent_id=parent_ou_id)
+ ],
+ )
- def pipeline_policy(self, ou_id):
- """
- Create an SCP policy and attaches it to the organizational unit of the current semester.
+ incomplete_accounts = [
+ account
+ for account in aws_tree.awstree_to_syncdata_list()
+ if not (account.project_slug and account.project_semester)
+ ]
- :param ou_id: ID of the organizational unit for the current semester.
- :return: True iff the policy to be attached to the OU already exists and is successfully attached.
- """
- client = boto3.client("organizations")
- try:
- client.describe_policy(PolicyId=self.policy_id)
- except ClientError as error:
- self.logger.debug(error)
- return False
+ if incomplete_accounts:
+ raise Exception(f"Found incomplete accounts in AWS: {incomplete_accounts}.")
- self.attach_scp_policy(self.policy_id, ou_id)
- if self.fail:
- return False
- return True
+ return aws_tree
- def pipeline_create_account(self, sync_data):
- """
- Create a single new AWS member account in the organization of the API caller.
+ def get_or_create_course_ou(self, tree: AWSTree) -> str:
+ """Create organizational unit under root with name of current semester."""
+ root_id = tree.ou_id
+ course_ou_name = str(Semester.objects.get_or_create_current_semester())
+ course_ou_id = next((ou.ou_id for ou in tree.iterations if ou.name == course_ou_name), None)
- The status of the member account request is repeatedly checked based on the class' attributes:
- self.ACCOUNT_REQUEST_INTERVAL_SECONDS: thread sleeping time before each status check
- self.ACCOUNT_REQUEST_MAX_ATTEMPTS: maximum number of times to thread sleep and check
+ if not course_ou_id:
+ course_ou = self.api_talker.create_organizational_unit(root_id, course_ou_name)
+ course_ou_id = course_ou["OrganizationalUnit"]["Id"]
- :param email: The e-mail address of the new member account.
- :param username: The username of the new member account.
- :returns: (True, account_id) on success and otherwise (False, failure_reason).
- """
- client = boto3.client("organizations")
+ return course_ou_id
- # Request new member account.
+ def attach_policy(self, target_id: str, policy_id: str) -> None:
+ """Attach policy to target resource."""
try:
- response_create = client.create_account(
- Email=sync_data.project_email,
- AccountName=sync_data.project_slug,
- IamUserAccessToBilling="DENY",
- Tags=[
- {"Key": "project_slug", "Value": sync_data.project_slug},
- {"Key": "project_semester", "Value": sync_data.project_semester},
- ],
- )
+ self.api_talker.attach_policy(target_id, policy_id)
except ClientError as error:
- self.logger.debug(error)
- return False, "CLIENTERROR_CREATE_ACCOUNT"
-
- # Repeatedly check status of new member account request.
- request_id = response_create["CreateAccountStatus"]["Id"]
- for _ in range(1, self.ACCOUNT_REQUEST_MAX_ATTEMPTS + 1):
- time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS)
-
- try:
- response_status = client.describe_create_account_status(CreateAccountRequestId=request_id)
- except ClientError as error:
- self.logger.debug(error)
- return False, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS"
-
- request_state = response_status["CreateAccountStatus"]["State"]
- if request_state == "FAILED":
- return False, response_status["CreateAccountStatus"]["FailureReason"]
- elif request_state == "SUCCEEDED":
- return True, response_status["CreateAccountStatus"]["AccountId"]
+ if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException":
+ raise
- return False, "STILL_IN_PROGRESS"
+ def get_current_policy_id(self) -> str:
+ """Get the currrent policy stored on the GiPHouse website."""
+ for policy in AWSPolicy.objects.all():
+ if policy.is_current_policy:
+ return policy.policy_id
+ raise Exception("No current policy found")
- def pipeline_create_and_move_accounts(self, new_member_accounts, root_id, destination_ou_id):
+ def create_and_move_accounts(
+ self, new_member_accounts: list[SyncData], root_id: str, destination_ou_id: str
+ ) -> bool:
"""
Create multiple accounts in the organization of the API caller and move them from the root to a destination OU.
@@ -447,167 +155,98 @@ def pipeline_create_and_move_accounts(self, new_member_accounts, root_id, destin
:param destination_ou_id: The organization's destination OU ID.
:returns: True iff **all** new member accounts were created and moved successfully.
"""
- client = boto3.client("organizations")
- overall_success = True
-
for new_member in new_member_accounts:
- success, response = self.pipeline_create_account(new_member)
- if success:
- account_id = response
+ # Create member account
+ response = self.api_talker.create_account(
+ new_member.project_email,
+ new_member.project_slug,
+ [
+ {"Key": "project_slug", "Value": new_member.project_slug},
+ {"Key": "project_semester", "Value": new_member.project_semester},
+ ],
+ )
+ # Repeatedly check status of new member account request.
+ request_id = response["CreateAccountStatus"]["Id"]
+
+ for _ in range(self.ACCOUNT_REQUEST_MAX_ATTEMPTS):
+ time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS)
+
try:
- root_id = client.list_roots()["Roots"][0]["Id"]
- client.move_account(
- AccountId=account_id, SourceParentId=root_id, DestinationParentId=destination_ou_id
- )
+ response_status = self.api_talker.describe_create_account_status(request_id)
except ClientError as error:
+ self.logger.debug(f"Failed to get status of account with e-mail: '{new_member.project_email}'.")
self.logger.debug(error)
- overall_success = False
- else:
- failure_reason = response
- self.logger.debug(failure_reason)
- overall_success = False
-
- return overall_success
-
- def pipeline_update_current_course_iteration_ou(self, aws_tree):
- """
- Update the AWS tree with the new course iteration OU's.
-
- :param aws_tree: The AWS tree to be checked.
- :returns: True, iteration_id on success and otherwise False, failure_reason.
- """
- is_current_iteration, iteration_ou_id = self.check_current_ou_exists(aws_tree)
+ break
+
+ request_state = response_status["CreateAccountStatus"]["State"]
+
+ if request_state == "SUCCEEDED":
+ account_id = response_status["CreateAccountStatus"]["AccountId"]
+
+ self.accounts_created += 1
+ try:
+ self.api_talker.move_account(account_id, root_id, destination_ou_id)
+ self.accounts_moved += 1
+ except ClientError as error:
+ self.logger.debug(f"Failed to move account with e-mail: {new_member.project_email}.")
+ self.logger.debug(error)
+ break
+
+ elif request_state == "FAILED":
+ failure_reason = response_status["CreateAccountStatus"]["FailureReason"]
+ self.logger.debug(
+ f"Failed to create account with e-mail: {new_member.project_email}. "
+ f"Failure reason: {failure_reason}"
+ )
+ break
- if not is_current_iteration:
- iteration_name = str(Semester.objects.get_or_create_current_semester())
- iteration_ou_id = self.create_course_iteration_OU(iteration_name)
+ self.accounts_to_create = len(new_member_accounts)
+ self.logger.debug(f"Accounts created: {self.accounts_created}/{self.accounts_to_create}")
+ self.logger.debug(f"Accounts moved: {self.accounts_moved}/{self.accounts_to_create}")
+ success = self.accounts_to_create == self.accounts_created == self.accounts_moved
- if not self.fail:
- return True, iteration_ou_id
- else:
- return False, "ITERATION_OU_CREATION_FAILED"
+ return success
- def pipeline(self):
+ def pipeline(self) -> bool:
"""
Single pipeline that integrates all building blocks for the AWS integration process.
:return: True iff all pipeline stages successfully executed.
"""
- # Check preconditions.
- if not self.pipeline_preconditions():
- return False
-
- # Get synchronization data.
- client = boto3.client("organizations")
- try:
- root_id = client.list_roots()["Roots"][0]["Id"]
- except ClientError as error:
- self.logger.debug("Failed to retrieve root ID of organization.")
- self.logger.debug(error)
- return False
+ self.checker.pipeline_preconditions(api_permissions)
+ root_id = self.api_talker.list_roots()[0]["Id"]
aws_tree = self.extract_aws_setup(root_id)
- if self.fail:
- self.logger.debug("Extracting AWS setup failed.")
- return False
+ self.checker.check_members_in_correct_iteration(aws_tree)
+ self.checker.check_double_iteration_names(aws_tree)
aws_sync_data = aws_tree.awstree_to_syncdata_list()
- giphouse_sync_data = self.get_emails_with_teamids()
+ giphouse_sync_data = self.get_syncdata_from_giphouse()
merged_sync_data = self.generate_aws_sync_list(giphouse_sync_data, aws_sync_data)
- # Check edge cases.
- if self.check_for_double_member_email(aws_sync_data, merged_sync_data):
- return False
-
- checker = Checks()
- checker.check_members_in_correct_iteration(aws_tree)
- checker.check_double_iteration_names(aws_tree)
-
- # Check/create course iteration OU.
- current_course_iteration_exists, response = self.pipeline_update_current_course_iteration_ou(aws_tree)
- if not current_course_iteration_exists:
- failure_reason = response
- self.logger.debug(failure_reason)
- return False
- course_iteration_ou_id = response
-
- # Create and attach SCP policy to course iteration OU.
- if not self.pipeline_policy(course_iteration_ou_id):
- return False
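+        # Get or create the OU for the current course iteration.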
+ ou_id = self.get_or_create_course_ou(aws_tree)
- # Create new member accounts and move to course iteration OU.
- if not self.pipeline_create_and_move_accounts(merged_sync_data, root_id, course_iteration_ou_id):
- return False
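+        # Attach the currently configured SCP policy to the course iteration OU.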
+ policy_id = self.get_current_policy_id()
+ self.attach_policy(ou_id, policy_id)
- return True
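+        # Create accounts for the new members and move them into the course iteration OU.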
+ return self.create_and_move_accounts(merged_sync_data, root_id, ou_id)
- def check_for_double_member_email(self, aws_list: list[SyncData], sync_list: list[SyncData]):
- """Check if no users are assigned to multiple projects."""
- sync_emails = [x.project_email for x in sync_list]
- aws_emails = [x.project_email for x in aws_list]
-
- duplicates = [email for email in sync_emails if email in aws_emails]
-
- for duplicate in duplicates:
- error = f"Email address {duplicate} is already in the list of members in AWS"
- self.logger.info("An email clash occured while syncing.")
- self.logger.debug(error)
-
- if duplicates != []:
- return True
- return False
-
- def check_current_ou_exists(self, AWSdata: AWSTree):
- """
- Check if the the OU (organizational unit) for the current semester already exists in AWS.
-
- Get data in tree structure (dictionary) defined in the function that retrieves the AWS data
- """
- current = str(Semester.objects.get_or_create_current_semester())
-
- for iteration in AWSdata.iterations:
- if current == iteration.name:
- return (True, iteration.ou_id)
-
- return (False, None)
-
- def extract_aws_setup(self, parent_ou_id):
+ def synchronise(self, request):
"""
- Give a list of all the children of the parent OU.
+        Synchronise projects of the current semester to AWS and notify the user of success or potential errors.
- :param parent_ou_id: The ID of the parent OU.
+ :param request: HTTP request indicating the synchronization button has been pressed.
"""
- client = boto3.client("organizations")
try:
- response = client.list_organizational_units_for_parent(ParentId=parent_ou_id)
- aws_tree = AWSTree("root", parent_ou_id, [])
- for iteration in response["OrganizationalUnits"]:
- ou_id = iteration["Id"]
- ou_name = iteration["Name"]
- response = client.list_accounts_for_parent(ParentId=ou_id)
- children = response["Accounts"]
- syncData = []
- for child in children:
- account_id = child["Id"]
- account_email = child["Email"]
- response = client.list_tags_for_resource(ResourceId=account_id)
- tags = response["Tags"]
- merged_tags = {d["Key"]: d["Value"] for d in tags}
- self.logger.debug(merged_tags)
- if all(key in merged_tags for key in ["project_slug", "project_semester"]):
- syncData.append(
- SyncData(account_email, merged_tags["project_slug"], merged_tags["project_semester"])
- )
- else:
- self.logger.error(
- "Could not find project_slug or project_semester tag for account with ID: " + account_id
- )
- self.fail = True
+ synchronisation_success = self.pipeline()
- aws_tree.iterations.append(Iteration(ou_name, ou_id, syncData))
- return aws_tree
- except ClientError as error:
- self.fail = True
- self.logger.error("Something went wrong extracting the AWS setup.")
- self.logger.debug(f"{error}")
- self.logger.debug(f"{error.response}")
+ if synchronisation_success:
+ messages.success(request, self.SUCCESS_MSG)
+ else:
+ messages.warning(request, self.FAIL_MSG)
+ except ClientError as api_error:
+ messages.error(request, self.API_ERROR_MSG)
+ self.logger.error(api_error)
+ except Exception as sync_error:
+ messages.error(request, self.SYNC_ERROR_MSG)
+ self.logger.error(sync_error)
diff --git a/website/projects/aws/awssync_refactored.py b/website/projects/aws/awssync_refactored.py
deleted file mode 100644
index 0bbcc1d4..00000000
--- a/website/projects/aws/awssync_refactored.py
+++ /dev/null
@@ -1,191 +0,0 @@
-from __future__ import annotations
-
-import logging
-import time
-
-from botocore.exceptions import ClientError
-
-from courses.models import Semester
-
-from mailing_lists.models import MailingList
-
-from projects.aws.awsapitalker import AWSAPITalker
-from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
-from projects.models import AWSPolicy, Project
-
-
-class AWSSyncRefactored:
- """Synchronise with Amazon Web Services."""
-
- def __init__(self):
- """Create an AWSSync instance."""
- self.api_talker = AWSAPITalker()
- self.logger = logging.getLogger("django.aws")
- self.logger.setLevel(logging.DEBUG)
- self.fail = False
-
- self.ACCOUNT_REQUEST_INTERVAL_SECONDS = 2
- self.ACCOUNT_REQUEST_MAX_ATTEMPTS = 3
-
- self.accounts_created = 0
- self.accounts_moved = 0
-
- def get_syncdata_from_giphouse(self) -> list[SyncData]:
- """
- Create a list of SyncData struct containing email, slug and semester.
-
- Slug and semester combined are together an uniqueness constraint.
-
- :return: list of SyncData structs with email, slug and semester
- """
- sync_data_list = []
- current_semester = Semester.objects.get_or_create_current_semester()
-
- for project in Project.objects.filter(mailinglist__isnull=False, semester=current_semester).values(
- "slug", "semester", "mailinglist"
- ):
- project_slug = project["slug"]
- project_semester = str(Semester.objects.get(pk=project["semester"]))
- project_email = MailingList.objects.get(pk=project["mailinglist"]).email_address
-
- sync_data = SyncData(project_email, project_slug, project_semester)
- sync_data_list.append(sync_data)
- return sync_data_list
-
- def generate_aws_sync_list(self, giphouse_data: list[SyncData], aws_data: list[SyncData]) -> list[SyncData]:
- """
- Generate the list of users that are registered on the GiPhouse website, but are not yet invited for AWS.
-
- This includes their ID and email address, to be able to put users in the correct AWS organization later.
- """
- return [project for project in giphouse_data if project not in aws_data]
-
- def get_tag_value(self, tags: list[dict[str, str]], key: str) -> str:
- """Return the value of the tag with the given key, or None if no such tag exists."""
- for tag in tags:
- if tag["Key"] == key:
- return tag["Value"]
- return None
-
- def extract_aws_setup(self, parent_ou_id: str) -> AWSTree:
- """
- Give a list of all the children of the parent OU.
-
- :param parent_ou_id: The ID of the parent OU.
- :return: A AWSTree object containing all the children of the parent OU.
- """
- aws_tree = AWSTree(
- "root",
- parent_ou_id,
- [
- Iteration(
- ou["Name"],
- ou["Id"],
- member_accounts := [
- SyncData(
- account["Email"],
- self.get_tag_value(tags, "project_slug"),
- self.get_tag_value(tags, "project_semester"),
- )
- for account in self.api_talker.list_accounts_for_parent(parent_id=ou["Id"])
- for tags in [self.api_talker.list_tags_for_resource(resource_id=account["Id"])]
- ],
- )
- for ou in self.api_talker.list_organizational_units_for_parent(parent_id=parent_ou_id)
- ],
- )
-
- incomplete_accounts = [
- account for account in member_accounts if not (account.project_slug and account.project_semester)
- ]
-
- if incomplete_accounts:
- raise Exception(f"Found incomplete accounts in AWS: {incomplete_accounts}.")
-
- return aws_tree
-
- def get_or_create_course_ou(self, tree: AWSTree) -> str:
- """Create organizational unit under root with name of current semester."""
- root_id = tree.ou_id
- course_ou_name = str(Semester.objects.get_or_create_current_semester())
- course_ou_id = next((ou.ou_id for ou in tree.iterations if ou.name == course_ou_name), None)
-
- if not course_ou_id:
- course_ou = self.api_talker.create_organizational_unit(root_id, course_ou_name)
- course_ou_id = course_ou["OrganizationalUnit"]["Id"]
-
- return course_ou_id
-
- def attach_policy(self, target_id: str, policy_id: str) -> None:
- """Attach policy to target resource."""
- try:
- self.api_talker.attach_policy(target_id, policy_id)
- except ClientError as error:
- if error.response["Error"]["Code"] != "DuplicatePolicyAttachmentException":
- raise
-
- def get_current_policy_id(self) -> str:
- """Get the currrent policy stored on the GiPHouse website."""
- for policy in AWSPolicy.objects.all():
- if policy.is_current_policy:
- return policy.policy_id
- raise Exception("No current policy found")
-
- def create_and_move_accounts(
- self, new_member_accounts: list[SyncData], root_id: str, destination_ou_id: str
- ) -> bool:
- """
- Create multiple accounts in the organization of the API caller and move them from the root to a destination OU.
-
- :param new_member_accounts: List of SyncData objects.
- :param root_id: The organization's root ID.
- :param destination_ou_id: The organization's destination OU ID.
- :returns: True iff **all** new member accounts were created and moved successfully.
- """
- for new_member in new_member_accounts:
- # Create member account
- response = self.api_talker.create_account(
- new_member.project_email,
- new_member.project_slug,
- [
- {"Key": "project_slug", "Value": new_member.project_slug},
- {"Key": "project_semester", "Value": new_member.project_semester},
- ],
- )
- # Repeatedly check status of new member account request.
- request_id = response["CreateAccountStatus"]["Id"]
-
- for _ in range(self.ACCOUNT_REQUEST_MAX_ATTEMPTS):
- time.sleep(self.ACCOUNT_REQUEST_INTERVAL_SECONDS)
-
- try:
- response_status = self.api_talker.describe_create_account_status(request_id)
- except ClientError as error:
- self.logger.debug(error)
- self.logger.debug(f"Failed to get status of account with e-mail: '{new_member.project_email}'.")
- break
-
- request_state = response_status["CreateAccountStatus"]["State"]
- if request_state == "SUCCEEDED":
- account_id = response_status["CreateAccountStatus"]["AccountId"]
-
- self.accounts_created += 1
- try:
- self.api_talker.move_account(account_id, root_id, destination_ou_id)
- self.accounts_moved += 1
- except ClientError as error:
- self.logger.debug(error)
- self.logger.debug(f"Failed to move account with e-mail: {new_member.project_email}.")
- break
-
- elif request_state == "FAILED":
- failure_reason = response_status["CreateAccountStatus"]["FailureReason"]
- self.logger.debug(
- f"Failed to create account with e-mail: {new_member.project_email}. "
- f"Failure reason: {failure_reason}"
- )
- break
-
- accounts_to_create = len(new_member_accounts)
- success = accounts_to_create == self.accounts_created == self.accounts_moved
- return success
diff --git a/website/projects/tests/tests_aws/test_awsapitalker.py b/website/projects/tests/tests_aws/test_awsapitalker.py
index dd7747ea..907d1efb 100644
--- a/website/projects/tests/tests_aws/test_awsapitalker.py
+++ b/website/projects/tests/tests_aws/test_awsapitalker.py
@@ -194,3 +194,20 @@ def test_describe_create_account_status(self):
request_state = request["CreateAccountStatus"]["State"]
self.assertEqual(request_state, "SUCCEEDED")
+
+ def test_untag_resource(self):
+ self.create_organization()
+
+ tag_key = "Test Key"
+ tag_value = "Test Value"
+ tag = {"Key": tag_key, "Value": tag_value}
+ account = self.api_talker.create_account("test@example.com", "Test", [tag])
+ account_id = account["CreateAccountStatus"]["AccountId"]
+
+ received_tags = self.api_talker.org_client.list_tags_for_resource(ResourceId=account_id)["Tags"]
+ self.assertIn(tag, received_tags)
+
+ self.api_talker.untag_resource(account_id, [tag_key])
+
+ received_tags = self.api_talker.org_client.list_tags_for_resource(ResourceId=account_id)["Tags"]
+ self.assertEqual(received_tags, [])
diff --git a/website/projects/tests/tests_aws/test_awssync.py b/website/projects/tests/tests_aws/test_awssync.py
index 64f6938f..a33dd9fa 100644
--- a/website/projects/tests/tests_aws/test_awssync.py
+++ b/website/projects/tests/tests_aws/test_awssync.py
@@ -1,959 +1,433 @@
"""Tests for awssync.py."""
-
import json
from unittest.mock import MagicMock, patch
-import boto3
-import botocore
from botocore.exceptions import ClientError
-from django.test import TestCase
+from django.contrib.auth import get_user_model
+from django.test import Client, TestCase
+from django.urls import reverse
-from moto import mock_organizations, mock_sts
+from moto import mock_iam, mock_organizations, mock_sts
from courses.models import Semester
from mailing_lists.models import MailingList
-from projects.aws import awssync
-from projects.models import Project
+from projects.aws.awssync import AWSSync
+from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
+from projects.models import AWSPolicy, Project
+from registrations.models import Employee
-class AWSSyncTest(TestCase):
- """Test AWSSync class."""
+User: Employee = get_user_model()
+
+@mock_organizations
+@mock_sts
+@mock_iam
+class AWSSyncTest(TestCase):
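+    """Test AWSSync class."""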
def setUp(self):
"""Set up testing environment."""
- self.sync = awssync.AWSSync()
+ self.sync = AWSSync()
+ self.api_talker = self.sync.api_talker
+
+ self.admin = User.objects.create_superuser(github_id=0, github_username="super")
+ self.client = Client()
+ self.client.force_login(self.admin)
+
+ self.logger = MagicMock()
+ self.sync.logger = self.logger
+ self.sync.checker.logger = self.logger
+
+ def setup_policy(self):
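+        """Create a deny-all SCP and register it as the current AWSPolicy in the database."""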
+ policy_name = "DenyAll"
+ policy_description = "Deny all access."
+ policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ policy = self.sync.api_talker.org_client.create_policy(
+ Name=policy_name,
+ Description=policy_description,
+ Content=json.dumps(policy_content),
+ Type="SERVICE_CONTROL_POLICY",
+ Tags=[{"Key": "no_permissions", "Value": "true"}],
+ )
+ AWSPolicy.objects.create(
+ policy_id=policy["Policy"]["PolicySummary"]["Id"],
+ is_current_policy=True,
+ tags_key="no_permissions",
+ tags_value="true",
+ )
+
+ def test_get_syncdata_from_giphouse_normal(self):
+        """Test get_syncdata_from_giphouse in optimal conditions."""
self.semester = Semester.objects.create(year=2023, season=Semester.SPRING)
- self.mailing_list = MailingList.objects.create(address="test1")
- self.project = Project.objects.create(id=1, name="test1", semester=self.semester, slug="test1")
- self.mailing_list.projects.add(self.project)
- self.mock_org = mock_organizations()
- self.mock_org.start()
-
- def tearDown(self):
- self.mock_org.stop()
-
- def simulateFailure(self):
- self.sync.fail = True
-
- def test_button_pressed(self):
- """Test button_pressed function."""
- return_value = self.sync.button_pressed()
- self.assertTrue(return_value)
-
- def test_create_aws_organization(self):
- moto_client = boto3.client("organizations")
- org = self.sync
- org.create_aws_organization()
- describe_org = moto_client.describe_organization()["Organization"]
- self.assertEqual(describe_org, org.org_info)
-
- def test_create_aws_organization__exception(self):
- org = self.sync
- with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
- org.create_aws_organization()
- self.assertTrue(org.fail)
- self.assertIsNone(org.org_info)
-
- def test_create_course_iteration_OU(self):
- moto_client = boto3.client("organizations")
- org = self.sync
- org.create_aws_organization()
- org.create_course_iteration_OU("1")
- describe_unit = moto_client.describe_organizational_unit(OrganizationalUnitId=org.iterationOU_info["Id"])[
- "OrganizationalUnit"
- ]
- self.assertEqual(describe_unit, org.iterationOU_info)
-
- def test_create_course_iteration_OU_without_organization(self):
- org = self.sync
- org.create_course_iteration_OU("1")
- self.assertTrue(org.fail)
-
- def test_create_course_iteration_OU__exception(self):
- org = self.sync
- org.create_aws_organization()
- with patch("boto3.client") as mocker:
- mocker().list_roots.side_effect = ClientError({}, "list_roots")
- org.create_course_iteration_OU("1")
- self.assertTrue(org.fail)
-
- def test_get_all_mailing_lists(self):
- """Test get_all_mailing_lists function."""
- mailing_lists = self.sync.get_all_mailing_lists()
- self.assertIsInstance(mailing_lists, list)
-
- def test_get_emails_with_teamids_normal(self):
- """Test get_emails_with_teamids function."""
- email_id = self.sync.get_emails_with_teamids()
+ for i in range(3):
+ self.mailing_list = MailingList.objects.create(address="test" + str(i))
+ self.project = Project.objects.create(
+ id=i, name="test" + str(i), semester=self.semester, slug="test" + str(i)
+ )
+ self.mailing_list.projects.add(self.project)
+
+ email_id = self.sync.get_syncdata_from_giphouse()
self.assertIsInstance(email_id, list)
- self.assertIsInstance(email_id[0], awssync.SyncData)
- expected_result = [awssync.SyncData("test1@giphouse.nl", "test1", "Spring 2023")]
+ self.assertIsInstance(email_id[0], SyncData)
+ expected_result = [
+ SyncData("test0@giphouse.nl", "test0", "Spring 2023"),
+ SyncData("test1@giphouse.nl", "test1", "Spring 2023"),
+ SyncData("test2@giphouse.nl", "test2", "Spring 2023"),
+ ]
self.assertEqual(email_id, expected_result)
- def test_get_emails_with_teamids_no_project(self):
- """Test get_emails_with_teamids function."""
+ def test_get_syncdata_from_giphouse_no_project(self):
+        """Test get_syncdata_from_giphouse where the mailing list is not assigned to a project."""
MailingList.objects.all().delete()
self.mailing_list = MailingList.objects.create(address="test2")
- email_id = self.sync.get_emails_with_teamids()
+ email_id = self.sync.get_syncdata_from_giphouse()
self.assertIsInstance(email_id, list)
self.assertEqual(email_id, [])
- def test_get_emails_with_teamids_no_mailing_list(self):
- """Test get_emails_with_teamids function."""
+ def test_get_syncdata_from_giphouse_no_mailing_list(self):
+        """Test get_syncdata_from_giphouse where no mailing lists exist."""
MailingList.objects.all().delete()
Project.objects.all().delete()
- email_id = self.sync.get_emails_with_teamids()
+ email_id = self.sync.get_syncdata_from_giphouse()
self.assertIsInstance(email_id, list)
self.assertEqual(email_id, [])
- def test_get_emails_with_teamids_different_semester(self):
- """Test get_emails_with_teamids function."""
+ def test_get_syncdata_from_giphouse_different_semester(self):
+        """Test get_syncdata_from_giphouse where the project's semester is not the current semester."""
MailingList.objects.all().delete()
new_semester = Semester.objects.create(year=2022, season=Semester.FALL)
- self.mailing_list = MailingList.objects.create(address="test2")
- self.project = Project.objects.create(id=2, name="test2", semester=new_semester, slug="test2")
+ self.mailing_list = MailingList.objects.create(address="test4")
+ self.project = Project.objects.create(id=4, name="test4", semester=new_semester, slug="test4")
self.mailing_list.projects.add(self.project)
- email_id = self.sync.get_emails_with_teamids()
+ email_id = self.sync.get_syncdata_from_giphouse()
self.assertIsInstance(email_id, list)
self.assertEqual(email_id, [])
- def test_create_scp_policy(self):
- self.sync.create_aws_organization()
-
- policy_name = "DenyAll"
- policy_description = "Deny all access."
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
- policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
-
- self.assertFalse(self.sync.fail)
- self.assertEqual(policy["PolicySummary"]["Name"], policy_name)
- self.assertEqual(policy["PolicySummary"]["Description"], policy_description)
- self.assertEqual(policy["Content"], json.dumps(policy_content))
-
- def test_create_scp_policy__exception(self):
- self.sync.create_aws_organization()
-
- policy_name = "DenyAll"
- policy_description = "Deny all access."
- policy_content = {
- "Version": "2012-10-17",
- "Statement": [{"Effect": "NonExistentEffect", "Action": "*", "Resource": "*"}],
- }
- with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
- policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
-
- self.assertTrue(self.sync.fail)
- self.assertIsNone(policy)
-
- def test_attach_scp_policy(self):
- moto_client = boto3.client("organizations")
- self.sync.create_aws_organization()
-
- policy_name = "DenyAll"
- policy_description = "Deny all access."
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
- policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
-
- policy_id = policy["PolicySummary"]["Id"]
- root_id = moto_client.list_roots()["Roots"][0]["Id"]
- self.sync.attach_scp_policy(policy_id, root_id)
-
- current_scp_policies = moto_client.list_policies_for_target(TargetId=root_id, Filter="SERVICE_CONTROL_POLICY")
- current_scp_policy_ids = [scp_policy["Id"] for scp_policy in current_scp_policies["Policies"]]
-
- self.assertIn(policy_id, current_scp_policy_ids)
- self.assertFalse(self.sync.fail)
-
- def test_attach_scp_policy__exception(self):
- self.sync.create_aws_organization()
-
- policy_name = "DenyAll"
- policy_description = "Deny all access."
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
- policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
-
- policy_id = policy["PolicySummary"]["Id"]
- root_id = self.sync.org_info["Id"] # Retrieves organization ID, not root ID, resulting in ClientError.
- self.sync.attach_scp_policy(policy_id, root_id)
-
- self.assertTrue(self.sync.fail)
-
- @mock_sts
- def test_check_aws_api_connection(self):
- success, caller_identity_info = self.sync.check_aws_api_connection()
-
- self.assertTrue(success)
- self.assertIsNotNone(caller_identity_info)
-
- @mock_sts
- def test_check_aws_api_connection__exception(self):
- with patch("boto3.client") as mocker:
- mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity")
- mocker.return_value = mocker
- success, caller_identity_info = self.sync.check_aws_api_connection()
-
- self.assertFalse(success)
- self.assertIsNone(caller_identity_info)
-
- # IAM simulate_principal_policy is not covered by moto.
- def test_check_iam_policy(self):
- iam_user_arn = "daddy"
- desired_actions = []
- mock_evaluation_results = {
- "EvaluationResults": [
- {
- "EvalActionName": "organizations:CreateOrganizationalUnit",
- "EvalDecision": "allowed",
- "EvalResourceName": "*",
- "MissingContextValues": [],
- }
- ]
- }
-
- # success == True
- with patch("boto3.client") as mocker:
- mocker().simulate_principal_policy.return_value = mock_evaluation_results
- success = self.sync.check_iam_policy(iam_user_arn, desired_actions)
- self.assertTrue(success)
-
- # success == False
- mock_evaluation_results["EvaluationResults"][0]["EvalDecision"] = "implicitDeny"
- with patch("boto3.client") as mocker:
- mocker().simulate_principal_policy.return_value = mock_evaluation_results
- success = self.sync.check_iam_policy(iam_user_arn, desired_actions)
- self.assertFalse(success)
-
- def test_check_iam_policy__exception(self):
- iam_user_arn = "daddy"
- desired_actions = []
-
- with patch("boto3.client") as mocker:
- mocker().simulate_principal_policy.side_effect = ClientError({}, "simulate_principal_policy")
- success = self.sync.check_iam_policy(iam_user_arn, desired_actions)
-
- self.assertFalse(success)
-
- def test_check_organization_existence(self):
- moto_client = boto3.client("organizations")
- organization_create_info = moto_client.create_organization(FeatureSet="ALL")["Organization"]
- success, organization_describe_info = self.sync.check_organization_existence()
-
- self.assertTrue(success)
- self.assertEqual(organization_create_info, organization_describe_info)
-
- def test_check_organization_existence__exception(self):
- with patch("boto3.client") as mocker:
- mocker.describe_organization.side_effect = ClientError({}, "describe_organization")
- mocker.return_value = mocker
- success, organization_info = self.sync.check_organization_existence()
-
- self.assertFalse(success)
- self.assertIsNone(organization_info)
-
- @mock_sts
- def test_check_is_management_account(self):
- moto_client = boto3.client("organizations")
-
- moto_client.create_organization(FeatureSet="ALL")["Organization"]
- _, caller_identity_info = self.sync.check_aws_api_connection()
- _, organization_info = self.sync.check_organization_existence()
-
- # is_management_account == True
- success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info)
- self.assertTrue(success_acc)
-
- # is_management_account == False
- caller_identity_info["Account"] = "daddy"
- success_acc = self.sync.check_is_management_account(caller_identity_info, organization_info)
- self.assertFalse(success_acc)
-
- def test_check_scp_enabled(self):
- moto_client = boto3.client("organizations")
-
- # SCP enabled.
- organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"]
- scp_is_enabled = self.sync.check_scp_enabled(organization_info)
- self.assertTrue(scp_is_enabled)
-
- # SCP semi-disabled (pending).
- organization_info["AvailablePolicyTypes"][0]["Status"] = "PENDING_DISABLE"
- scp_is_enabled = self.sync.check_scp_enabled(organization_info)
- self.assertFalse(scp_is_enabled)
-
- # SCP disabled (empty list).
- organization_info["AvailablePolicyTypes"] = []
- scp_is_enabled = self.sync.check_scp_enabled(organization_info)
- self.assertFalse(scp_is_enabled)
-
- @mock_sts
- def test_pipeline_preconditions__all_success(self):
- # Create organization.
- moto_client = boto3.client("organizations")
- moto_client.create_organization(FeatureSet="ALL")["Organization"]
-
- # Mock return value of simulate_principal_policy.
- iam_user_arn = "daddy"
- desired_actions = []
- mock_evaluation_results = {
- "EvaluationResults": [
- {
- "EvalActionName": "organizations:CreateOrganizationalUnit",
- "EvalDecision": "allowed",
- "EvalResourceName": "*",
- "MissingContextValues": [],
- }
- ]
- }
-
- with patch("boto3.client") as mocker:
- mocker().simulate_principal_policy.return_value = mock_evaluation_results
- check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
-
- # Mock return value of check_iam_policy.
- with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker:
- mocker.return_value = check_iam_policy
- success = self.sync.pipeline_preconditions()
-
- self.assertTrue(success)
-
- @mock_sts
- def test_pipeline_preconditions__no_connection(self):
- with patch("boto3.client") as mocker:
- mocker.get_caller_identity.side_effect = ClientError({}, "get_caller_identity")
- mocker.return_value = mocker
- success = self.sync.pipeline_preconditions()
-
- self.assertFalse(success)
-
- def test_pipeline_preconditions__no_iam(self):
- # Mock return value of simulate_principal_policy.
- iam_user_arn = "daddy"
- desired_actions = []
- mock_evaluation_results = {
- "EvaluationResults": [
- {
- "EvalActionName": "organizations:CreateOrganizationalUnit",
- "EvalDecision": "implicitDeny",
- "EvalResourceName": "*",
- "MissingContextValues": [],
- }
- ]
- }
-
- with patch("boto3.client") as mocker:
- mocker().simulate_principal_policy.return_value = mock_evaluation_results
- check_api_actions = self.sync.check_iam_policy(iam_user_arn, desired_actions)
-
- # Mock return value of check_iam_policy.
- with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker:
- mocker.return_value = check_api_actions
- success = self.sync.pipeline_preconditions()
-
- self.assertFalse(success)
-
- @mock_sts
- def test_pipeline_preconditions__no_organization(self):
- # Mock return value of simulate_principal_policy.
- iam_user_arn = "daddy"
- desired_actions = []
- mock_evaluation_results = {
- "EvaluationResults": [
- {
- "EvalActionName": "organizations:CreateOrganizationalUnit",
- "EvalDecision": "allowed",
- "EvalResourceName": "*",
- "MissingContextValues": [],
- }
- ]
- }
-
- with patch("boto3.client") as mocker:
- mocker().simulate_principal_policy.return_value = mock_evaluation_results
- check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
-
- # Mock return value of check_iam_policy.
- with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker:
- mocker.return_value = check_iam_policy
- success = self.sync.pipeline_preconditions()
-
- self.assertFalse(success)
-
- @mock_sts
- def test_pipeline_preconditions__no_management(self):
- moto_client = boto3.client("organizations")
- moto_client.create_organization(FeatureSet="ALL")
-
- # Mock return value of simulate_principal_policy.
- iam_user_arn = "daddy"
- desired_actions = []
- mock_evaluation_results = {
- "EvaluationResults": [
- {
- "EvalActionName": "organizations:CreateOrganizationalUnit",
- "EvalDecision": "allowed",
- "EvalResourceName": "*",
- "MissingContextValues": [],
- }
- ]
- }
-
- with patch("boto3.client") as mocker:
- mocker().simulate_principal_policy.return_value = mock_evaluation_results
- check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
-
- # Mock return value of check_iam_policy.
- with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker_iam:
- mocker_iam.return_value = check_iam_policy
- with patch("projects.aws.awssync.AWSSync.check_aws_api_connection") as mocker_api:
- mocker_api.return_value = True, {"Account": "daddy", "Arn": "01234567890123456789"}
- success = self.sync.pipeline_preconditions()
-
- self.assertFalse(success)
-
- @mock_sts
- def test_pipeline_preconditions__no_scp(self):
- moto_client = boto3.client("organizations")
-
- organization_info = moto_client.create_organization(FeatureSet="ALL")["Organization"]
-
- # Mock return value of simulate_principal_policy.
- iam_user_arn = "daddy"
- desired_actions = []
- mock_evaluation_results = {
- "EvaluationResults": [
- {
- "EvalActionName": "organizations:CreateOrganizationalUnit",
- "EvalDecision": "allowed",
- "EvalResourceName": "*",
- "MissingContextValues": [],
- }
- ]
- }
-
- with patch("boto3.client") as mocker:
- mocker().simulate_principal_policy.return_value = mock_evaluation_results
- check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
-
- # Mock return value of check_iam_policy.
- with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker_iam:
- mocker_iam.return_value = check_iam_policy
-
- # Mock return value of check_organization_existence with no SCP policy enabled.
- organization_info["AvailablePolicyTypes"] = []
- with patch("projects.aws.awssync.AWSSync.check_organization_existence") as mocker:
- mocker.return_value = True, organization_info
- success = self.sync.pipeline_preconditions()
-
- self.assertFalse(success)
-
- """
- def test_pipeline_create_scp_policy(self):
- self.sync.create_aws_organization()
-
- policy_name = "DenyAll"
- policy_description = "Deny all access."
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
-
- policy = self.sync.pipeline_create_scp_policy()
-
- self.assertFalse(self.sync.fail)
- self.assertEqual(policy["PolicySummary"]["Name"], policy_name)
- self.assertEqual(policy["PolicySummary"]["Description"], policy_description)
- self.assertEqual(policy["Content"], json.dumps(policy_content))
-
- def test_pipeline_create_scp_policy__exception(self):
- self.sync.create_aws_organization()
-
- with patch("botocore.client.BaseClient._make_api_call", AWSAPITalkerTest.mock_api):
- policy = self.sync.pipeline_create_scp_policy()
-
- self.assertTrue(self.sync.fail)
- self.assertIsNone(policy)
- """
-
- def test_pipeline_policy(self):
- self.sync.create_aws_organization()
-
- policy_name = "DenyAll"
- policy_description = "Deny all access."
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
- policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
- self.sync.policy_id = policy["PolicySummary"]["Id"]
-
- ou_id = self.sync.create_course_iteration_OU("Test")
-
- success = self.sync.pipeline_policy(ou_id)
- self.assertTrue(success)
-
- def test_pipeline_policy__exception(self):
- self.sync.create_aws_organization()
-
- ou_id = self.sync.create_course_iteration_OU("Test")
-
- success = self.sync.pipeline_policy(ou_id)
- self.assertFalse(success)
-
- def test_pipeline_policy__failure_attach(self):
- self.sync.create_aws_organization()
-
- policy_name = "DenyAll"
- policy_description = "Deny all access."
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
- policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
- self.sync.policy_id = policy["PolicySummary"]["Id"]
-
- ou_id = self.sync.create_course_iteration_OU("Test")
-
- self.sync.attach_scp_policy = MagicMock(side_effect=self.simulateFailure())
+ def test_AWS_sync_list_both_empty(self):
+ gip_list = []
+ aws_list = []
+        self.assertEqual(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_empty_AWS(self):
+ test1 = SyncData("test1@test1.test1", "test1", "test1")
+ test2 = SyncData("test2@test2.test2", "test2", "test2")
+ gip_list = [test1, test2]
+ aws_list = []
+        self.assertEqual(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list)
+
+ def test_AWS_sync_list_empty_GiP(self):
+ test1 = SyncData("test1@test1.test1", "test1", "test1")
+ test2 = SyncData("test2@test2.test2", "test2", "test2")
+ gip_list = []
+ aws_list = [test1, test2]
+        self.assertEqual(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
+
+ def test_AWS_sync_list_both_full(self):
+ test1 = SyncData("test1@test1.test1", "test1", "test1")
+ test2 = SyncData("test2@test2.test2", "test2", "test2")
+ test3 = SyncData("test3@test3.test3", "test3", "test3")
+ gip_list = [test1, test2]
+ aws_list = [test2, test3]
+        self.assertEqual(self.sync.generate_aws_sync_list(gip_list, aws_list), [test1])
+
+ def test_get_tag_value(self):
+ tags = [{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}]
+        self.assertEqual(self.sync.get_tag_value(tags, "project_semester"), "2021")
+        self.assertEqual(self.sync.get_tag_value(tags, "project_slug"), "test1")
+        self.assertIsNone(self.sync.get_tag_value(tags, "project_name"))
+
+ def test_extract_aws_setup(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.api_talker.list_roots()[0]["Id"]
+
+ ou_response = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1")
+ ou_id = ou_response["OrganizationalUnit"]["Id"]
+
+ account_response = self.api_talker.create_account(
+ email="account_1@gmail.com",
+ account_name="account_1",
+ tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}],
+ )
+ account_id = account_response["CreateAccountStatus"]["AccountId"]
+ self.api_talker.move_account(account_id=account_id, source_parent_id=root_id, dest_parent_id=ou_id)
- success = self.sync.pipeline_policy(ou_id)
- self.assertFalse(success)
+ aws_tree = self.sync.extract_aws_setup(root_id)
- @mock_sts
- def test_pipeline(self):
- moto_client = boto3.client("organizations")
+ expected_sync_data = [SyncData("account_1@gmail.com", "test1", "2021")]
+ expected_iteration = Iteration("OU_1", ou_id, expected_sync_data)
+ expected_tree = AWSTree("root", root_id, [expected_iteration])
- # pipeline_preconditions() == False
- success = self.sync.pipeline()
- self.assertFalse(success)
+ self.assertEqual(aws_tree, expected_tree)
- # pipeline_preconditions() == True
- moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ def test_extract_aws_setup_no_slugs(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.api_talker.list_roots()[0]["Id"]
- policy_name = "DenyAll"
- policy_description = "Deny all access."
- policy_content = {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
- policy = self.sync.create_scp_policy(policy_name, policy_description, policy_content)
- self.sync.policy_id = policy["PolicySummary"]["Id"]
-
- iam_user_arn = "daddy"
- desired_actions = []
- mock_evaluation_results = {
- "EvaluationResults": [
- {
- "EvalActionName": "organizations:CreateOrganizationalUnit",
- "EvalDecision": "allowed",
- "EvalResourceName": "*",
- "MissingContextValues": [],
- }
- ]
- }
+ response_OU_1 = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1")
+ OU_1_id = response_OU_1["OrganizationalUnit"]["Id"]
+ response_account_1 = self.api_talker.create_account(
+ email="account_1@gmail.com",
+ account_name="account_1",
+ tags=[],
+ )
+ account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"]
- with patch("boto3.client") as mocker:
- mocker().simulate_principal_policy.return_value = mock_evaluation_results
- check_iam_policy = self.sync.check_iam_policy(iam_user_arn, desired_actions)
+ self.api_talker.move_account(account_id=account_id_1, source_parent_id=root_id, dest_parent_id=OU_1_id)
- with patch("projects.aws.awssync.AWSSync.check_iam_policy") as mocker:
- mocker.return_value = check_iam_policy
- success = self.sync.pipeline()
+ with self.assertRaises(Exception) as context:
+ self.sync.extract_aws_setup(root_id)
+ self.assertIn("Found incomplete accounts in AWS", str(context.exception))
- self.assertTrue(success)
+ def test_get_or_create_course_ou__new(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
+ tree = AWSTree("root", root_id, [])
- def test_pipeline__exception_list_roots(self):
- self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ current_semester_name = str(Semester.objects.get_or_create_current_semester())
+ course_ou_id = self.sync.get_or_create_course_ou(tree)
- with patch("boto3.client") as mocker:
- mocker().list_roots.side_effect = ClientError({}, "list_roots")
- success = self.sync.pipeline()
-
- self.assertFalse(success)
+ course_ou_exists = any(
+ ou["Id"] == course_ou_id and ou["Name"] == current_semester_name
+ for ou in self.sync.api_talker.list_organizational_units_for_parent(root_id)
+ )
- def test_pipeline__edge_case_double_emails(self):
- moto_client = boto3.client("organizations")
- moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ self.assertTrue(course_ou_exists)
- aws_tree = awssync.AWSTree(
- "Root",
- "123",
+ def test_get_or_create_course_ou__already_exists(self):
+ tree = AWSTree(
+ "root",
+ "r-123",
[
- awssync.Iteration(
- "Spring 2023",
- "456",
- [
- awssync.SyncData("email1@example.com", "project1", "Spring 2023"),
- ],
- )
+ Iteration("Spring 2023", "ou-456", [SyncData("alice@giphouse.nl", "alices-project", "Spring 2023")]),
+ Iteration("Fall 2023", "ou-789", [SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023")]),
],
)
- gip_teams = [
- awssync.SyncData("email1@example.com", "project1", "Spring 2023"),
- awssync.SyncData("email1@example.com", "project2", "Spring 2023"),
- ]
-
- self.sync.pipeline_preconditions = MagicMock(return_value=True)
- self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
- self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
- success = self.sync.pipeline()
-
- self.assertFalse(success)
+ course_ou_id = self.sync.get_or_create_course_ou(tree)
+ self.assertEqual("ou-456", course_ou_id)
- def test_pipeline__edge_case_incorrectly_placed(self):
- moto_client = boto3.client("organizations")
- moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ def test_attach_policy__not_attached(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
- aws_tree = awssync.AWSTree(
- "Root",
- "123",
- [
- awssync.Iteration(
- "Fall 2023",
- "456",
- [
- awssync.SyncData("email1@example.com", "project1", "Spring 2023"),
- ],
- )
- ],
+ new_policy_content = json.dumps(
+ {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
+ )
+ new_policy_id = self.sync.api_talker.org_client.create_policy(
+ Content=new_policy_content, Description="Deny all access.", Name="DenyAll", Type="SERVICE_CONTROL_POLICY"
+ )["Policy"]["PolicySummary"]["Id"]
+
+ self.sync.attach_policy(root_id, new_policy_id)
+ attached_policies = self.sync.api_talker.org_client.list_policies_for_target(
+ TargetId=root_id, Filter="SERVICE_CONTROL_POLICY"
+ )["Policies"]
+ attached_policy_ids = [policy["Id"] for policy in attached_policies]
+
+ self.assertIn(new_policy_id, attached_policy_ids)
+
+ def test_attach_policy__caught_exception(self):
+        # Error code "DuplicatePolicyAttachmentException" cannot be simulated by moto, so it is mocked.
+ attach_policy_hard_side_effect = ClientError(
+ {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy"
)
+ with patch.object(
+ self.sync.api_talker.org_client, "attach_policy", side_effect=attach_policy_hard_side_effect
+ ):
+ return_value = self.sync.attach_policy("r-123", "p-123")
- gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")]
+ self.assertIsNone(return_value)
- self.sync.pipeline_preconditions = MagicMock(return_value=True)
- self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
- self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
- with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
- self.assertRaises(Exception, self.sync.pipeline)
+ def test_attach_policy__reraised_exception(self):
+ self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123")
- def test_pipeline__edge_case_double_iteration_names(self):
- moto_client = boto3.client("organizations")
- moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ def test_get_current_policy_id(self):
+ self.policy_id1 = AWSPolicy.objects.create(
+ policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False
+ )
+ self.policy_id2 = AWSPolicy.objects.create(
+ policy_id="Test-Policy2", tags_key="Test-Policy-Id2", is_current_policy=True
+ )
+ current_policy_id = self.sync.get_current_policy_id()
+ self.assertIsInstance(current_policy_id, str)
+ self.assertEqual(current_policy_id, self.policy_id2.policy_id)
- aws_tree = awssync.AWSTree(
- "Root",
- "123",
- [
- awssync.Iteration(
- "Spring 2023", "456", [awssync.SyncData("email1@example.com", "project1", "Spring 2023")]
- ),
- awssync.Iteration("Spring 2023", "789", []),
- ],
+ def test_get_current_policy__no_current_policy_id(self):
+ self.policy_id1 = AWSPolicy.objects.create(
+ policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False
)
+ self.assertRaises(Exception, self.sync.get_current_policy_id)
- gip_teams = [awssync.SyncData("email1@example.com", "project1", "Spring 2023")]
+ def test_create_move_account(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
- self.sync.pipeline_preconditions = MagicMock(return_value=True)
- self.sync.extract_aws_setup = MagicMock(return_value=aws_tree)
- self.sync.get_emails_with_teamids = MagicMock(return_value=gip_teams)
- with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
- self.assertRaises(Exception, self.sync.pipeline)
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
- def test_pipeline__failed_creating_iteration_ou(self):
- moto_client = boto3.client("organizations")
- moto_client.create_organization(FeatureSet="ALL")["Organization"]
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
+ self.assertTrue(success)
- self.sync.pipeline_preconditions = MagicMock(return_value=True)
- with patch("boto3.client") as mocker:
- mocker().create_organizational_unit.side_effect = ClientError({}, "create_organizational_unit")
- success = self.sync.pipeline()
+ def test_create_move_account__exception_failure(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
- self.assertFalse(success)
-
- def test_pipeline__exception_attaching_policy(self):
- self.sync.create_aws_organization()
- self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
- with patch("boto3.client") as mocker:
- mocker().attach_policy.side_effect = ClientError(
- {"Error": {"Code": "PolicyTypeNotEnabledException"}}, "attach_policy"
- )
- success = self.sync.pipeline()
+ with patch.object(self.sync.api_talker, "move_account", side_effect=ClientError({}, "move_account")):
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
self.assertFalse(success)
- def test_pipeline__already_attached_policy(self):
- self.sync.create_aws_organization()
- self.sync.pipeline_preconditions = MagicMock(return_value=True)
-
- with patch("boto3.client") as mocker:
- mocker().attach_policy.side_effect = ClientError(
- {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy"
- )
- success = self.sync.pipeline()
-
- self.assertFalse(success)
+ def test_create_move_account__no_move(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
- def test_pipeline__failed_create_and_move_account(self):
- self.sync.create_aws_organization()
- self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
- with patch("boto3.client") as mocker:
- mocker().move_account.side_effect = ClientError({}, "move_account")
- success = self.sync.pipeline()
+ with patch.object(
+ self.sync.api_talker,
+ "describe_create_account_status",
+ side_effect=ClientError({}, "describe_create_account_status"),
+ ):
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
self.assertFalse(success)
- def test_pipeline__exception_extract_aws_setup(self):
- self.sync.pipeline_preconditions = MagicMock(return_value=True)
+ def test_create_move_account__failed(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
- with patch("boto3.client") as mocker:
- mocker().list_organizational_units_for_parent.side_effect = ClientError(
- {}, "list_organizational_units_for_parent"
- )
- success = self.sync.pipeline()
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("alice@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
+ with patch.object(
+ self.sync.api_talker.org_client,
+ "describe_create_account_status",
+ return_value={"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}},
+ ):
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
self.assertFalse(success)
- def test_pipeline_update_current_course_iteration_ou___failure_check_current_ou(self):
-
- self.sync.check_current_ou_exists = MagicMock(return_value=(False, None))
-
- self.sync.create_aws_organization()
- success, id = self.sync.pipeline_update_current_course_iteration_ou(None)
- self.assertTrue(success)
- self.assertFalse(id is None)
-
- def test_pipeline_update_current_course_iteration_ou___success(self):
-
- self.sync.check_current_ou_exists = MagicMock(return_value=(True, "1234"))
-
- self.sync.create_aws_organization()
- success, id = self.sync.pipeline_update_current_course_iteration_ou(None)
- self.assertTrue(success)
- self.assertEquals(id, "1234")
+ def test_create_move_account__in_progress(self):
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
- def test_pipeline_update_current_course_iteration_ou___failure_create_ou(self):
-
- self.sync.check_current_ou_exists = MagicMock(return_value=(False, None))
- self.sync.create_course_iteration_OU = MagicMock(side_effect=self.simulateFailure())
+ dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
+ dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
+ members = [
+ SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
+ SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
+ ]
- self.sync.create_aws_organization()
- success, failure_reason = self.sync.pipeline_update_current_course_iteration_ou(None)
+ with patch.object(
+ self.sync.api_talker.org_client,
+ "describe_create_account_status",
+ return_value={"CreateAccountStatus": {"State": "IN_PROGRESS"}},
+ ):
+ success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
self.assertFalse(success)
- self.assertEquals(failure_reason, "ITERATION_OU_CREATION_FAILED")
- self.assertTrue(self.sync.fail)
-
- def test_pipeline_create_account(self):
- self.sync.create_aws_organization()
- success, response = self.sync.pipeline_create_account(
- awssync.SyncData("alice@example.com", "alice", "Spring 2023")
+ def test_pipeline__no_accounts_no_ou(self):
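+        # simulate_principal_policy is not covered by moto, so mock it to report that all actions are allowed.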
+ self.sync.checker.api_talker.simulate_principal_policy = MagicMock(
+ return_value={"EvaluationResults": [{"EvalDecision": "allowed"}]}
)
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ self.setup_policy()
+ pipeline_success = self.sync.pipeline()
- self.assertTrue(success)
- self.assertIsNotNone(response)
-
- def test_pipeline_create_account__exception_create_account(self):
- self.sync.create_aws_organization()
-
- with patch("boto3.client") as mocker:
- mocker().create_account.side_effect = ClientError({}, "create_account")
- success, response = self.sync.pipeline_create_account(
- awssync.SyncData("alice@example.com", "alice", "Spring 2023")
- )
-
- self.assertFalse(success)
- self.assertEquals(response, "CLIENTERROR_CREATE_ACCOUNT")
-
- def test_pipeline_create_account__exception_describe_account_status(self):
- self.sync.create_aws_organization()
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
+ root_ous = self.sync.api_talker.list_organizational_units_for_parent(root_id)
+ root_ou_names = [ou["Name"] for ou in root_ous]
- with patch("boto3.client") as mocker:
- mocker().describe_create_account_status.side_effect = ClientError({}, "describe_create_account_status")
- success, response = self.sync.pipeline_create_account(
- awssync.SyncData("alice@example.com", "alice", "Spring 2023")
- )
+ current_semester = str(Semester.objects.get_or_create_current_semester())
+ current_accounts = self.sync.api_talker.org_client.list_accounts()["Accounts"]
- self.assertFalse(success)
- self.assertEquals(response, "CLIENTERROR_DESCRIBE_CREATE_ACCOUNT_STATUS")
+ self.assertIn(current_semester, root_ou_names)
+ self.assertTrue(pipeline_success)
- def test_pipeline_create_account__state_failed(self):
- self.sync.create_aws_organization()
+ self.assertEqual(len(current_accounts), 1)
+ self.assertEqual(current_accounts[0]["Name"], "master")
- with patch("boto3.client") as mocker:
- response = {"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}}
- mocker().describe_create_account_status.return_value = response
- success, response = self.sync.pipeline_create_account(
- awssync.SyncData("alice@example.com", "alice", "Spring 2023")
- )
-
- self.assertFalse(success)
- self.assertEquals(response, "EMAIL_ALREADY_EXISTS")
-
- def test_pipeline_create_account__state_in_progress(self):
- self.sync.create_aws_organization()
-
- with patch("boto3.client") as mocker:
- response = {
- "CreateAccountStatus": {
- "State": "IN_PROGRESS",
- }
- }
- mocker().describe_create_account_status.return_value = response
- success, response = self.sync.pipeline_create_account(
- awssync.SyncData("alice@example.com", "alice", "Spring 2023")
- )
-
- self.assertFalse(success)
- self.assertEquals(response, "STILL_IN_PROGRESS")
+ def test_pipeline__new_accounts_existing_ou(self):
+ self.sync.checker.api_talker.simulate_principal_policy = MagicMock(
+ return_value={"EvaluationResults": [{"EvalDecision": "allowed"}]}
+ )
+ self.sync.api_talker.create_organization(feature_set="ALL")
+ self.setup_policy()
- def test_pipeline_create_and_move_accounts(self):
- moto_client = boto3.client("organizations")
- self.sync.create_aws_organization()
+ root_id = self.sync.api_talker.list_roots()[0]["Id"]
- new_member_accounts = [
- awssync.SyncData("alice@example.com", "alice", "Spring 2023"),
- awssync.SyncData("bob@example.com", "bob", "Spring 2023"),
- ]
- root_id = moto_client.list_roots()["Roots"][0]["Id"]
- course_iteration_id = self.sync.create_course_iteration_OU("Spring 2023")
+ current_semester = str(Semester.objects.get_or_create_current_semester())
+ course_ou = self.sync.api_talker.create_organizational_unit(root_id, current_semester)
+ course_ou_id = course_ou["OrganizationalUnit"]["Id"]
- success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
- self.assertTrue(success)
+ self.sync.get_syncdata_from_giphouse = MagicMock(
+ return_value=[
+ SyncData("alice@giphouse.nl", "alices-project", current_semester),
+ SyncData("bob@giphouse.nl", "bobs-project", current_semester),
+ ]
+ )
- def test_pipeline_create_and_move_accounts__email_exists(self):
- moto_client = boto3.client("organizations")
- self.sync.create_aws_organization()
+ pipeline_success = self.sync.pipeline()
+ course_accounts = self.sync.api_talker.list_accounts_for_parent(course_ou_id)
+ course_account_emails = [account["Email"] for account in course_accounts]
- new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")]
- root_id = moto_client.list_roots()["Roots"][0]["Id"]
- course_iteration_id = self.sync.create_course_iteration_OU("2023Fall")
+ self.assertTrue(pipeline_success)
+ self.assertEqual(["alice@giphouse.nl", "bob@giphouse.nl"], course_account_emails)
- with patch("projects.aws.awssync.AWSSync.pipeline_create_account") as mocker:
- mocker.return_value = False, "EMAIL_ALREADY_EXISTS"
- success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
+ def test_synchronise__success(self):
+ with patch("projects.aws.awssync.AWSSync.pipeline", return_value=True):
+ response = self.client.get(reverse("admin:synchronise_to_aws"), follow=True)
- self.assertFalse(success)
+ self.assertEqual(response.status_code, 200)
+ self.assertContains(response, self.sync.SUCCESS_MSG)
- def test_pipeline_create_and_move_accounts__exception_move_account(self):
- moto_client = boto3.client("organizations")
- self.sync.create_aws_organization()
+ def test_synchronise__failure(self):
+ with patch("projects.aws.awssync.AWSSync.pipeline", return_value=False):
+ response = self.client.get(reverse("admin:synchronise_to_aws"), follow=True)
- new_member_accounts = [("alice@example.com", "alice"), ("bob@example.com", "bob")]
- root_id = moto_client.list_roots()["Roots"][0]["Id"]
- course_iteration_id = self.sync.create_course_iteration_OU("2023Fall")
+ self.assertEqual(response.status_code, 200)
+ self.assertContains(response, self.sync.FAIL_MSG)
- self.sync.pipeline_create_account = MagicMock(return_value=(True, 1234))
- with patch("boto3.client") as mocker:
- mocker().move_account.side_effect = ClientError({}, "move_account")
- success = self.sync.pipeline_create_and_move_accounts(new_member_accounts, root_id, course_iteration_id)
+ def test_synchronise__api_error(self):
+ api_error = ClientError({"Error": {"Code": "AccessDeniedException"}}, "create_organization")
+ with patch("projects.aws.awssync.AWSSync.pipeline", side_effect=api_error):
+ response = self.client.get(reverse("admin:synchronise_to_aws"), follow=True)
- self.assertFalse(success)
+ self.assertEqual(response.status_code, 200)
+ self.assertContains(response, self.sync.API_ERROR_MSG)
- @mock_organizations
- def test_get_aws_data(self):
- moto_client = boto3.client("organizations")
- self.sync.create_aws_organization()
- root_id = moto_client.list_roots()["Roots"][0]["Id"]
+ def test_synchronise__sync_error(self):
+ sync_error = Exception("Synchronization Error")
+ self.sync.api_talker.create_organization(feature_set="ALL")
- response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1")
- OU_1_id = response_OU_1["OrganizationalUnit"]["Id"]
- response_account_1 = moto_client.create_account(
- Email="account_1@gmail.com",
- AccountName="account_1",
- Tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}],
- )
- account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"]
- moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id)
+ with patch("projects.aws.awssync.AWSSync.pipeline", side_effect=sync_error):
+ response = self.client.get(reverse("admin:synchronise_to_aws"), follow=True)
- aws_tree = self.sync.extract_aws_setup(root_id)
- iteration_test = awssync.Iteration("OU_1", OU_1_id, [awssync.SyncData("account_1@gmail.com", "test1", "2021")])
- aws_tree_test = awssync.AWSTree("root", root_id, [iteration_test])
- self.assertEquals(aws_tree, aws_tree_test)
-
- @mock_organizations
- def test_get_aws_data_no_root(self):
- boto3.client("organizations")
- self.sync.create_aws_organization()
- self.sync.extract_aws_setup("NonExistentRootID")
- self.assertTrue(self.sync.fail)
-
- @mock_organizations
- def test_get_aws_data_no_slugs(self):
- moto_client = boto3.client("organizations")
- self.sync.create_aws_organization()
- root_id = moto_client.list_roots()["Roots"][0]["Id"]
-
- response_OU_1 = moto_client.create_organizational_unit(ParentId=root_id, Name="OU_1")
- OU_1_id = response_OU_1["OrganizationalUnit"]["Id"]
- response_account_1 = moto_client.create_account(
- Email="account_1@gmail.com",
- AccountName="account_1",
- Tags=[],
- )
- account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"]
- moto_client.move_account(AccountId=account_id_1, SourceParentId=root_id, DestinationParentId=OU_1_id)
- self.sync.extract_aws_setup(root_id)
- self.assertTrue(self.sync.fail)
-
-
-class AWSAPITalkerTest(TestCase):
- def mock_api(self, operation_name, kwarg):
- if operation_name == "CreateOrganization":
- raise ClientError(
- {
- "Error": {
- "Message": "The AWS account is already a member of an organization.",
- "Code": "AlreadyInOrganizationException",
- },
- "ResponseMetadata": {
- "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "HTTPStatusCode": 400,
- "HTTPHeaders": {
- "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "content-type": "application/x-amz-json-1.1",
- "content-length": "111",
- "date": "Sun, 01 Jan 2023 00:00:00 GMT",
- "connection": "close",
- },
- "RetryAttempts": 0,
- },
- "Message": "The AWS account is already a member of an organization.",
- },
- "create_organization",
- )
- if operation_name == "CreateOrganizationalUnit":
- raise ClientError(
- {
- "Error": {
- "Message": "The OU already exists.",
- "Code": "ParentNotFoundException",
- },
- "ResponseMetadata": {
- "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "HTTPStatusCode": 400,
- "HTTPHeaders": {
- "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "content-type": "application/x-amz-json-1.1",
- "content-length": "111",
- "date": "Sun, 01 Jan 2023 00:00:00 GMT",
- "connection": "close",
- },
- "RetryAttempts": 0,
- },
- "Message": "The OU already exists.",
- },
- "create_organizational_unit",
- )
- if operation_name == "CreatePolicy":
- raise ClientError(
- {
- "Error": {
- "Message": """The provided policy document does not meet the
- requirements of the specified policy type.""",
- "Code": "MalformedPolicyDocumentException",
- },
- "ResponseMetadata": {
- "RequestId": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "HTTPStatusCode": 400,
- "HTTPHeaders": {
- "x-amzn-requestid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
- "content-type": "application/x-amz-json-1.1",
- "content-length": "147",
- "date": "Sun, 01 Jan 2023 00:00:00 GMT",
- "connection": "close",
- },
- "RetryAttempts": 0,
- },
- "Message": """The provided policy document does not meet the
- requirements of the specified policy type.""",
- },
- "create_policy",
- )
- return botocore.client.BaseClient._make_api_call(self, operation_name, kwarg)
+ self.assertEqual(response.status_code, 200)
+ self.assertContains(response, self.sync.SYNC_ERROR_MSG)
diff --git a/website/projects/tests/tests_aws/test_awssync_checks.py b/website/projects/tests/tests_aws/test_awssync_checks.py
index 6a141759..9989201b 100644
--- a/website/projects/tests/tests_aws/test_awssync_checks.py
+++ b/website/projects/tests/tests_aws/test_awssync_checks.py
@@ -87,6 +87,9 @@ def setUp(self):
],
)
+ self.logger = MagicMock()
+ self.checks.logger = self.logger
+
def test_check_members_in_correct_iteration(self):
# Test when correct
self.assertIsNone(self.checks.check_members_in_correct_iteration(self.aws_tree1))
diff --git a/website/projects/tests/tests_aws/test_awssync_refactored.py b/website/projects/tests/tests_aws/test_awssync_refactored.py
deleted file mode 100644
index 52619820..00000000
--- a/website/projects/tests/tests_aws/test_awssync_refactored.py
+++ /dev/null
@@ -1,320 +0,0 @@
-"""Tests for awssync_refactored.py."""
-import json
-from unittest.mock import patch
-
-
-from botocore.exceptions import ClientError
-
-from django.test import TestCase
-
-from moto import mock_organizations
-
-from courses.models import Semester
-
-from mailing_lists.models import MailingList
-
-from projects.aws.awssync_refactored import AWSSyncRefactored
-from projects.aws.awssync_structs import AWSTree, Iteration, SyncData
-from projects.models import AWSPolicy, Project
-
-
-@mock_organizations
-class AWSSyncRefactoredTest(TestCase):
- def setUp(self):
- """Set up testing environment."""
- self.sync = AWSSyncRefactored()
- self.api_talker = self.sync.api_talker
-
- def test_get_syncdata_from_giphouse_normal(self):
- """Test get_emails_with_teamids function in optimal conditions."""
- self.semester = Semester.objects.create(year=2023, season=Semester.SPRING)
- for i in range(3):
- self.mailing_list = MailingList.objects.create(address="test" + str(i))
- self.project = Project.objects.create(
- id=i, name="test" + str(i), semester=self.semester, slug="test" + str(i)
- )
- self.mailing_list.projects.add(self.project)
-
- email_id = self.sync.get_syncdata_from_giphouse()
-
- self.assertIsInstance(email_id, list)
- self.assertIsInstance(email_id[0], SyncData)
- expected_result = [
- SyncData("test0@giphouse.nl", "test0", "Spring 2023"),
- SyncData("test1@giphouse.nl", "test1", "Spring 2023"),
- SyncData("test2@giphouse.nl", "test2", "Spring 2023"),
- ]
- self.assertEqual(email_id, expected_result)
-
- def test_get_syncdata_from_giphouse_no_project(self):
- """Test get_emails_with_teamids function where the mailinglist is not assigned to a project"""
- MailingList.objects.all().delete()
- self.mailing_list = MailingList.objects.create(address="test2")
- email_id = self.sync.get_syncdata_from_giphouse()
- self.assertIsInstance(email_id, list)
- self.assertEqual(email_id, [])
-
- def test_get_syncdata_from_giphouse_no_mailing_list(self):
- """Test get_emails_with_teamids function where no mailinglists exist"""
- MailingList.objects.all().delete()
- Project.objects.all().delete()
- email_id = self.sync.get_syncdata_from_giphouse()
- self.assertIsInstance(email_id, list)
- self.assertEqual(email_id, [])
-
- def test_get_syncdata_from_giphouse_different_semester(self):
- """Test get_emails_with_teamids function where the semester is not equal to the current semester"""
- MailingList.objects.all().delete()
- new_semester = Semester.objects.create(year=2022, season=Semester.FALL)
- self.mailing_list = MailingList.objects.create(address="test4")
- self.project = Project.objects.create(id=4, name="test4", semester=new_semester, slug="test4")
- self.mailing_list.projects.add(self.project)
- email_id = self.sync.get_syncdata_from_giphouse()
- self.assertIsInstance(email_id, list)
- self.assertEqual(email_id, [])
-
- def test_AWS_sync_list_both_empty(self):
- gip_list = []
- aws_list = []
- self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
-
- def test_AWS_sync_list_empty_AWS(self):
- test1 = SyncData("test1@test1.test1", "test1", "test1")
- test2 = SyncData("test2@test2.test2", "test2", "test2")
- gip_list = [test1, test2]
- aws_list = []
- self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), gip_list)
-
- def test_AWS_sync_list_empty_GiP(self):
- test1 = SyncData("test1@test1.test1", "test1", "test1")
- test2 = SyncData("test2@test2.test2", "test2", "test2")
- gip_list = []
- aws_list = [test1, test2]
- self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [])
-
- def test_AWS_sync_list_both_full(self):
- test1 = SyncData("test1@test1.test1", "test1", "test1")
- test2 = SyncData("test2@test2.test2", "test2", "test2")
- test3 = SyncData("test3@test3.test3", "test3", "test3")
- gip_list = [test1, test2]
- aws_list = [test2, test3]
- self.assertEquals(self.sync.generate_aws_sync_list(gip_list, aws_list), [test1])
-
- def test_get_tag_value(self):
- tags = [{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}]
- self.assertEquals(self.sync.get_tag_value(tags, "project_semester"), "2021")
- self.assertEquals(self.sync.get_tag_value(tags, "project_slug"), "test1")
- self.assertEquals(self.sync.get_tag_value(tags, "project_name"), None)
-
- def test_extract_aws_setup(self):
- self.sync.api_talker.create_organization(feature_set="ALL")
- root_id = self.api_talker.list_roots()[0]["Id"]
-
- ou_response = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1")
- ou_id = ou_response["OrganizationalUnit"]["Id"]
-
- account_response = self.api_talker.create_account(
- email="account_1@gmail.com",
- account_name="account_1",
- tags=[{"Key": "project_semester", "Value": "2021"}, {"Key": "project_slug", "Value": "test1"}],
- )
- account_id = account_response["CreateAccountStatus"]["AccountId"]
- self.api_talker.move_account(account_id=account_id, source_parent_id=root_id, dest_parent_id=ou_id)
-
- aws_tree = self.sync.extract_aws_setup(root_id)
-
- expected_sync_data = [SyncData("account_1@gmail.com", "test1", "2021")]
- expected_iteration = Iteration("OU_1", ou_id, expected_sync_data)
- expected_tree = AWSTree("root", root_id, [expected_iteration])
-
- self.assertEqual(aws_tree, expected_tree)
-
- def test_extract_aws_setup_no_slugs(self):
- self.sync.api_talker.create_organization(feature_set="ALL")
- root_id = self.api_talker.list_roots()[0]["Id"]
-
- response_OU_1 = self.api_talker.create_organizational_unit(parent_id=root_id, ou_name="OU_1")
- OU_1_id = response_OU_1["OrganizationalUnit"]["Id"]
- response_account_1 = self.api_talker.create_account(
- email="account_1@gmail.com",
- account_name="account_1",
- tags=[],
- )
- account_id_1 = response_account_1["CreateAccountStatus"]["AccountId"]
-
- self.api_talker.move_account(account_id=account_id_1, source_parent_id=root_id, dest_parent_id=OU_1_id)
-
- with self.assertRaises(Exception) as context:
- self.sync.extract_aws_setup(root_id)
- self.assertIn("Found incomplete accounts in AWS", str(context.exception))
-
- def test_get_or_create_course_ou__new(self):
- self.sync.api_talker.create_organization(feature_set="ALL")
- root_id = self.sync.api_talker.list_roots()[0]["Id"]
- tree = AWSTree("root", root_id, [])
- current_semester_name = "Spring 2023"
-
- with patch.object(Semester.objects, "get_or_create_current_semester", return_value=current_semester_name):
- course_ou_id = self.sync.get_or_create_course_ou(tree)
-
- course_ou_exists = any(
- ou["Id"] == course_ou_id and ou["Name"] == current_semester_name
- for ou in self.sync.api_talker.list_organizational_units_for_parent(root_id)
- )
-
- self.assertTrue(course_ou_exists)
-
- def test_get_or_create_course_ou__already_exists(self):
- tree = AWSTree(
- "root",
- "r-123",
- [
- Iteration("Spring 2023", "ou-456", [SyncData("alice@giphouse.nl", "alices-project", "Spring 2023")]),
- Iteration("Fall 2023", "ou-789", [SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023")]),
- ],
- )
-
- with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2023"):
- course_ou_id = self.sync.get_or_create_course_ou(tree)
- self.assertEqual("ou-456", course_ou_id)
-
- def test_attach_policy__not_attached(self):
- self.sync.api_talker.create_organization(feature_set="ALL")
- root_id = self.sync.api_talker.list_roots()[0]["Id"]
-
- new_policy_content = json.dumps(
- {"Version": "2012-10-17", "Statement": [{"Effect": "Deny", "Action": "*", "Resource": "*"}]}
- )
- new_policy_id = self.sync.api_talker.org_client.create_policy(
- Content=new_policy_content, Description="Deny all access.", Name="DenyAll", Type="SERVICE_CONTROL_POLICY"
- )["Policy"]["PolicySummary"]["Id"]
-
- self.sync.attach_policy(root_id, new_policy_id)
- attached_policies = self.sync.api_talker.org_client.list_policies_for_target(
- TargetId=root_id, Filter="SERVICE_CONTROL_POLICY"
- )["Policies"]
- attached_policy_ids = [policy["Id"] for policy in attached_policies]
-
- self.assertIn(new_policy_id, attached_policy_ids)
-
- def test_attach_policy__caught_exception(self):
- # Error code "DuplicatePolicyAttachmentException" can not be simulated by moto, so it is mocked.
- attach_policy_hard_side_effect = ClientError(
- {"Error": {"Code": "DuplicatePolicyAttachmentException"}}, "attach_policy"
- )
- with patch.object(
- self.sync.api_talker.org_client, "attach_policy", side_effect=attach_policy_hard_side_effect
- ):
- return_value = self.sync.attach_policy("r-123", "p-123")
-
- self.assertIsNone(return_value)
-
- def test_attach_policy__reraised_exception(self):
- self.assertRaises(ClientError, self.sync.attach_policy, "r-123", "p-123")
-
- def test_get_current_policy_id(self):
- self.policy_id1 = AWSPolicy.objects.create(
- policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False
- )
- self.policy_id2 = AWSPolicy.objects.create(
- policy_id="Test-Policy2", tags_key="Test-Policy-Id2", is_current_policy=True
- )
- current_policy_id = self.sync.get_current_policy_id()
- self.assertIsInstance(current_policy_id, str)
- self.assertEqual(current_policy_id, self.policy_id2.policy_id)
-
- def test_get_current_policy__no_current_policy_id(self):
- self.policy_id1 = AWSPolicy.objects.create(
- policy_id="Test-Policy1", tags_key="Test-Policy-Id1", is_current_policy=False
- )
- self.assertRaises(Exception, self.sync.get_current_policy_id)
-
- def test_create_move_account(self):
- self.sync.api_talker.create_organization(feature_set="ALL")
- root_id = self.sync.api_talker.list_roots()[0]["Id"]
-
- dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
- dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
- members = [
- SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
- SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
- ]
-
- success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
- self.assertTrue(success)
-
- def test_create_move_account__exception_failure(self):
- self.sync.api_talker.create_organization(feature_set="ALL")
- root_id = self.sync.api_talker.list_roots()[0]["Id"]
-
- dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
- dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
- members = [
- SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
- SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
- ]
-
- with patch.object(self.sync.api_talker, "move_account", side_effect=ClientError({}, "move_account")):
- success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
-
- self.assertFalse(success)
-
- def test_create_move_account__no_move(self):
- self.sync.api_talker.create_organization(feature_set="ALL")
- root_id = self.sync.api_talker.list_roots()[0]["Id"]
-
- dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
- dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
- members = [
- SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
- SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
- ]
-
- with patch.object(
- self.sync.api_talker,
- "describe_create_account_status",
- side_effect=ClientError({}, "describe_create_account_status"),
- ):
- success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
-
- self.assertFalse(success)
-
- def test_create_move_account__failed(self):
- self.sync.api_talker.create_organization(feature_set="ALL")
- root_id = self.sync.api_talker.list_roots()[0]["Id"]
-
- dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
- dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
- members = [
- SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
- SyncData("alice@giphouse.nl", "bobs-project", "Fall 2023"),
- ]
-
- with patch.object(
- self.sync.api_talker.org_client,
- "describe_create_account_status",
- return_value={"CreateAccountStatus": {"State": "FAILED", "FailureReason": "EMAIL_ALREADY_EXISTS"}},
- ):
- success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
-
- self.assertFalse(success)
-
- def test_create_move_account__in_progress(self):
- self.sync.api_talker.create_organization(feature_set="ALL")
- root_id = self.sync.api_talker.list_roots()[0]["Id"]
-
- dest_ou = self.sync.api_talker.create_organizational_unit(root_id, "destination_ou")
- dest_ou_id = dest_ou["OrganizationalUnit"]["Id"]
- members = [
- SyncData("alice@giphouse.nl", "alices-project", "Spring 2023"),
- SyncData("bob@giphouse.nl", "bobs-project", "Fall 2023"),
- ]
-
- with patch.object(
- self.sync.api_talker.org_client,
- "describe_create_account_status",
- return_value={"CreateAccountStatus": {"State": "IN_PROGRESS"}},
- ):
- success = self.sync.create_and_move_accounts(members, root_id, dest_ou_id)
-
- self.assertFalse(success)
diff --git a/website/projects/tests/tests_aws/test_awssync_structs.py b/website/projects/tests/tests_aws/test_awssync_structs.py
index 3ecb722c..3915bed0 100644
--- a/website/projects/tests/tests_aws/test_awssync_structs.py
+++ b/website/projects/tests/tests_aws/test_awssync_structs.py
@@ -1,11 +1,7 @@
"""Tests for awssync_structs.py."""
-from unittest.mock import patch
-
from django.test import TestCase
-from courses.models import Semester
-
from projects.aws import awssync
@@ -161,27 +157,6 @@ def test_repr_SyncData(self):
def test_awstree_to_syncdata_list(self):
self.assertEqual(self.aws_tree1.awstree_to_syncdata_list(), self.treelist)
- def test_check_for_double_member_email(self):
- # Test when there are no duplicate emails
- self.assertFalse(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
-
- # Test when there is a duplicate email
- self.sync_list.append(awssync.SyncData("email4@example.com", "Spring 2022", "Project G"))
- self.assertTrue(self.sync.check_for_double_member_email(self.aws_list, self.sync_list))
-
- def test_check_current_ou_exists(self):
- # Test when current semester OU does not exist
- with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Fall 2022"):
- self.assertTrue(Semester.objects.get_or_create_current_semester() == "Fall 2022")
- val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
- self.assertEqual((val1, val2), (False, None))
-
- # Test when current semester OU exists
- with patch.object(Semester.objects, "get_or_create_current_semester", return_value="Spring 2021"):
- self.assertTrue(Semester.objects.get_or_create_current_semester() == "Spring 2021")
- val1, val2 = self.sync.check_current_ou_exists(self.aws_tree1)
- self.assertEqual((val1, val2), (True, "98765"))
-
def test_AWSTree_equals(self):
self.assertEqual(self.aws_tree1, self.aws_tree1)
self.assertNotEqual(self.aws_tree1, self.aws_tree2)
From df330ad9c99ff8149ef65546d219f4327ecb861c Mon Sep 17 00:00:00 2001
From: Filip Łysak <92109241+FilipLysak001@users.noreply.github.com>
Date: Tue, 6 Jun 2023 11:23:38 +0200
Subject: [PATCH 28/32] changes for resolving security (#68)
---
website/projects/aws/awssync.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/website/projects/aws/awssync.py b/website/projects/aws/awssync.py
index 6939751c..b6252fdd 100644
--- a/website/projects/aws/awssync.py
+++ b/website/projects/aws/awssync.py
@@ -163,6 +163,7 @@ def create_and_move_accounts(
[
{"Key": "project_slug", "Value": new_member.project_slug},
{"Key": "project_semester", "Value": new_member.project_semester},
+ {"Key": "course_iteration_tag", "Value": "no-rights"},
],
)
# Repeatedly check status of new member account request.
@@ -187,6 +188,7 @@ def create_and_move_accounts(
try:
self.api_talker.move_account(account_id, root_id, destination_ou_id)
self.accounts_moved += 1
+ self.api_talker.untag_resource(account_id, ["course_iteration_tag"])
except ClientError as error:
self.logger.debug(f"Failed to move account with e-mail: {new_member.project_email}.")
self.logger.debug(error)
From f5974ceb8da79efea6029ec70efb7b2135cfe491 Mon Sep 17 00:00:00 2001
From: 1058274 <70607431+1058274@users.noreply.github.com>
Date: Tue, 6 Jun 2023 10:20:57 +0000
Subject: [PATCH 29/32] Documentation AWS integration feature (#70)
* Add initial draft AWS integration documentation
* Add pipeline flowchart and remove create organization
* Apply review changes
---
README.md | 67 ++++++++++++++++++++++--
resources/pipeline-flowchart.drawio.png | Bin 0 -> 130607 bytes
2 files changed, 64 insertions(+), 3 deletions(-)
create mode 100644 resources/pipeline-flowchart.drawio.png
diff --git a/README.md b/README.md
index e2561a30..7cb75dee 100644
--- a/README.md
+++ b/README.md
@@ -16,8 +16,9 @@ This is the code for the website of [GiPHouse](http://giphouse.nl/) powered by [
- [Questionnaires](#questionnaires)
- [Room Reservations](#room-reservations)
- [Course, Project and Static Information](#course-project-and-static-information)
- - [Projects and Repositories](#projects-and-repositories)
+ - [Projects, Repositories and AWS](#projects-repositories-and-aws)
- [GitHub Synchronization](#github-synchronization)
+ - [AWS Synchronization](#aws-synchronization)
- [Mailing Lists](#mailing-lists)
- [Tasks](#tasks)
- [Development and Contributing](#development-and-contributing)
@@ -122,10 +123,10 @@ The room reservation is built using [FullCalendar](https://fullcalendar.io/), a
### Course, Project and Static Information
Admin users can add information about the course lectures and the projects in the backend. There are also a small amount of static HTML webpages with information about GiPHouse.
-### Projects and Repositories
+### Projects, Repositories and AWS
+#### GitHub Synchronization
The projects module provides synchronisation functionality with a GitHub organization using the [GitHub API v3](https://developer.github.com/v3/). For this, a repository model is included in Django. Project(team)s can have one or multiple repositories, which are then synchronised with GitHub. For this functionality, a [GitHub App](https://developer.github.com/v3/apps/) must be registered and installed in the organization. Details on this are explained later.
-#### GitHub Synchronization
Projects and repositories contain a field `github_team_id` and `github_repo_id` that corresponds to the respective `id` of the object on GitHub. These fields are automatically set and should not be touched under normal circumstances. Teams and repositories on GitHub that do not match one of these id's will not be touched by the GitHub synchronization.
If the `github_team_id` or `github_repo_id` are `None`, it is assumed the objects do not exist and new objects will be created on synchronization (except for archived projects and teams).
@@ -149,6 +150,66 @@ Synchronization can only be initialized via actions on specific sets of objects
Synchronization currently does not regard the role of directors of GipHouse. This needs to be configured manually. Note that it is however not possible to add directors manually to a team on GitHub, since they will be removed after each sync.
+#### AWS Synchronization
+The projects module provides synchronisation functionality with [AWS Organizations](https://aws.amazon.com/organizations/) using the official [boto3 Python AWS SDK](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html).
+The AWS synchronisation process only applies to the current semester and is one-directional (from GiPHouse to AWS, but not vice versa).
+
+Each project in the current semester with a team mailing list gets its own AWS member account that is part of GiPHouse's AWS organization.
+Since all AWS member accounts have isolated environments, each team is able to configure their own AWS environment as desired.
+The AWS member accounts are restricted in their abilities using a pre-configured [SCP policy](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_scps.html) that is applied to the course semester Organizational Unit (OU) where all team member accounts reside.
+For example, the SCP policy can be set such that only (certain types of) [EC2](https://aws.amazon.com/ec2/) instances may be launched.
+Such specific configuration details can be found under the [Deployment](#deployment) section.
+
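+As an illustration, the sketch below shows what such a restrictive SCP could look like and how it could be created and attached with boto3. The policy content, the names and the `t3.micro` instance type are example assumptions, not the production configuration (see the [Deployment](#deployment) section for the real setup).
+
+```python
+# Illustrative sketch: an example SCP that only allows launching t3.micro EC2 instances.
+import json
+
+import boto3
+
+org_client = boto3.client("organizations")
+
+scp_document = {
+    "Version": "2012-10-17",
+    "Statement": [
+        {
+            "Sid": "DenyNonMicroEC2Instances",
+            "Effect": "Deny",
+            "Action": "ec2:RunInstances",
+            "Resource": "arn:aws:ec2:*:*:instance/*",
+            "Condition": {"StringNotEquals": {"ec2:InstanceType": "t3.micro"}},
+        }
+    ],
+}
+
+policy = org_client.create_policy(
+    Content=json.dumps(scp_document),
+    Description="Example: only allow t3.micro EC2 instances.",
+    Name="ExampleRestrictEC2",
+    Type="SERVICE_CONTROL_POLICY",
+)
+policy_id = policy["Policy"]["PolicySummary"]["Id"]
+
+# Attach the SCP to the course semester OU (placeholder id).
+course_ou_id = "ou-exmp-course"
+org_client.attach_policy(PolicyId=policy_id, TargetId=course_ou_id)
+```
+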
+The entire AWS synchronization process, also referred to as the pipeline, can be initiated in the Django admin interface under Projects by pressing the large `SYNCHRONIZE PROJECTS OF THE CURRENT SEMESTER TO AWS` button at the top right. It roughly goes through the following stages (a simplified code sketch follows below):
+
+1. Preliminary checks
+ - Pipeline preconditions
+ 1. Locatable boto3 credentials and successful AWS API connection
+ 2. Check allowed AWS API actions based on IAM policy of caller
+ 3. Existing organization for AWS API caller
+ 4. AWS API caller acts under same account ID as organization's management account ID
+ 5. SCP policy type feature enabled for organization
+ - Edge case checks
+ 1. No duplicate course semester OU names
+2. Create current course semester OU (if non-existent)
+3. Attach SCP policy to current course semester OU (if non-existent)
+4. Synchronization
+ - Determine new accounts to be invited based on AWS and GiPHouse data.
+5. Create new AWS member accounts in AWS organization
+6. Move new AWS member accounts to course semester OU
+
+![AWS synchronization pipeline flowchart](resources/pipeline-flowchart.drawio.png)
+
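+The following Python sketch approximates these stages using helper methods that appear in the project's tests (`get_syncdata_from_giphouse`, `extract_aws_setup`, `get_or_create_course_ou`, `attach_policy`, `get_current_policy_id` and `create_and_move_accounts`). It illustrates the control flow only; the actual implementation may differ.
+
+```python
+# Simplified, illustrative sketch of the AWS synchronization pipeline.
+from projects.aws.awssync import AWSSync
+
+
+def run_pipeline_sketch():
+    sync = AWSSync()
+
+    # 1. Preliminary checks (credentials, permissions, existing organization,
+    #    SCP feature, duplicate OU names) are performed by dedicated check helpers.
+
+    root_id = sync.api_talker.list_roots()[0]["Id"]
+    aws_tree = sync.extract_aws_setup(root_id)
+
+    # 2./3. Get or create the current course semester OU and attach the current SCP policy.
+    course_ou_id = sync.get_or_create_course_ou(aws_tree)
+    sync.attach_policy(course_ou_id, sync.get_current_policy_id())
+
+    # 4. Determine which GiPHouse projects do not have an AWS member account yet.
+    gip_data = sync.get_syncdata_from_giphouse()
+    aws_data = aws_tree.awstree_to_syncdata_list()
+    new_members = sync.generate_aws_sync_list(gip_data, aws_data)
+
+    # 5./6. Create the new member accounts and move them into the course semester OU.
+    return sync.create_and_move_accounts(new_members, root_id, course_ou_id)
+```
+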
+After the synchronization process has finished, success or failure is indicated by a green or red response box, respectively.
+Verbose details for each synchronization run are logged using the `logging` module and can be accessed in the backend, for example to inspect the cause of a failed run.
+
+An example of a possible AWS environment in the form of a tree is the following:
+```
+root
+│
+├── Fall 2022 (OU)
+│ ├── team-alice@giphouse.nl (member account)
+│ └── team-bob@giphouse.nl (member account)
+│
+├── Spring 2023 (OU)
+│ ├── team-charlie@giphouse.nl (member account)
+│ └── team-david@giphouse.nl (member account)
+│
+└── admin@giphouse.nl (management account)
+```
+
+When an AWS member account has been created for a team mailing list as part of an AWS Organization, an e-mail is sent by AWS.
+This process might take some time and is under AWS' control.
+Be aware that gaining initial access to the member account is only possible by resetting its password; there is no other way to obtain credentials.
+Also note that every project team member will receive these e-mails, because the team mailing list acts as a one-to-many mail forwarder.
+
+By default, AWS places all newly created member accounts of an organization directly under the root; there is no way to create them elsewhere.
+Once the member accounts have been created, they are moved to the current course semester OU.
+Unfortunately, AWS does not specify an upper bound on how long it takes to finalize a new member account request.
+This means there can be a window in which a newly created member account sits under the root, not yet moved to its SCP-restricted course semester OU, during which it could have excessive permissions.
+To mitigate this risk, every newly created account carries a pre-defined [tag](https://docs.aws.amazon.com/tag-editor/latest/userguide/tagging.html), and the SCP policy attached to the root should deny all permissions for accounts under the root that carry this tag (see the [Deployment](#deployment) section for more details on SCP policy configuration).
+The tag is removed once the account has been moved to its destination OU.
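+
+In terms of raw boto3 calls, this quarantine flow looks roughly like the sketch below. The e-mail address, names and IDs are placeholders, and the actual synchronisation goes through the project's `api_talker` wrapper rather than calling boto3 directly.
+
+```python
+# Illustrative sketch of the tag-based quarantine flow; all values are placeholders.
+import time
+
+import boto3
+
+org_client = boto3.client("organizations")
+
+# New member accounts are created with the quarantine tag already attached.
+status = org_client.create_account(
+    Email="team-example@giphouse.nl",
+    AccountName="example-project",
+    Tags=[
+        {"Key": "project_slug", "Value": "example-project"},
+        {"Key": "project_semester", "Value": "Spring 2023"},
+        {"Key": "course_iteration_tag", "Value": "no-rights"},
+    ],
+)["CreateAccountStatus"]
+
+# Account creation is asynchronous: poll until the request has been finalized.
+while status["State"] == "IN_PROGRESS":
+    time.sleep(5)
+    status = org_client.describe_create_account_status(
+        CreateAccountRequestId=status["Id"]
+    )["CreateAccountStatus"]
+
+if status["State"] == "SUCCEEDED":
+    account_id = status["AccountId"]
+    # Move the account into the course semester OU and lift the quarantine tag.
+    org_client.move_account(
+        AccountId=account_id,
+        SourceParentId="r-exmp",               # placeholder: organization root id
+        DestinationParentId="ou-exmp-course",  # placeholder: course semester OU id
+    )
+    org_client.untag_resource(ResourceId=account_id, TagKeys=["course_iteration_tag"])
+```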
+
### Mailing Lists
Admin users can create mailing lists using the Django admin interface. A mailing list can be connected to projects, users and 'extra' email addresses that are not tied to a user. Relating a mailing list to a project implicitly makes the members of that project a member of the mailing list. Removing a mailing list in the Django admin will result in the corresponding mailing list to be archived or deleted in G suite during the next synchronization, respecting the 'archive instead of delete' property of the deleted mailing list. To sync a mailing list with G Suite, one can run the management command: `./manage.py sync_mailing_list` or use the button in the model admin. This will sync all mailing lists and the automatic lists into G Suite at the specified domain.
diff --git a/resources/pipeline-flowchart.drawio.png b/resources/pipeline-flowchart.drawio.png
new file mode 100644
index 0000000000000000000000000000000000000000..ec7e59812ca502a7ea03d0741a079a65e1a940cd
GIT binary patch
[130607 bytes of base85-encoded binary PNG data omitted]