diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..23d3c98f --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,43 @@ +# refer: https://github.com/dependabot/dependabot-core/blob/main/.github/dependabot.yml +version: 2 +updates: + # Maintain dependencies for GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions once a week + interval: "weekly" + day: "wednesday" + time: "03:00" + timezone: "Europe/London" + + # python + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" + day: "wednesday" + time: "03:00" + timezone: "Europe/London" + groups: + common: + patterns: + - hashin + - cython + - flake8 + # Keep the package managers themselves separate because they are higher risk + # and also higher visibility--ie, users generally want latest, so we don't + # want breakage in the `common` group to prevent updating package manager versions + pip: + patterns: + - pip + pip-tools: + patterns: + - pip-tools + pipenv: + patterns: + - pipfile + - pipenv + poetry: + patterns: + - poetry diff --git a/README.rst b/README.rst index 10525804..e0248177 100644 --- a/README.rst +++ b/README.rst @@ -237,7 +237,7 @@ Example usage: -u https://app.datatrails.ai \ --client-id \ --client-secret \ - functests/test_resources/richness_story.yaml + functests/test_resources/subjects_story.yaml Example Yaml Snippet @@ -251,7 +251,7 @@ the yaml representation of the request body for an asset or event. The confirm: field is a control variable for the PythonSDK that ensures that the asset or event is confirmed before returning. This is optional and is only required -3rd parties need to immediately retrieve and cryptographically +if 3rd parties need to immediately retrieve and cryptographically verify your Assets, which can take a few seconds to propagate. It is typically not necessary to wait unless your workflow involves near-real-time communication with 3rd parties and the 3rd party needs instant cryptographic verification of your new Asset. @@ -269,9 +269,6 @@ verification of your new Asset. .. code:: yaml --- - # Demonstration of applying a Richness compliance policy to an asset that undergoes - # events that may or may not make the asset compliant or non-compliant. - # # The operation field is a string that represents the method bound to an endpoint and # the args and kwargs correspond to the arguments to such a method.
# diff --git a/archivist/archivist.py b/archivist/archivist.py index 36babfd5..4e22ea9b 100644 --- a/archivist/archivist.py +++ b/archivist/archivist.py @@ -47,8 +47,6 @@ from .assetattachments import _AssetAttachmentsClient from .assets import _AssetsRestricted from .attachments import _AttachmentsClient -from .compliance import _ComplianceClient -from .compliance_policies import _CompliancePoliciesClient from .composite import _CompositeClient from .confirmer import MAX_TIME from .constants import ( @@ -96,8 +94,6 @@ class Archivist(ArchivistPublic): # pylint: disable=too-many-instance-attribute "appidp": _AppIDPClient, "applications": _ApplicationsClient, "attachments": _AttachmentsClient, - "compliance": _ComplianceClient, - "compliance_policies": _CompliancePoliciesClient, "composite": _CompositeClient, "events": _EventsRestricted, "locations": _LocationsClient, @@ -144,8 +140,6 @@ def __init__( self.assets: _AssetsRestricted self.assetattachments: _AssetAttachmentsClient self.attachments: _AttachmentsClient - self.compliance: _ComplianceClient - self.compliance_policies: _CompliancePoliciesClient self.composite: _CompositeClient self.events: _EventsRestricted self.locations: _LocationsClient diff --git a/archivist/compliance.py b/archivist/compliance.py deleted file mode 100644 index 27ec03e2..00000000 --- a/archivist/compliance.py +++ /dev/null @@ -1,121 +0,0 @@ -"""Compliance interface - - Access to the compliance endpoint. - - The user is not expected to use this class directly. It is an attribute of the - :class:`Archivist` class. - - For example instantiate an Archivist instance and execute the methods of the class: - - .. code-block:: python - - with open(".auth_token", mode="r") as tokenfile: - authtoken = tokenfile.read().strip() - - # Initialize connection to Archivist - arch = Archivist( - "https://app.datatrails.ai", - authtoken, - ) - asset = arch.compliance.compliant_at(...) - -""" - -from logging import getLogger -from typing import TYPE_CHECKING, Any - -if TYPE_CHECKING: - # pylint:disable=cyclic-import # but pylint doesn't understand this feature - from .archivist import Archivist - -from .constants import ( - COMPLIANCE_LABEL, - COMPLIANCE_SUBPATH, -) - -LOGGER = getLogger(__name__) - - -class Compliance(dict): - """Compliance - - Compliance object has dictionary of all the compliance attributes. - - """ - - -# pylint: disable=too-few-public-methods -class _ComplianceClient: # pylint: disable=too-few-public-methods - """ComplianceClient - - Access to compliance entities using CRUD interface. This class is usually - accessed as an attribute of the Archivist class. - - Args: - archivist (Archivist): :class:`Archivist` instance - - """ - - def __init__(self, archivist_instance: "Archivist"): - self._archivist = archivist_instance - self._subpath = f"{archivist_instance.root}/{COMPLIANCE_SUBPATH}" - self._label = f"{self._subpath}/{COMPLIANCE_LABEL}" - - def __str__(self) -> str: - return f"ComplianceClient({self._archivist.url})" - - def compliant_at( - self, - asset_id, - *, - compliant_at: "bool|None" = None, - report: "str|None" = None, - ) -> Compliance: - """ - Reads compliance of a particular asset. - - Args: - asset_id (str): asset identity e.g. assets/xxxxxxxxxxxxxxxxxxxxxxx - compliant_at (str): datetime to check compliance at a particular time (optional). - format: rfc3339 - UTC only - https://datatracker.ietf.org/doc/html/rfc3339#section-4.1 - report (bool): if true output report - page_size (int): optional page size. (Rarely used). 
- - Returns: - :class:`Compliance` instance - - """ - params = {"compliant_at": compliant_at} if compliant_at is not None else None - response = self._archivist.get( - f"{self._label}/{asset_id}", - params=params, - ) - if report is True: - self.compliant_at_report(response) - return Compliance(**response) - - def compliant_at_report(self, compliance: "dict[str, Any]"): - """ - Prints report of compliance_at request - - Args: - compliance (dict): compliance object encapsulating response from compliant_at - """ - - LOGGER.info("Compliant %s", compliance["compliant"]) - for outcome in compliance["compliance"]: - if outcome["compliant"]: - continue - - # get the compliance policy - policy = self._archivist.compliance_policies.read( - outcome["compliance_policy_identity"] - ) - - # print the policy name and the reason - LOGGER.info( - "NON-COMPLIANCE -> Policy: %s: Reason %s", - policy["display_name"], - outcome["reason"], - ) diff --git a/archivist/compliance_policies.py b/archivist/compliance_policies.py deleted file mode 100644 index b7220946..00000000 --- a/archivist/compliance_policies.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Compliance Policies interface - - Access to the compliance_policies endpoint. - - The user is not expected to use this class directly. It is an attribute of the - :class:`Archivist` class. - - For example instantiate an Archivist instance and execute the methods of the class: - - .. code-block:: python - - with open(".auth_token", mode="r") as tokenfile: - authtoken = tokenfile.read().strip() - - # Initialize connection to Archivist - arch = Archivist( - "https://app.datatrails.ai", - authtoken, - ) - - # A 'Since' policy - asset = arch.compliance_policies.create( - ComplianceTypeSince(...) - ) - -""" - -from copy import deepcopy -from logging import getLogger -from typing import TYPE_CHECKING, Any, Union - -if TYPE_CHECKING: - # pylint:disable=cyclic-import # but pylint doesn't understand this feature - from .archivist import Archivist - from .compliance_policy_requests import ( - CompliancePolicyCurrentOutstanding, - CompliancePolicyDynamicTolerance, - CompliancePolicyPeriodOutstanding, - CompliancePolicyRichness, - CompliancePolicySince, - ) - -from .constants import ( - COMPLIANCE_POLICIES_LABEL, - COMPLIANCE_POLICIES_SUBPATH, -) -from .dictmerge import _deepmerge - -LOGGER = getLogger(__name__) - - -class CompliancePolicy(dict): - """CompliancePolicy - - CompliancePolicy object has dictionary of all the compliance policy attributes. - - """ - - @property - def name(self): - """str: name of the compliance policy""" - return self.get("display_name") - - -class _CompliancePoliciesClient: - """CompliancePoliciesClient - - Access to compliance policy entities using CRUD interface. This class is usually - accessed as an attribute of the Archivist class. 
- - Args: - archivist (Archivist): :class:`Archivist` instance - - """ - - def __init__(self, archivist_instance: "Archivist"): - self._archivist = archivist_instance - self._subpath = f"{archivist_instance.root}/{COMPLIANCE_POLICIES_SUBPATH}" - self._label = f"{self._subpath}/{COMPLIANCE_POLICIES_LABEL}" - - def __str__(self) -> str: - return f"CompliancePoliciesClient({self._archivist.url})" - - def create( - self, - policy: Union[ - "CompliancePolicySince", - "CompliancePolicyCurrentOutstanding", - "CompliancePolicyPeriodOutstanding", - "CompliancePolicyDynamicTolerance", - "CompliancePolicyRichness", - ], - ) -> CompliancePolicy: - """Create A compliance policy - - Args: - policy (CompliancePolicy): the policy object. - One of: - CompliancePolicySince - CompliancePolicyCurrentOutstanding - CompliancePolicyPeriodOutstanding - CompliancePolicyDynamicTolerance - CompliancePolicyRichness - - Returns: - :class:`CompliancePolicy` instance - - """ - return self.create_from_data(policy.dict()) - - def create_from_data(self, data: "dict[str, Any]") -> "CompliancePolicy": - """Create compliance_policy - - Creates compliance_policy with request body from data stream. - Suitable for reading data from a file using json.load or yaml.load - - Args: - data (dict): request body of compliance_policy. - - Returns: - :class:`CompliancePolicy` instance - - """ - return CompliancePolicy(**self._archivist.post(self._label, data)) - - def read(self, identity: str) -> CompliancePolicy: - """Read compliance policy - - Reads compliance policy. - - Args: - identity (str): compliance policy identity - e.g. compliance_policies/xxxxxxxxxxxxxxxxxxxxxxx - - Returns: - :class:`CompliancePolicy` instance - - """ - return CompliancePolicy(**self._archivist.get(f"{self._subpath}/{identity}")) - - def delete(self, identity: str) -> "dict[str, Any]": - """Delete Compliance Policy - - Deletes compliance policy. - - Args: - identity (str): compliance policy identity - e.g. compliance_policies/xxxxxxxxxxxxxxxxxxxxxxx - - Returns: - :class:`CompliancePolicy` instance - empty? - - """ - return self._archivist.delete(f"{self._subpath}/{identity}") - - def __params(self, props: "dict[str, Any]|None") -> "dict[str, Any]": - params = deepcopy(props) if props else {} - # pylint: disable=protected-access - return _deepmerge( - self._archivist.fixtures.get(COMPLIANCE_POLICIES_LABEL), params - ) - - def count(self, *, props: "dict[str, Any]|None" = None) -> int: - """Count compliance policies. - - Counts number of compliance policies that match criteria. - - Args: - props (dict): e.g. {"compliance_type": "COMPLIANCE_RICHNESS" } - - Returns: - integer count of compliance policies. - - """ - return self._archivist.count( - self._label, - params=self.__params(props), - ) - - def list( - self, *, page_size: "int|None" = None, props: "dict[str, Any]|None" = None - ): - """List compliance policies. - - Lists compliance policies that match criteria. - - Args: - props (dict): optional e.g. {"compliance_type": "COMPLIANCE_DYNAMIC_TOLERANCE" } - page_size (int): optional page size. (Rarely used). - - Returns: - iterable that returns :class:`CompliancePolicy` instances - - """ - return ( - CompliancePolicy(**a) - for a in self._archivist.list( - self._label, - COMPLIANCE_POLICIES_LABEL, - page_size=page_size, - params=self.__params(props), - ) - ) - - def read_by_signature(self, *, props: "dict[str, Any]|None" = None): - """Read compliance policy by signature. - - Reads compliance policy that meets criteria. Only one compliance policy is expected. 
- - Args: - props (dict): e.g. {"display_name": "foo" } - - Returns: - :class:`CompliancePolicy` instance - - """ - return CompliancePolicy( - **self._archivist.get_by_signature( - self._label, - COMPLIANCE_POLICIES_LABEL, - params=self.__params(props), - ) - ) diff --git a/archivist/compliance_policy_requests.py b/archivist/compliance_policy_requests.py deleted file mode 100644 index 30b0c6e9..00000000 --- a/archivist/compliance_policy_requests.py +++ /dev/null @@ -1,99 +0,0 @@ -"""Archivist Compliance Policy Requests - - Dataclasses that represent the different types of compliance policies - -""" - -from dataclasses import asdict, dataclass - -from .compliance_policy_type import CompliancePolicyType -from .or_dict import and_list - -# NB: the order of the fields is important. Fields with default values must -# appear after fields without. This is why the compliance_type is last -# in every case. - - -@dataclass(frozen=True) -class CompliancePolicyBase: - """ - Compliance policy base definition - """ - - description: str - display_name: str - asset_filter: "list[list]" - - def dict(self): - """Emit dictionary representation""" - d = asdict(self) - d["asset_filter"] = and_list(d["asset_filter"]) - return d - - -@dataclass(frozen=True) -class CompliancePolicySince(CompliancePolicyBase): - """ - Compliance policy that indicates if an event has 'expired' - """ - - event_display_type: str - time_period_seconds: int - compliance_type: str = CompliancePolicyType.COMPLIANCE_SINCE.name - - -@dataclass(frozen=True) -class CompliancePolicyCurrentOutstanding(CompliancePolicyBase): - """ - Compliance policy that indicates if an event has been 'closed' - """ - - event_display_type: str - closing_event_display_type: str - compliance_type: str = CompliancePolicyType.COMPLIANCE_CURRENT_OUTSTANDING.name - - -@dataclass(frozen=True) -class CompliancePolicyPeriodOutstanding(CompliancePolicyBase): - """ - Compliance policy that indicates if an event has been 'closed' within - a specified time - """ - - event_display_type: str - closing_event_display_type: str - time_period_seconds: int - compliance_type: str = CompliancePolicyType.COMPLIANCE_PERIOD_OUTSTANDING.name - - -@dataclass(frozen=True) -class CompliancePolicyDynamicTolerance(CompliancePolicyBase): - """ - Compliance policy that indicates if the average time between opening - and closing events in a specified period of time does not exceed a - specified number of standard deviations from the mean. - """ - - event_display_type: str - closing_event_display_type: str - dynamic_window: int - dynamic_variability: float - compliance_type: str = CompliancePolicyType.COMPLIANCE_DYNAMIC_TOLERANCE.name - - -@dataclass(frozen=True) -class CompliancePolicyRichness(CompliancePolicyBase): - """ - Compliance policy that indicates if an asset has an attribute that - complies with a set of assertions. - """ - - richness_assertions: "list[list]" - compliance_type: str = CompliancePolicyType.COMPLIANCE_RICHNESS.name - - def dict(self): - """Emit dictionary representation""" - d = asdict(self) - d["asset_filter"] = and_list(d["asset_filter"]) - d["richness_assertions"] = and_list(d["richness_assertions"]) - return d diff --git a/archivist/compliance_policy_type.py b/archivist/compliance_policy_type.py deleted file mode 100644 index e603a5fa..00000000 --- a/archivist/compliance_policy_type.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Archivist Compliance Policy Type - - Enumerated type that allows user to select the compliance policy type when - creating a compliance policy. 
- -""" - -from enum import Enum - - -class CompliancePolicyType(Enum): - """ - Enumerate types of compliance policy - """ - - COMPLIANCE_TYPE_UNDEFINED = 0 - #: Time since specific event for specified period - COMPLIANCE_SINCE = 1 - #: Unresolved event currently on asset (e.g. vulnerability) - COMPLIANCE_CURRENT_OUTSTANDING = 2 - #: No unresolved events for longer than specified period - COMPLIANCE_PERIOD_OUTSTANDING = 3 - #: dynamic tolerance with dynamic window etc.. - COMPLIANCE_DYNAMIC_TOLERANCE = 4 - #: Compliance on comparison of asset attribute value to predefined comparator - COMPLIANCE_RICHNESS = 5 diff --git a/archivist/confirmer.py b/archivist/confirmer.py index 82b7d4f3..14aff058 100644 --- a/archivist/confirmer.py +++ b/archivist/confirmer.py @@ -9,8 +9,6 @@ if TYPE_CHECKING: # pylint:disable=cyclic-import # but pylint doesn't understand this feature - from backoff._typing import Details - from .assets import Asset, _AssetsPublic, _AssetsRestricted from .events import Event, _EventsPublic, _EventsRestricted @@ -34,7 +32,7 @@ def __lookup_max_time(): return MAX_TIME -def __on_giveup_confirmation(details: "Details"): +def __on_giveup_confirmation(details): identity: str = details["args"][1] elapsed: float = details["elapsed"] raise ArchivistUnconfirmedError( @@ -111,7 +109,7 @@ def _wait_for_confirmation(self: Managers, identity: str) -> ReturnTypes: return None # pyright: ignore -def __on_giveup_confirmed(details: "Details"): +def __on_giveup_confirmed(details): self: PrivateManagers = details["args"][0] count = self.pending_count elapsed: float = details["elapsed"] diff --git a/archivist/constants.py b/archivist/constants.py index 2a7a4418..b08ccb2e 100644 --- a/archivist/constants.py +++ b/archivist/constants.py @@ -56,12 +56,6 @@ ACCESS_POLICIES_SUBPATH = "iam/v1" ACCESS_POLICIES_LABEL = "access_policies" -COMPLIANCE_SUBPATH = "v1" -COMPLIANCE_LABEL = "compliance" - -COMPLIANCE_POLICIES_SUBPATH = "v1" -COMPLIANCE_POLICIES_LABEL = "compliance_policies" - LOCATIONS_SUBPATH = "v2" LOCATIONS_LABEL = "locations" diff --git a/archivist/notebooks/Check Asset Compliance using CURRENT OUTSTANDING Policy.ipynb b/archivist/notebooks/Check Asset Compliance using CURRENT OUTSTANDING Policy.ipynb deleted file mode 100644 index cdf29c18..00000000 --- a/archivist/notebooks/Check Asset Compliance using CURRENT OUTSTANDING Policy.ipynb +++ /dev/null @@ -1,611 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "d89c5225", - "metadata": {}, - "source": [ - "Create Asset Compliance: CURRENT_OUTSTANDING policy\n", - "-------------------------------------------------------------------------------------" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "c85b9e96", - "metadata": {}, - "outputs": [], - "source": [ - "# Define a compliance policy that alerts when an asset spends too long in a bad state.\n", - "\n", - "# Main function establishes a connection to DataTrails using an App Registration then uses that\n", - "# to create an access policy, test it in good and bad states, then cleans up." 
- ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "001b25e3", - "metadata": {}, - "outputs": [], - "source": [ - "from json import dumps as json_dumps\n", - "from os import getenv\n", - "from time import sleep\n", - "from uuid import uuid4\n", - "from warnings import filterwarnings\n", - "\n", - "from archivist.archivist import Archivist\n", - "from archivist.compliance_policy_requests import CompliancePolicyCurrentOutstanding\n", - "from archivist.constants import ASSET_BEHAVIOURS\n", - "from archivist.logger import set_logger\n", - "\n", - "filterwarnings(\"ignore\", message=\"Unverified HTTPS request\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "e4b24e8c", - "metadata": {}, - "outputs": [], - "source": [ - "%reload_ext dotenv\n", - "%dotenv -o notebooks.env" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "92835114", - "metadata": {}, - "outputs": [], - "source": [ - "# URL, CLIENT, SECRET are environment variables that represent connection parameters.\n", - "#\n", - "# URL = represents the url to the DataTrails application\n", - "# CLIENT = represents the client ID from an Application Registration\n", - "# SECRET = represents the client secret from an Application Registration\n", - "DATATRAILS_URL = getenv(\"DATATRAILS_URL\")\n", - "DATATRAILS_APPREG_CLIENT = getenv(\"DATATRAILS_APPREG_CLIENT\")\n", - "DATATRAILS_APPREG_SECRET = getenv(\"DATATRAILS_APPREG_SECRET\")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "d85dcf4c", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Connecting to DATATRAILS\n", - "DATATRAILS_URL https://app.datatrails.ai\n" - ] - } - ], - "source": [ - "\"\"\"\n", - "Main function of Asset and Event creation.\n", - "\n", - "* Connect to DataTrails with client ID and client secret\n", - "* Creates an Asset and two Events\n", - "* Prints response of Asset and Event creation\n", - "\"\"\"\n", - "\n", - "# Optional call to set the logger level. The argument can be either\n", - "# \"INFO\" or \"DEBUG\". 
For more sophisticated logging control see our\n", - "# documentation.\n", - "set_logger(\"INFO\")\n", - "\n", - "# Initialize connection to DATATRAILS\n", - "print(\"Connecting to DATATRAILS\")\n", - "print(\"DATATRAILS_URL\", DATATRAILS_URL)\n", - "arch = Archivist(\n", - " DATATRAILS_URL, (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), max_time=300\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "acdf240c", - "metadata": {}, - "outputs": [], - "source": [ - "def create_compliance_policy(arch):\n", - " \"\"\"Compliance policy which notices when process steps are\n", - " not executed - eg 'you must close the door after you open it'\n", - " or 'candidate software build must be approved before release'\n", - "\n", - " This example creates a policy that requires doors to be closed\n", - " after they are opened.\n", - " \"\"\"\n", - " compliance_policy = arch.compliance_policies.create(\n", - " CompliancePolicyCurrentOutstanding(\n", - " description=\"Vault doors should be closed according to site security policy section Phys.Integ.02\",\n", - " display_name=\"Phys.Integ.02\",\n", - " asset_filter=[\n", - " [\"attributes.arc_display_type=Vault Door\"],\n", - " ],\n", - " event_display_type=\"Open\",\n", - " closing_event_display_type=\"Close\",\n", - " )\n", - " )\n", - " print(\"CURRENT_OUTSTANDING_POLICY:\", json_dumps(compliance_policy, indent=4))\n", - " return compliance_policy" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "8889c68d", - "metadata": {}, - "outputs": [], - "source": [ - "def create_door(arch):\n", - " \"\"\"\n", - " Creates an Asset record to track a particular door.\n", - " \"\"\"\n", - "\n", - " door, _ = arch.assets.create_if_not_exists(\n", - " {\n", - " \"selector\": [\n", - " {\n", - " \"attributes\": [\n", - " \"arc_display_name\",\n", - " \"arc_display_type\",\n", - " ]\n", - " },\n", - " ],\n", - " \"behaviours\": ASSET_BEHAVIOURS,\n", - " \"attributes\": {\n", - " \"arc_display_name\": \"Gringott's Vault 2\",\n", - " \"arc_description\": \"Main door to the second level security vault in Gringott's Wizarding Bank\",\n", - " \"arc_display_type\": \"Vault Door\",\n", - " },\n", - " },\n", - " )\n", - " print(\"DOOR:\", json_dumps(door, indent=4))\n", - " return door" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "ba24d143", - "metadata": {}, - "outputs": [], - "source": [ - "def open_door(arch, door, tag):\n", - " \"\"\"\n", - " Open the vault door\n", - " \"\"\"\n", - " door_opened = arch.events.create(\n", - " door[\"identity\"],\n", - " {\n", - " \"operation\": \"Record\",\n", - " \"behaviour\": \"RecordEvidence\",\n", - " },\n", - " {\n", - " \"arc_description\": \"Open the door for Lucius Malfoy\",\n", - " \"arc_display_type\": \"Open\",\n", - " \"arc_correlation_value\": f\"{tag}\",\n", - " },\n", - " )\n", - " print(\"DOOR_OPENED:\", json_dumps(door_opened, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "bde8fc72", - "metadata": {}, - "outputs": [], - "source": [ - "def close_door(arch, door, tag):\n", - " \"\"\"\n", - " Close the vault door\n", - " \"\"\"\n", - " door_closed = arch.events.create(\n", - " door[\"identity\"],\n", - " {\n", - " \"operation\": \"Record\",\n", - " \"behaviour\": \"RecordEvidence\",\n", - " },\n", - " {\n", - " \"arc_description\": \"Closed the door after Lucius Malfoy exited the vault\",\n", - " \"arc_display_type\": \"Close\",\n", - " \"arc_correlation_value\": f\"{tag}\",\n", - " },\n", - " )\n", - " 
print(\"DOOR_CLOSED:\", json_dumps(door_closed, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "b7482420", - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Tag for this run: 5597da73-19e4-448b-80c0-33c79253961a\n" - ] - } - ], - "source": [ - "# Compliance policies with related events (eg open/close, order/ship/deliver\n", - "# type situations) require events to be linked through a correlation value.\n", - "# In many cases this will be obvious (a CVE tag for vulnerability management,\n", - "# or a works ticket number for maintenance, or even a timestamp) but here\n", - "# we'll just make a UUID to make sure it's unique and this test is repeatable\n", - "tag = uuid4()\n", - "print(f\"Tag for this run: {tag}\")" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "bb5b0651", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Refresh token\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CURRENT_OUTSTANDING_POLICY: {\n", - " \"identity\": \"compliance_policies/253a31f2-ef9f-44ba-8940-4dafd23e32f9\",\n", - " \"compliance_type\": \"COMPLIANCE_CURRENT_OUTSTANDING\",\n", - " \"description\": \"Vault doors should be closed according to site security policy section Phys.Integ.02\",\n", - " \"display_name\": \"Phys.Integ.02\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Vault Door\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Open\",\n", - " \"closing_event_display_type\": \"Close\",\n", - " \"time_period_seconds\": \"0\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n", - "compliance_policy {\n", - " \"identity\": \"compliance_policies/253a31f2-ef9f-44ba-8940-4dafd23e32f9\",\n", - " \"compliance_type\": \"COMPLIANCE_CURRENT_OUTSTANDING\",\n", - " \"description\": \"Vault doors should be closed according to site security policy section Phys.Integ.02\",\n", - " \"display_name\": \"Phys.Integ.02\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Vault Door\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Open\",\n", - " \"closing_event_display_type\": \"Close\",\n", - " \"time_period_seconds\": \"0\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n" - ] - } - ], - "source": [ - "# make a compliance policy that alerts when doors are left open\n", - "compliance_policy = create_compliance_policy(arch)\n", - "print(\"compliance_policy\", json_dumps(compliance_policy, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "f8ac6dda", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "asset with selector {},{'arc_display_name': \"Gringott's Vault 2\", 'arc_display_type': 'Vault Door'} already exists\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "DOOR: {\n", - " \"identity\": \"assets/652b3fdf-736e-455f-81ec-fed5088a351c\",\n", - " \"behaviours\": [\n", - " \"AssetCreator\",\n", - " \"RecordEvidence\",\n", - " \"Builtin\"\n", - " ],\n", - " \"attributes\": {\n", - " \"arc_display_name\": \"Gringott's Vault 2\",\n", - " \"arc_display_type\": \"Vault Door\",\n", - " \"arc_description\": \"Main door to the second level security vault in 
Gringott's Wizarding Bank\"\n", - " },\n", - " \"confirmation_status\": \"CONFIRMED\",\n", - " \"tracked\": \"TRACKED\",\n", - " \"owner\": \"0xe889E67FdBa658C6f27ccBDa98D9d1B5500Dbbce\",\n", - " \"at_time\": \"2023-01-16T11:51:30Z\",\n", - " \"storage_integrity\": \"TENANT_STORAGE\",\n", - " \"chain_id\": \"827586838445807967\",\n", - " \"public\": false,\n", - " \"tenant_identity\": \"tenant/9bfb80ee-81f6-40dc-b5c7-1c7fb2fb9866\"\n", - "}\n", - "gringotts_vault {\n", - " \"identity\": \"assets/652b3fdf-736e-455f-81ec-fed5088a351c\",\n", - " \"behaviours\": [\n", - " \"AssetCreator\",\n", - " \"RecordEvidence\",\n", - " \"Builtin\"\n", - " ],\n", - " \"attributes\": {\n", - " \"arc_display_name\": \"Gringott's Vault 2\",\n", - " \"arc_display_type\": \"Vault Door\",\n", - " \"arc_description\": \"Main door to the second level security vault in Gringott's Wizarding Bank\"\n", - " },\n", - " \"confirmation_status\": \"CONFIRMED\",\n", - " \"tracked\": \"TRACKED\",\n", - " \"owner\": \"0xe889E67FdBa658C6f27ccBDa98D9d1B5500Dbbce\",\n", - " \"at_time\": \"2023-01-16T11:51:30Z\",\n", - " \"storage_integrity\": \"TENANT_STORAGE\",\n", - " \"chain_id\": \"827586838445807967\",\n", - " \"public\": false,\n", - " \"tenant_identity\": \"tenant/9bfb80ee-81f6-40dc-b5c7-1c7fb2fb9866\"\n", - "}\n" - ] - } - ], - "source": [ - "# create an asset that matches the assets_filter field in the\n", - "# compliance policy.\n", - "gringotts_vault = create_door(arch)\n", - "print(\"gringotts_vault\", json_dumps(gringotts_vault, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "d93be01f", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "DOOR_OPENED: {\n", - " \"identity\": \"assets/652b3fdf-736e-455f-81ec-fed5088a351c/events/f4355906-bd65-4d3a-b726-4967936012dd\",\n", - " \"asset_identity\": \"assets/652b3fdf-736e-455f-81ec-fed5088a351c\",\n", - " \"event_attributes\": {\n", - " \"arc_display_type\": \"Open\",\n", - " \"arc_correlation_value\": \"5597da73-19e4-448b-80c0-33c79253961a\",\n", - " \"arc_description\": \"Open the door for Lucius Malfoy\"\n", - " },\n", - " \"asset_attributes\": {},\n", - " \"operation\": \"Record\",\n", - " \"behaviour\": \"RecordEvidence\",\n", - " \"timestamp_declared\": \"2023-01-16T11:51:36Z\",\n", - " \"timestamp_accepted\": \"2023-01-16T11:51:36Z\",\n", - " \"timestamp_committed\": \"2023-01-16T11:51:36.744394895Z\",\n", - " \"principal_declared\": {\n", - " \"issuer\": \"https://app.datatrails.ai/appidpv1\",\n", - " \"subject\": \"437bd138-dade-4346-aadd-dfdfee51ddf4\",\n", - " \"display_name\": \"Test Notebooks\",\n", - " \"email\": \"\"\n", - " },\n", - " \"principal_accepted\": {\n", - " \"issuer\": \"https://app.datatrails.ai/appidpv1\",\n", - " \"subject\": \"437bd138-dade-4346-aadd-dfdfee51ddf4\",\n", - " \"display_name\": \"Test Notebooks\",\n", - " \"email\": \"\"\n", - " },\n", - " \"confirmation_status\": \"CONFIRMED\",\n", - " \"transaction_id\": \"\",\n", - " \"block_number\": 0,\n", - " \"transaction_index\": 0,\n", - " \"from\": \"0xe889E67FdBa658C6f27ccBDa98D9d1B5500Dbbce\",\n", - " \"tenant_identity\": \"tenant/9bfb80ee-81f6-40dc-b5c7-1c7fb2fb9866\"\n", - "}\n", - "COMPLIANCE (should be false): {\n", - " \"compliant\": false,\n", - " \"compliance\": [\n", - " {\n", - " \"compliance_policy_identity\": \"compliance_policies/253a31f2-ef9f-44ba-8940-4dafd23e32f9\",\n", - " \"compliant\": false,\n", - " \"reason\": \"No closing event for Open\"\n", - " }\n", - " ],\n", - " 
\"next_page_token\": \"\",\n", - " \"compliant_at\": \"2023-01-16T11:51:42Z\"\n", - "}\n" - ] - } - ], - "source": [ - "# Open the door\n", - "open_door(arch, gringotts_vault, tag)\n", - "\n", - "# Check compliance: should fail because the door is open\n", - "sleep(5)\n", - "compliance_nok = arch.compliance.compliant_at(\n", - " gringotts_vault[\"identity\"],\n", - ")\n", - "print(\"COMPLIANCE (should be false):\", json_dumps(compliance_nok, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "6e304daa", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "DOOR_CLOSED: {\n", - " \"identity\": \"assets/652b3fdf-736e-455f-81ec-fed5088a351c/events/e1d5c641-c1a4-4a8c-b07b-621ff054e86c\",\n", - " \"asset_identity\": \"assets/652b3fdf-736e-455f-81ec-fed5088a351c\",\n", - " \"event_attributes\": {\n", - " \"arc_correlation_value\": \"5597da73-19e4-448b-80c0-33c79253961a\",\n", - " \"arc_description\": \"Closed the door after Lucius Malfoy exited the vault\",\n", - " \"arc_display_type\": \"Close\"\n", - " },\n", - " \"asset_attributes\": {},\n", - " \"operation\": \"Record\",\n", - " \"behaviour\": \"RecordEvidence\",\n", - " \"timestamp_declared\": \"2023-01-16T11:51:42Z\",\n", - " \"timestamp_accepted\": \"2023-01-16T11:51:42Z\",\n", - " \"timestamp_committed\": \"2023-01-16T11:51:42.809012247Z\",\n", - " \"principal_declared\": {\n", - " \"issuer\": \"https://app.datatrails.ai/appidpv1\",\n", - " \"subject\": \"437bd138-dade-4346-aadd-dfdfee51ddf4\",\n", - " \"display_name\": \"Test Notebooks\",\n", - " \"email\": \"\"\n", - " },\n", - " \"principal_accepted\": {\n", - " \"issuer\": \"https://app.datatrails.ai/appidpv1\",\n", - " \"subject\": \"437bd138-dade-4346-aadd-dfdfee51ddf4\",\n", - " \"display_name\": \"Test Notebooks\",\n", - " \"email\": \"\"\n", - " },\n", - " \"confirmation_status\": \"CONFIRMED\",\n", - " \"transaction_id\": \"\",\n", - " \"block_number\": 0,\n", - " \"transaction_index\": 0,\n", - " \"from\": \"0xe889E67FdBa658C6f27ccBDa98D9d1B5500Dbbce\",\n", - " \"tenant_identity\": \"tenant/9bfb80ee-81f6-40dc-b5c7-1c7fb2fb9866\"\n", - "}\n", - "COMPLIANCE (should be true): {\n", - " \"compliant\": true,\n", - " \"compliance\": [\n", - " {\n", - " \"compliance_policy_identity\": \"compliance_policies/253a31f2-ef9f-44ba-8940-4dafd23e32f9\",\n", - " \"compliant\": true,\n", - " \"reason\": \"\"\n", - " }\n", - " ],\n", - " \"next_page_token\": \"\",\n", - " \"compliant_at\": \"2023-01-16T11:51:49Z\"\n", - "}\n" - ] - } - ], - "source": [ - "# Now close the door\n", - "close_door(arch, gringotts_vault, tag)\n", - "\n", - "# Check compliance - should be OK because the door is now closed\n", - "sleep(5)\n", - "compliance_ok = arch.compliance.compliant_at(\n", - " gringotts_vault[\"identity\"],\n", - ")\n", - "print(\"COMPLIANCE (should be true):\", json_dumps(compliance_ok, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "2edac120", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "HISTORICAL COMPLIANCE (should be false): {\n", - " \"compliant\": false,\n", - " \"compliance\": [\n", - " {\n", - " \"compliance_policy_identity\": \"compliance_policies/253a31f2-ef9f-44ba-8940-4dafd23e32f9\",\n", - " \"compliant\": false,\n", - " \"reason\": \"No closing event for Open\"\n", - " }\n", - " ],\n", - " \"next_page_token\": \"\",\n", - " \"compliant_at\": \"2023-01-16T11:51:42Z\"\n", - "}\n" - ] - } - ], - "source": [ - "# However the 
fact that it is OK *now* is a bit of a red herring. It\n", - "# was non-compliant in the past and this may be an issue that needs to\n", - "# be verified during an investigation, insurance claim, or other dispute.\n", - "# We can check the audit history for compliance *at a point in time* and\n", - "# get a verifiable answer to the state of that asset at that time.\n", - "\n", - "# To make sure the example works with such short time frames we grab the\n", - "# time from the previous not OK compliance call, but you can choose any\n", - "# arbitrary time in a real forensic process\n", - "time_of_suspicion = compliance_nok[\"compliant_at\"]\n", - "compliance_nok = arch.compliance.compliant_at(\n", - " gringotts_vault[\"identity\"], compliant_at=time_of_suspicion\n", - ")\n", - "print(\"HISTORICAL COMPLIANCE (should be false):\", json_dumps(compliance_nok, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "40ffc716", - "metadata": {}, - "outputs": [], - "source": [ - "# finally clean up by deleting the compliance_policy\n", - "_ = arch.compliance_policies.delete(\n", - " compliance_policy[\"identity\"],\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b9aed548", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/archivist/notebooks/Check Asset Compliance using SINCE Policy.ipynb b/archivist/notebooks/Check Asset Compliance using SINCE Policy.ipynb deleted file mode 100644 index 8439540d..00000000 --- a/archivist/notebooks/Check Asset Compliance using SINCE Policy.ipynb +++ /dev/null @@ -1,485 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "8b5e7251", - "metadata": {}, - "source": [ - "Check Asset Compliance: SINCE policy\n", - "================================\n" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "c85b9e96", - "metadata": {}, - "outputs": [], - "source": [ - "# Define a compliance policy that alerts when an asset has expired.\n", - "\n", - "# Main function parses in a url to the Archivist and client credentials , which is\n", - "# a user authorization. The main function would initialize an archivist connection\n", - "# using the url and the credentials, called \"arch\", then call arch.access_policies.list()\n", - "# with suitable properties and attributes." 
- ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "001b25e3", - "metadata": {}, - "outputs": [], - "source": [ - "from json import dumps as json_dumps\n", - "from os import getenv\n", - "from time import sleep\n", - "from uuid import uuid4\n", - "from warnings import filterwarnings\n", - "\n", - "from archivist.archivist import Archivist\n", - "from archivist.compliance_policy_requests import (\n", - " CompliancePolicySince,\n", - ")\n", - "from archivist.logger import set_logger\n", - "\n", - "filterwarnings(\"ignore\", message=\"Unverified HTTPS request\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "96987aaf", - "metadata": {}, - "outputs": [], - "source": [ - "%reload_ext dotenv\n", - "%dotenv -o notebooks.env" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "a877ffed", - "metadata": {}, - "outputs": [], - "source": [ - "# URL, CLIENT, SECRET are environment variables that represent connection parameters.\n", - "#\n", - "# URL = represents the url to the DataTrails application\n", - "# CLIENT = represents the client ID from an Application Registration\n", - "# SECRET = represents the client secret from an Application Registration\n", - "DATATRAILS_URL = getenv(\"DATATRAILS_URL\")\n", - "DATATRAILS_APPREG_CLIENT = getenv(\"DATATRAILS_APPREG_CLIENT\")\n", - "DATATRAILS_APPREG_SECRET = getenv(\"DATATRAILS_APPREG_SECRET\")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "3fdc8e16", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Connecting to DATATRAILS\n", - "DATATRAILS_URL https://app.datatrails.ai\n" - ] - } - ], - "source": [ - "\"\"\"\n", - "Main function of Asset and Event creation.\n", - "\n", - "* Connect to DataTrails with client ID and client secret\n", - "* Creates an Asset and two Events\n", - "* Prints response of Asset and Event creation\n", - "\"\"\"\n", - "\n", - "# Optional call to set the logger level. The argument can be either\n", - "# \"INFO\" or \"DEBUG\". 
For more sophisticated logging control see our\n", - "# documentation.\n", - "set_logger(\"INFO\")\n", - "\n", - "# Initialize connection to DATATRAILS\n", - "print(\"Connecting to DATATRAILS\")\n", - "print(\"DATATRAILS_URL\", DATATRAILS_URL)\n", - "arch = Archivist(\n", - " DATATRAILS_URL, (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), max_time=300\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "acdf240c", - "metadata": {}, - "outputs": [], - "source": [ - "def create_compliance_policy(arch, tag):\n", - " \"\"\"Compliance policy which expires 10 seconds after a\n", - " Maintenance Performed event on a 'Traffic Light' has occurred.\n", - "\n", - " Usually the expiry time is on the order of days or weeks..\n", - "\n", - " Additionally the use of tag is simply to make this example\n", - " repeatable.\n", - " \"\"\"\n", - " compliance_policy = arch.compliance_policies.create(\n", - " CompliancePolicySince(\n", - " description=\"Maintenance should be performed every 10 seconds\",\n", - " display_name=\"Regular Maintenance of Traffic light\",\n", - " asset_filter=[\n", - " [\"attributes.arc_display_type=Traffic Light\"],\n", - " ],\n", - " event_display_type=f\"Maintenance Performed {tag}\",\n", - " time_period_seconds=10, # very short so we can test\n", - " )\n", - " )\n", - " print(\"SINCE_POLICY:\", json_dumps(compliance_policy, indent=4))\n", - " return compliance_policy" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "8889c68d", - "metadata": {}, - "outputs": [], - "source": [ - "def create_traffic_light(arch):\n", - " \"\"\"\n", - " Creates a traffic light.\n", - "\n", - " Note that arc_display_type siginfies a Traffic Light\n", - " \"\"\"\n", - "\n", - " traffic_light = arch.assets.create(\n", - " attrs={\n", - " \"arc_display_name\": \"Traffic light model 54\",\n", - " \"arc_description\": \"Traffic flow control light at A603 North East\",\n", - " \"arc_display_type\": \"Traffic Light\",\n", - " },\n", - " )\n", - " print(\"TRAFFIC_LIGHT:\", json_dumps(traffic_light, indent=4))\n", - " return traffic_light" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "ba24d143", - "metadata": {}, - "outputs": [], - "source": [ - "def perform_maintenance(arch, traffic_light, tag):\n", - " \"\"\"\n", - " Perform maintenance on traffic light\n", - " \"\"\"\n", - " maintenance_performed = arch.events.create(\n", - " traffic_light[\"identity\"],\n", - " {\n", - " \"operation\": \"Record\",\n", - " \"behaviour\": \"RecordEvidence\",\n", - " },\n", - " {\n", - " \"arc_description\": \"Maintenance performed on traffic light\",\n", - " \"arc_display_type\": f\"Maintenance Performed {tag}\",\n", - " },\n", - " )\n", - " print(\"MAINTENANCE_PERFORMED:\", json_dumps(maintenance_performed, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "b7482420", - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "tag = uuid4() # make this example repeatable" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "bb5b0651", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Refresh token\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "SINCE_POLICY: {\n", - " \"identity\": \"compliance_policies/458957bc-4da7-4cc3-b37f-43fa53abe0cc\",\n", - " \"compliance_type\": \"COMPLIANCE_SINCE\",\n", - " \"description\": \"Maintenance should be performed every 10 seconds\",\n", - " \"display_name\": \"Regular 
Maintenance of Traffic light\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Traffic Light\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Maintenance Performed a3f86bbf-737a-45d8-bd84-3d6612fb641e\",\n", - " \"closing_event_display_type\": \"\",\n", - " \"time_period_seconds\": \"10\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n", - "compliance_policy {\n", - " \"identity\": \"compliance_policies/458957bc-4da7-4cc3-b37f-43fa53abe0cc\",\n", - " \"compliance_type\": \"COMPLIANCE_SINCE\",\n", - " \"description\": \"Maintenance should be performed every 10 seconds\",\n", - " \"display_name\": \"Regular Maintenance of Traffic light\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Traffic Light\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Maintenance Performed a3f86bbf-737a-45d8-bd84-3d6612fb641e\",\n", - " \"closing_event_display_type\": \"\",\n", - " \"time_period_seconds\": \"10\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n" - ] - } - ], - "source": [ - "# make a SINCE compliance policy that alerts when the\n", - "# maintenance performed event has expired.\n", - "compliance_policy = create_compliance_policy(arch, tag)\n", - "print(\"compliance_policy\", json_dumps(compliance_policy, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "f8ac6dda", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "TRAFFIC_LIGHT: {\n", - " \"identity\": \"assets/b6f63a6d-24a1-4dd8-a6d7-21e50e603ceb\",\n", - " \"behaviours\": [\n", - " \"RecordEvidence\",\n", - " \"Builtin\",\n", - " \"AssetCreator\",\n", - " ],\n", - " \"attributes\": {\n", - " \"arc_description\": \"Traffic flow control light at A603 North East\",\n", - " \"arc_display_name\": \"Traffic light model 54\",\n", - " \"arc_display_type\": \"Traffic Light\"\n", - " },\n", - " \"confirmation_status\": \"CONFIRMED\",\n", - " \"tracked\": \"TRACKED\",\n", - " \"owner\": \"0xe889E67FdBa658C6f27ccBDa98D9d1B5500Dbbce\",\n", - " \"at_time\": \"2023-01-16T11:52:27Z\",\n", - " \"storage_integrity\": \"TENANT_STORAGE\",\n", - " \"chain_id\": \"827586838445807967\",\n", - " \"public\": false,\n", - " \"tenant_identity\": \"tenant/9bfb80ee-81f6-40dc-b5c7-1c7fb2fb9866\"\n", - "}\n", - "traffic_light {\n", - " \"identity\": \"assets/b6f63a6d-24a1-4dd8-a6d7-21e50e603ceb\",\n", - " \"behaviours\": [\n", - " \"RecordEvidence\",\n", - " \"Builtin\",\n", - " \"AssetCreator\",\n", - " ],\n", - " \"attributes\": {\n", - " \"arc_description\": \"Traffic flow control light at A603 North East\",\n", - " \"arc_display_name\": \"Traffic light model 54\",\n", - " \"arc_display_type\": \"Traffic Light\"\n", - " },\n", - " \"confirmation_status\": \"CONFIRMED\",\n", - " \"tracked\": \"TRACKED\",\n", - " \"owner\": \"0xe889E67FdBa658C6f27ccBDa98D9d1B5500Dbbce\",\n", - " \"at_time\": \"2023-01-16T11:52:27Z\",\n", - " \"storage_integrity\": \"TENANT_STORAGE\",\n", - " \"chain_id\": \"827586838445807967\",\n", - " \"public\": false,\n", - " \"tenant_identity\": \"tenant/9bfb80ee-81f6-40dc-b5c7-1c7fb2fb9866\"\n", - "}\n" - ] - } - ], - "source": [ - "# create an asset that matches the assets_filter field in the\n", - "# compliance policy.\n", - "traffic_light = create_traffic_light(arch)\n", - "print(\"traffic_light\", 
json_dumps(traffic_light, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "d93be01f", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MAINTENANCE_PERFORMED: {\n", - " \"identity\": \"assets/b6f63a6d-24a1-4dd8-a6d7-21e50e603ceb/events/eba5bb05-d4ff-4d99-9205-41236560d24d\",\n", - " \"asset_identity\": \"assets/b6f63a6d-24a1-4dd8-a6d7-21e50e603ceb\",\n", - " \"event_attributes\": {\n", - " \"arc_description\": \"Maintenance performed on traffic light\",\n", - " \"arc_display_type\": \"Maintenance Performed a3f86bbf-737a-45d8-bd84-3d6612fb641e\"\n", - " },\n", - " \"asset_attributes\": {},\n", - " \"operation\": \"Record\",\n", - " \"behaviour\": \"RecordEvidence\",\n", - " \"timestamp_declared\": \"2023-01-16T11:52:31Z\",\n", - " \"timestamp_accepted\": \"2023-01-16T11:52:31Z\",\n", - " \"timestamp_committed\": \"2023-01-16T11:52:31.599813432Z\",\n", - " \"principal_declared\": {\n", - " \"issuer\": \"https://app.datatrails.ai/appidpv1\",\n", - " \"subject\": \"437bd138-dade-4346-aadd-dfdfee51ddf4\",\n", - " \"display_name\": \"Test Notebooks\",\n", - " \"email\": \"\"\n", - " },\n", - " \"principal_accepted\": {\n", - " \"issuer\": \"https://app.datatrails.ai/appidpv1\",\n", - " \"subject\": \"437bd138-dade-4346-aadd-dfdfee51ddf4\",\n", - " \"display_name\": \"Test Notebooks\",\n", - " \"email\": \"\"\n", - " },\n", - " \"confirmation_status\": \"CONFIRMED\",\n", - " \"transaction_id\": \"\",\n", - " \"block_number\": 0,\n", - " \"transaction_index\": 0,\n", - " \"from\": \"0xe889E67FdBa658C6f27ccBDa98D9d1B5500Dbbce\",\n", - " \"tenant_identity\": \"tenant/9bfb80ee-81f6-40dc-b5c7-1c7fb2fb9866\"\n", - "}\n", - "Sleep 1 second...\n", - "COMPLIANCE (true): {\n", - " \"compliant\": true,\n", - " \"compliance\": [\n", - " {\n", - " \"compliance_policy_identity\": \"compliance_policies/458957bc-4da7-4cc3-b37f-43fa53abe0cc\",\n", - " \"compliant\": true,\n", - " \"reason\": \"\"\n", - " }\n", - " ],\n", - " \"next_page_token\": \"\",\n", - " \"compliant_at\": \"2023-01-16T11:52:33Z\"\n", - "}\n" - ] - } - ], - "source": [ - "# perform maintenance on the asset which is valid for 10 seconds.\n", - "perform_maintenance(arch, traffic_light, tag)\n", - "\n", - "# and check compliance - should be OK.\n", - "print(\"Sleep 1 second...\")\n", - "sleep(1)\n", - "compliance = arch.compliance.compliant_at(\n", - " traffic_light[\"identity\"],\n", - ")\n", - "print(\"COMPLIANCE (true):\", json_dumps(compliance, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "fe8e2a6e", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Sleep 15 seconds...\n", - "COMPLIANCE (false): {\n", - " \"compliant\": false,\n", - " \"compliance\": [\n", - " {\n", - " \"compliance_policy_identity\": \"compliance_policies/458957bc-4da7-4cc3-b37f-43fa53abe0cc\",\n", - " \"compliant\": false,\n", - " \"reason\": \"Duration 20s exceeds limit 10s\"\n", - " }\n", - " ],\n", - " \"next_page_token\": \"\",\n", - " \"compliant_at\": \"2023-01-16T11:52:52Z\"\n", - "}\n" - ] - } - ], - "source": [ - "# however waiting long enough (> 10s) will cause the asset to\n", - "# become non-compliant...\n", - "print(\"Sleep 15 seconds...\")\n", - "sleep(15)\n", - "compliance = arch.compliance.compliant_at(\n", - " traffic_light[\"identity\"],\n", - ")\n", - "print(\"COMPLIANCE (false):\", json_dumps(compliance, indent=4))" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": 
"40ffc716", - "metadata": {}, - "outputs": [], - "source": [ - "# finally delete the compliance_policy\n", - "_ = arch.compliance_policies.delete(\n", - " compliance_policy[\"identity\"],\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bccec420", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/archivist/notebooks/Checking the Dog's Weight.ipynb b/archivist/notebooks/Checking the Dog's Weight.ipynb deleted file mode 100644 index 2f1fa676..00000000 --- a/archivist/notebooks/Checking the Dog's Weight.ipynb +++ /dev/null @@ -1,243 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "55d0e5a9-040a-4441-af39-438d66f14f65", - "metadata": {}, - "source": [ - "## Checking the Dog's Weight" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "5c6fb7af-c701-42e1-b3b3-7e1bd845ffcf", - "metadata": {}, - "outputs": [], - "source": [ - "# Create Compliance RICHNESS Policy\n", - "#\n", - "# Main function, establishes a connection to DataTrails using an App Registration then uses that\n", - "# to create a Compliance RICHNESS Policy.\n", - "#\n", - "# Note: The purpose of DataTrails Jupyter Notebooks is to provide simplified examples that one can easily execute and digest.\n", - "# The DataTrails Python SDK is authored to work cleanly with more advanced coding techniques.\n", - "#\n", - "# DataTrails Python SDK: https://github.com/datatrails/datatrails-python\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "123b4c7e-ecb2-439f-b8d7-b94036936efd", - "metadata": {}, - "outputs": [], - "source": [ - "from json import dumps as json_dumps\n", - "from os import getenv\n", - "\n", - "from archivist.archivist import Archivist\n", - "from archivist.compliance_policy_requests import (\n", - " CompliancePolicyRichness,\n", - ")\n", - "from archivist.logger import set_logger" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "da924db9-8182-4254-9b70-5f07d985961f", - "metadata": {}, - "outputs": [], - "source": [ - "%reload_ext dotenv\n", - "%dotenv -o notebooks.env" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "0109a24e-2ddf-4cb9-8347-91cdc0cf6631", - "metadata": {}, - "outputs": [], - "source": [ - "# DATATRAILS_URL, DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET are environment variables that represent connection parameters.\n", - "#\n", - "# DATATRAILS_URL = represents the url to the DataTrails application\n", - "# DATATRAILS_APPREG_CLIENT = represents the client ID from an Application Registration\n", - "# DATATRAILS_APPREG_SECRET = represents the client secret from an Application Registration\n", - "DATATRAILS_URL = getenv(\"DATATRAILS_URL\")\n", - "DATATRAILS_APPREG_CLIENT = getenv(\"DATATRAILS_APPREG_CLIENT\")\n", - "DATATRAILS_APPREG_SECRET = getenv(\"DATATRAILS_APPREG_SECRET\")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "1192eef6-6f8e-4bc0-9d52-889e0ef09ec3", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Connecting to DATATRAILS\n", - 
"DATATRAILS_URL https://app.datatrails.ai\n" - ] - } - ], - "source": [ - "\"\"\"\n", - "Main function of RICHNESS policy creation.\n", - "\n", - "* Connect to DataTrails with client ID and client secret\n", - "* Creates a Compliance RICHNESS Policy\n", - "\"\"\"\n", - "\n", - "# Optional call to set the logger level. The argument can be either\n", - "# \"INFO\" or \"DEBUG\". For more sophisticated logging control see our\n", - "# documentation.\n", - "set_logger(\"INFO\")\n", - "\n", - "# Initialize connection to DATATRAILS\n", - "print(\"Connecting to DATATRAILS\")\n", - "print(\"DATATRAILS_URL\", DATATRAILS_URL)\n", - "arch = Archivist(\n", - " DATATRAILS_URL, (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), max_time=300\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "271f947d-b9aa-43db-97fc-dfe372a3b551", - "metadata": {}, - "outputs": [], - "source": [ - "def create_compliance_policy(arch):\n", - " \"\"\"\n", - " Creates a RICHNESS compliance policy for a dog's weight. If the dog's weight is\n", - " over 60lbs then Golden Retriever Asset is out of compliance.\n", - " \"\"\"\n", - " richness_policy = arch.compliance_policies.create(\n", - " CompliancePolicyRichness(\n", - " description=\"Dog's weight not over 60lbs\",\n", - " display_name=\"Dog's weight not over 60lbs\",\n", - " asset_filter=[\n", - " [\"attributes.arc_display_type=Golden Retriever\"],\n", - " ],\n", - " richness_assertions=[\n", - " [\"Weight<61\"],\n", - " ],\n", - " )\n", - " )\n", - " print(\"RICHNESS_POLICY:\", json_dumps(richness_policy, indent=4))\n", - " return richness_policy" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "99eb566c-4006-4ed1-b427-1b7e5657a7f0", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Refresh token\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "RICHNESS_POLICY: {\n", - " \"identity\": \"compliance_policies/c459c04d-4664-4ce1-9855-b64ed03dbec1\",\n", - " \"compliance_type\": \"COMPLIANCE_RICHNESS\",\n", - " \"description\": \"Dog's weight not over 60lbs\",\n", - " \"display_name\": \"Dog's weight not over 60lbs\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"\",\n", - " \"closing_event_display_type\": \"\",\n", - " \"time_period_seconds\": \"0\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": [\n", - " {\n", - " \"or\": [\n", - " \"Weight<61\"\n", - " ]\n", - " }\n", - " ]\n", - "}\n", - "Compliance_Policy {\n", - " \"identity\": \"compliance_policies/c459c04d-4664-4ce1-9855-b64ed03dbec1\",\n", - " \"compliance_type\": \"COMPLIANCE_RICHNESS\",\n", - " \"description\": \"Dog's weight not over 60lbs\",\n", - " \"display_name\": \"Dog's weight not over 60lbs\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"\",\n", - " \"closing_event_display_type\": \"\",\n", - " \"time_period_seconds\": \"0\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": [\n", - " {\n", - " \"or\": [\n", - " \"Weight<61\"\n", - " ]\n", - " }\n", - " ]\n", - "}\n" - ] - } - ], - "source": [ - "# Creates RICHNESS compliance policy and prints result\n", - "compliance_policy = create_compliance_policy(arch)\n", - 
"print(\"Compliance_Policy\", json_dumps(compliance_policy, indent=4))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "base", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.6" - }, - "vscode": { - "interpreter": { - "hash": "c11202d2846b22eec7deaf37ea813ba92a5f75b5344a4d16688175855af7948e" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/archivist/notebooks/Feeding the Dog Hourly.ipynb b/archivist/notebooks/Feeding the Dog Hourly.ipynb deleted file mode 100644 index 9abe343d..00000000 --- a/archivist/notebooks/Feeding the Dog Hourly.ipynb +++ /dev/null @@ -1,232 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "d7a86415-e826-42f3-bc11-a20eec5fd2f3", - "metadata": {}, - "source": [ - "## Feeding the Dog Hourly" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "f5683dae-015e-4f59-a312-326e3fa767fd", - "metadata": {}, - "outputs": [], - "source": [ - "# Create Compliance PERIOD OUTSTANDING Policy\n", - "#\n", - "# Main function, establishes a connection to DataTrails using an App Registration then uses that\n", - "# to create a Compliance PERIOD OUTSTANDING Policy.\n", - "#\n", - "# Note: The purpose of DataTrails Jupyter Notebooks is to provide simplified examples that one can easily execute and digest.\n", - "# The DataTrails Python SDK is authored to work cleanly with more advanced coding techniques.\n", - "#\n", - "# DataTrails Python SDK: https://github.com/datatrails/datatrails-python\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "dd9f6f95-8832-4134-b8f2-aeda849e1f04", - "metadata": {}, - "outputs": [], - "source": [ - "from json import dumps as json_dumps\n", - "from os import getenv\n", - "\n", - "from archivist.archivist import Archivist\n", - "from archivist.compliance_policy_requests import (\n", - " CompliancePolicyPeriodOutstanding,\n", - ")\n", - "from archivist.logger import set_logger" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "839e2c9d-dd0d-454a-bdf0-4de873040eed", - "metadata": {}, - "outputs": [], - "source": [ - "%reload_ext dotenv\n", - "%dotenv -o notebooks.env" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "f7d30729-a0da-479f-91df-16b0f27ecf08", - "metadata": {}, - "outputs": [], - "source": [ - "# DATATRAILS_URL, DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET are environment variables that represent connection parameters.\n", - "#\n", - "# DATATRAILS_URL = represents the url to the DataTrails application\n", - "# DATATRAILS_APPREG_CLIENT = represents the client ID from an Application Registration\n", - "# DATATRAILS_APPREG_SECRET = represents the client secret from an Application Registration\n", - "DATATRAILS_URL = getenv(\"DATATRAILS_URL\")\n", - "DATATRAILS_APPREG_CLIENT = getenv(\"DATATRAILS_APPREG_CLIENT\")\n", - "DATATRAILS_APPREG_SECRET = getenv(\"DATATRAILS_APPREG_SECRET\")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "11160ab4-7dfe-4618-8293-483f30f080b8", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Connecting to DATATRAILS\n", - "DATATRAILS_URL https://app.datatrails.ai\n" - ] - } - ], - "source": [ - "\"\"\"\n", - "Main function of PERIOD OUTSTANDING policy 
creation.\n", - "\n", - "* Connect to DataTrails with client ID and client secret\n", - "* Creates a Compliance PERIOD OUTSTANDING Policy\n", - "\"\"\"\n", - "\n", - "# Optional call to set the logger level. The argument can be either\n", - "# \"INFO\" or \"DEBUG\". For more sophisticated logging control see our\n", - "# documentation.\n", - "set_logger(\"INFO\")\n", - "\n", - "# Initialize connection to DATATRAILS\n", - "print(\"Connecting to DATATRAILS\")\n", - "print(\"DATATRAILS_URL\", DATATRAILS_URL)\n", - "arch = Archivist(\n", - " DATATRAILS_URL, (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), max_time=300\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "b0eb183c-30ba-4091-84c0-02b0d4bb462d", - "metadata": {}, - "outputs": [], - "source": [ - "def create_compliance_policy(arch):\n", - " \"\"\"\n", - " Creates a PERIOD OUTSTANDING compliance policy for feeding one's dog. If DataTrails sees a \"Feed\"\n", - " event without a closing \"Fed\" event within an hour, then Golden Retriever Asset is\n", - " out of compliance.\n", - " \"\"\"\n", - " period_outstanding_policy = arch.compliance_policies.create(\n", - " CompliancePolicyPeriodOutstanding(\n", - " description=\"Ensuring my dog is fed within an hour\",\n", - " display_name=\"Feeding My Dog Hourly\",\n", - " asset_filter=[\n", - " [\"attributes.arc_display_type=Golden Retriever\"],\n", - " ],\n", - " event_display_type=\"Feed\",\n", - " closing_event_display_type=\"Fed\",\n", - " time_period_seconds=3600,\n", - " )\n", - " )\n", - " print(\"PERIOD_OUTSTANDING_POLICY:\", json_dumps(period_outstanding_policy, indent=4))\n", - " return period_outstanding_policy" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "9ec8c85e-8296-420e-a782-47f1d1c6526f", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Refresh token\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "PERIOD_OUTSTANDING_POLICY: {\n", - " \"identity\": \"compliance_policies/2599ea06-e4de-4472-a5a3-37b0dae75899\",\n", - " \"compliance_type\": \"COMPLIANCE_PERIOD_OUTSTANDING\",\n", - " \"description\": \"Ensuring my dog is fed within an hour\",\n", - " \"display_name\": \"Feeding My Dog Hourly\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Feed\",\n", - " \"closing_event_display_type\": \"Fed\",\n", - " \"time_period_seconds\": \"3600\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n", - "Compliance_Policy {\n", - " \"identity\": \"compliance_policies/2599ea06-e4de-4472-a5a3-37b0dae75899\",\n", - " \"compliance_type\": \"COMPLIANCE_PERIOD_OUTSTANDING\",\n", - " \"description\": \"Ensuring my dog is fed within an hour\",\n", - " \"display_name\": \"Feeding My Dog Hourly\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Feed\",\n", - " \"closing_event_display_type\": \"Fed\",\n", - " \"time_period_seconds\": \"3600\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n" - ] - } - ], - "source": [ - "# Creates PERIOD OUTSTANDING compliance policy and prints result\n", - "compliance_policy = create_compliance_policy(arch)\n", - "print(\"Compliance_Policy\", 
json_dumps(compliance_policy, indent=4))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "base", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.6" - }, - "vscode": { - "interpreter": { - "hash": "c11202d2846b22eec7deaf37ea813ba92a5f75b5344a4d16688175855af7948e" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/archivist/notebooks/Feeding the Dog in a Timely Manner.ipynb b/archivist/notebooks/Feeding the Dog in a Timely Manner.ipynb deleted file mode 100644 index 4866b8eb..00000000 --- a/archivist/notebooks/Feeding the Dog in a Timely Manner.ipynb +++ /dev/null @@ -1,233 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "592d34fb-a918-4b94-a5e4-4969d814e9f9", - "metadata": {}, - "source": [ - "## Feeding the Dog in a Timely Manner" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "477fca63-fc55-40e3-83ff-f59748a3e18a", - "metadata": {}, - "outputs": [], - "source": [ - "# Create Compliance DYNAMIC TOLERANCE Policy\n", - "#\n", - "# Main function, establishes a connection to DataTrails using an App Registration then uses that\n", - "# to create a Compliance DYNAMIC TOLERANCE Policy.\n", - "#\n", - "# Note: The purpose of DataTrails Jupyter Notebooks is to provide simplified examples that one can easily execute and digest.\n", - "# The DataTrails Python SDK is authored to work cleanly with more advanced coding techniques.\n", - "#\n", - "# DataTrails Python SDK: https://github.com/datatrails/datatrails-python\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "a808cbc9-ff34-4f02-be10-4faaf0d1f792", - "metadata": {}, - "outputs": [], - "source": [ - "from json import dumps as json_dumps\n", - "from os import getenv\n", - "\n", - "from archivist.archivist import Archivist\n", - "from archivist.compliance_policy_requests import (\n", - " CompliancePolicyDynamicTolerance,\n", - ")\n", - "from archivist.logger import set_logger" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "926f3e34-6482-4b83-8a56-9a48d5244a24", - "metadata": {}, - "outputs": [], - "source": [ - "%reload_ext dotenv\n", - "%dotenv -o notebooks.env" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "6578da4b-bf37-40b4-8758-765c51b5ae30", - "metadata": {}, - "outputs": [], - "source": [ - "# DATATRAILS_URL, DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET are environment variables that represent connection parameters.\n", - "#\n", - "# DATATRAILS_URL = represents the url to the DataTrails application\n", - "# DATATRAILS_APPREG_CLIENT = represents the client ID from an Application Registration\n", - "# DATATRAILS_APPREG_SECRET = represents the client secret from an Application Registration\n", - "DATATRAILS_URL = getenv(\"DATATRAILS_URL\")\n", - "DATATRAILS_APPREG_CLIENT = getenv(\"DATATRAILS_APPREG_CLIENT\")\n", - "DATATRAILS_APPREG_SECRET = getenv(\"DATATRAILS_APPREG_SECRET\")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "a1aa21c9-c6b1-4fe5-9abf-2c13814b0ede", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Connecting to DATATRAILS\n", - "DATATRAILS_URL https://app.datatrails.ai\n" - ] - } - ], - "source": [ - "\"\"\"\n", - "Main function of DYNAMIC TOLERANCE 
policy creation.\n", - "\n", - "* Connect to DataTrails with client ID and client secret\n", - "* Creates a Compliance DYNAMIC TOLERANCE Policy\n", - "\"\"\"\n", - "\n", - "# Optional call to set the logger level. The argument can be either\n", - "# \"INFO\" or \"DEBUG\". For more sophisticated logging control see our\n", - "# documentation.\n", - "set_logger(\"INFO\")\n", - "\n", - "# Initialize connection to DATATRAILS\n", - "print(\"Connecting to DATATRAILS\")\n", - "print(\"DATATRAILS_URL\", DATATRAILS_URL)\n", - "arch = Archivist(\n", - " DATATRAILS_URL, (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), max_time=300\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "900f0ae5-86dd-48e3-a2cf-9c36fd9ae577", - "metadata": {}, - "outputs": [], - "source": [ - "def create_compliance_policy(arch):\n", - " \"\"\"\n", - " Creates a DYNAMIC TOLERANCE compliance policy for Fed/Feed Events. If time between\n", - " Feed/Fed Events is .5 greater than the average time between similar Events in\n", - " the past week, then Golden Retriever Asset is out of compliance.\n", - " \"\"\"\n", - " dynamic_tolerance_policy = arch.compliance_policies.create(\n", - " CompliancePolicyDynamicTolerance(\n", - " description=\"Average time between Fed/Feed Events\",\n", - " display_name=\"Outlying Feed Events\",\n", - " asset_filter=[\n", - " [\"attributes.arc_display_type=Golden Retriever\"],\n", - " ],\n", - " event_display_type=\"Feed\",\n", - " closing_event_display_type=\"Fed\",\n", - " dynamic_window=604800,\n", - " dynamic_variability=0.5,\n", - " )\n", - " )\n", - " print(\"DYNAMIC_TOLERANCE_POLICY:\", json_dumps(dynamic_tolerance_policy, indent=4))\n", - " return dynamic_tolerance_policy" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "0ba4e484-d93e-4a71-9672-ff3d6c0adaea", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Refresh token\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "DYNAMIC_TOLERANCE_POLICY: {\n", - " \"identity\": \"compliance_policies/d02099fa-adb5-43b3-b2d6-60bf54ad247a\",\n", - " \"compliance_type\": \"COMPLIANCE_DYNAMIC_TOLERANCE\",\n", - " \"description\": \"Average time between Fed/Feed Events\",\n", - " \"display_name\": \"Outlying Feed Events\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Feed\",\n", - " \"closing_event_display_type\": \"Fed\",\n", - " \"time_period_seconds\": \"0\",\n", - " \"dynamic_window\": \"604800\",\n", - " \"dynamic_variability\": 0.5,\n", - " \"richness_assertions\": []\n", - "}\n", - "Compliance_Policy {\n", - " \"identity\": \"compliance_policies/d02099fa-adb5-43b3-b2d6-60bf54ad247a\",\n", - " \"compliance_type\": \"COMPLIANCE_DYNAMIC_TOLERANCE\",\n", - " \"description\": \"Average time between Fed/Feed Events\",\n", - " \"display_name\": \"Outlying Feed Events\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Feed\",\n", - " \"closing_event_display_type\": \"Fed\",\n", - " \"time_period_seconds\": \"0\",\n", - " \"dynamic_window\": \"604800\",\n", - " \"dynamic_variability\": 0.5,\n", - " \"richness_assertions\": []\n", - "}\n" - ] - } - ], - "source": [ - "# Creates DYNAMIC TOLERANCE compliance policy and prints result\n", - "compliance_policy = 
create_compliance_policy(arch)\n", - "print(\"Compliance_Policy\", json_dumps(compliance_policy, indent=4))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "base", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.6" - }, - "vscode": { - "interpreter": { - "hash": "c11202d2846b22eec7deaf37ea813ba92a5f75b5344a4d16688175855af7948e" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/archivist/notebooks/Feeding the Dog.ipynb b/archivist/notebooks/Feeding the Dog.ipynb deleted file mode 100644 index f5d13007..00000000 --- a/archivist/notebooks/Feeding the Dog.ipynb +++ /dev/null @@ -1,232 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "223675ce-b5ad-4c2b-83e8-3681b5c23b1e", - "metadata": {}, - "source": [ - "## Feeding the Dog" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "0a6bf094-1f8d-412f-ac1a-c303ee77fdac", - "metadata": {}, - "outputs": [], - "source": [ - "# Create Compliance CURRENT OUTSTANDING Policy\n", - "#\n", - "# Main function, establishes a connection to DataTrails using an App Registration then uses that\n", - "# to create a Compliance CURRENT OUTSTANDING Policy.\n", - "#\n", - "# Note: The purpose of DataTrails Jupyter Notebooks is to provide simplified examples that one can easily execute and digest.\n", - "# The DataTrails Python SDK is authored to work cleanly with more advanced coding techniques.\n", - "#\n", - "# DataTrails Python SDK: https://github.com/datatrails/datatrails-python\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "e01c8b81-7a3a-4591-bfee-47da41db95f8", - "metadata": {}, - "outputs": [], - "source": [ - "from json import dumps as json_dumps\n", - "from os import getenv\n", - "\n", - "from archivist.archivist import Archivist\n", - "from archivist.compliance_policy_requests import (\n", - " CompliancePolicyCurrentOutstanding,\n", - ")\n", - "from archivist.logger import set_logger" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "c9c8be0b-3d49-4655-9ac0-1a9c3c1758ee", - "metadata": {}, - "outputs": [], - "source": [ - "%reload_ext dotenv\n", - "%dotenv -o notebooks.env" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "5438ccf8-1ea0-4bc8-bbf4-7ddb7aab59e2", - "metadata": {}, - "outputs": [], - "source": [ - "# DATATRAILS_URL, DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET are environment variables that represent connection parameters.\n", - "#\n", - "# DATATRAILS_URL = represents the url to the DataTrails application\n", - "# DATATRAILS_APPREG_CLIENT = represents the client ID from an Application Registration\n", - "# DATATRAILS_APPREG_SECRET = represents the client secret from an Application Registration\n", - "DATATRAILS_URL = getenv(\"DATATRAILS_URL\")\n", - "DATATRAILS_APPREG_CLIENT = getenv(\"DATATRAILS_APPREG_CLIENT\")\n", - "DATATRAILS_APPREG_SECRET = getenv(\"DATATRAILS_APPREG_SECRET\")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "358d3939-87df-4b36-b19c-399c6ee18278", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Connecting to DATATRAILS\n", - "DATATRAILS_URL https://app.datatrails.ai\n" - ] - } - ], - "source": [ - "\"\"\"\n", - "Main function of CURRENT OUTSTANDING 
policy creation.\n", - "\n", - "* Connect to DataTrails with client ID and client secret\n", - "* Creates a Compliance CURRENT OUTSTANDING Policy\n", - "\"\"\"\n", - "\n", - "# Optional call to set the logger level. The argument can be either\n", - "# \"INFO\" or \"DEBUG\". For more sophisticated logging control see our\n", - "# documentation.\n", - "set_logger(\"INFO\")\n", - "\n", - "# Initialize connection to DATATRAILS\n", - "print(\"Connecting to DATATRAILS\")\n", - "print(\"DATATRAILS_URL\", DATATRAILS_URL)\n", - "arch = Archivist(\n", - " DATATRAILS_URL, (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), max_time=300\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "d62d889b-d9f1-43a0-81c0-eda9c013091f", - "metadata": {}, - "outputs": [], - "source": [ - "def create_compliance_policy(arch):\n", - " \"\"\"\n", - " Creates a CURRENT OUTSTANDING compliance policy for feeding one's dog. If DataTrails sees a \"Feed\"\n", - " event without a closing \"Fed\" event, then Golden Retriever Asset is out of compliance.\n", - " \"\"\"\n", - " current_outstanding_policy = arch.compliance_policies.create(\n", - " CompliancePolicyCurrentOutstanding(\n", - " description=\"Ensuring my dog is fed\",\n", - " display_name=\"Feeding My Dog\",\n", - " asset_filter=[\n", - " [\"attributes.arc_display_type=Golden Retriever\"],\n", - " ],\n", - " event_display_type=\"Feed\",\n", - " closing_event_display_type=\"Fed\",\n", - " )\n", - " )\n", - " print(\n", - " \"CURRENT_OUTSTANDING_POLICY:\", json_dumps(current_outstanding_policy, indent=4)\n", - " )\n", - " return current_outstanding_policy" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "fb43cce9-6222-46b2-bd94-ac07263a2bd9", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Refresh token\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CURRENT_OUTSTANDING_POLICY: {\n", - " \"identity\": \"compliance_policies/f896247e-bca5-4410-8c30-09adf81e9244\",\n", - " \"compliance_type\": \"COMPLIANCE_CURRENT_OUTSTANDING\",\n", - " \"description\": \"Ensuring my dog is fed\",\n", - " \"display_name\": \"Feeding My Dog\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Feed\",\n", - " \"closing_event_display_type\": \"Fed\",\n", - " \"time_period_seconds\": \"0\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n", - "Compliance_Policy {\n", - " \"identity\": \"compliance_policies/f896247e-bca5-4410-8c30-09adf81e9244\",\n", - " \"compliance_type\": \"COMPLIANCE_CURRENT_OUTSTANDING\",\n", - " \"description\": \"Ensuring my dog is fed\",\n", - " \"display_name\": \"Feeding My Dog\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Feed\",\n", - " \"closing_event_display_type\": \"Fed\",\n", - " \"time_period_seconds\": \"0\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n" - ] - } - ], - "source": [ - "# Creates CURRENT OUTSTANDING compliance policy and prints result\n", - "compliance_policy = create_compliance_policy(arch)\n", - "print(\"Compliance_Policy\", json_dumps(compliance_policy, indent=4))" - ] - } - ], - "metadata": { - "kernelspec": { - 
"display_name": "base", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.6" - }, - "vscode": { - "interpreter": { - "hash": "c11202d2846b22eec7deaf37ea813ba92a5f75b5344a4d16688175855af7948e" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/archivist/notebooks/Playing Fetch Every 5 Minutes.ipynb b/archivist/notebooks/Playing Fetch Every 5 Minutes.ipynb deleted file mode 100644 index 7654535f..00000000 --- a/archivist/notebooks/Playing Fetch Every 5 Minutes.ipynb +++ /dev/null @@ -1,230 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "11b1628e-4543-4fd6-a74c-a468d5c27d38", - "metadata": {}, - "source": [ - "## Playing Fetch Every 5 Minutes" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "3118a079-251e-4e4e-bead-a8849eb684c6", - "metadata": {}, - "outputs": [], - "source": [ - "# Create Compliance SINCE Policy\n", - "#\n", - "# Main function, establishes a connection to DataTrails using an App Registration then uses that\n", - "# to create a Compliance SINCE Policy.\n", - "#\n", - "# Note: The purpose of DataTrails Jupyter Notebooks is to provide simplified examples that one can easily execute and digest.\n", - "# The DataTrails Python SDK is authored to work cleanly with more advanced coding techniques.\n", - "#\n", - "# DataTrails Python SDK: https://github.com/datatrails/datatrails-python\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "36a65c0e-9425-4c9e-848b-6ad6f761ad19", - "metadata": {}, - "outputs": [], - "source": [ - "from json import dumps as json_dumps\n", - "from os import getenv\n", - "\n", - "from archivist.archivist import Archivist\n", - "from archivist.compliance_policy_requests import (\n", - " CompliancePolicySince,\n", - ")\n", - "from archivist.logger import set_logger" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "a49e804f-ddde-4f0b-a20f-85c41a3ea544", - "metadata": {}, - "outputs": [], - "source": [ - "%reload_ext dotenv\n", - "%dotenv -o notebooks.env" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "5826421d-1a9d-4ccf-b820-9055c10c6035", - "metadata": {}, - "outputs": [], - "source": [ - "# DATATRAILS_URL, DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET are environment variables that represent connection parameters.\n", - "#\n", - "# DATATRAILS_URL = represents the url to the DataTrails application\n", - "# DATATRAILS_APPREG_CLIENT = represents the client ID from an Application Registration\n", - "# DATATRAILS_APPREG_SECRET = represents the client secret from an Application Registration\n", - "DATATRAILS_URL = getenv(\"DATATRAILS_URL\")\n", - "DATATRAILS_APPREG_CLIENT = getenv(\"DATATRAILS_APPREG_CLIENT\")\n", - "DATATRAILS_APPREG_SECRET = getenv(\"DATATRAILS_APPREG_SECRET\")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "609cd663-86e8-4bce-8642-6ed6a3afdbfc", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Connecting to DATATRAILS\n", - "DATATRAILS_URL https://app.datatrails.ai\n" - ] - } - ], - "source": [ - "\"\"\"\n", - "Main function of SINCE policy creation.\n", - "\n", - "* Connect to DataTrails with client ID and client secret\n", - "* Creates a Compliance SINCE Policy\n", - "\"\"\"\n", - "\n", 
- "# Optional call to set the logger level. The argument can be either\n", - "# \"INFO\" or \"DEBUG\". For more sophisticated logging control see our\n", - "# documentation.\n", - "set_logger(\"INFO\")\n", - "\n", - "# Initialize connection to DATATRAILS\n", - "print(\"Connecting to DATATRAILS\")\n", - "print(\"DATATRAILS_URL\", DATATRAILS_URL)\n", - "arch = Archivist(\n", - " DATATRAILS_URL, (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), max_time=300\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "dc1adbe6-558c-4ef1-8939-9107ece96f95", - "metadata": {}, - "outputs": [], - "source": [ - "def create_compliance_policy(arch):\n", - " \"\"\"\n", - " Creates a SINCE compliance policy for playing fetch every 5 minutes. If DataTrails does not see\n", - " a \"Fetch\" event within 5 minutes then Golden Retriever Asset is out of compliance.\n", - " \"\"\"\n", - " since_policy = arch.compliance_policies.create(\n", - " CompliancePolicySince(\n", - " description=\"Playing fetch with my dog every 5 minutes\",\n", - " display_name=\"Playing Fetch\",\n", - " asset_filter=[\n", - " [\"attributes.arc_display_type=Golden Retriever\"],\n", - " ],\n", - " event_display_type=\"Fetch\",\n", - " time_period_seconds=300,\n", - " )\n", - " )\n", - " print(\"SINCE_POLICY:\", json_dumps(since_policy, indent=4))\n", - " return since_policy" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "2dfa472a-4e3a-42f2-a273-9bd8d512cf21", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Refresh token\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "SINCE_POLICY: {\n", - " \"identity\": \"compliance_policies/246d2406-5fea-4869-b4e6-516a5aada609\",\n", - " \"compliance_type\": \"COMPLIANCE_SINCE\",\n", - " \"description\": \"Playing fetch with my dog every 5 minutes\",\n", - " \"display_name\": \"Playing Fetch\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Fetch\",\n", - " \"closing_event_display_type\": \"\",\n", - " \"time_period_seconds\": \"300\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n", - "Compliance_Policy {\n", - " \"identity\": \"compliance_policies/246d2406-5fea-4869-b4e6-516a5aada609\",\n", - " \"compliance_type\": \"COMPLIANCE_SINCE\",\n", - " \"description\": \"Playing fetch with my dog every 5 minutes\",\n", - " \"display_name\": \"Playing Fetch\",\n", - " \"asset_filter\": [\n", - " {\n", - " \"or\": [\n", - " \"attributes.arc_display_type=Golden Retriever\"\n", - " ]\n", - " }\n", - " ],\n", - " \"event_display_type\": \"Fetch\",\n", - " \"closing_event_display_type\": \"\",\n", - " \"time_period_seconds\": \"300\",\n", - " \"dynamic_window\": \"0\",\n", - " \"dynamic_variability\": 0,\n", - " \"richness_assertions\": []\n", - "}\n" - ] - } - ], - "source": [ - "# Creates SINCE compliance policy and prints result\n", - "compliance_policy = create_compliance_policy(arch)\n", - "print(\"Compliance_Policy\", json_dumps(compliance_policy, indent=4))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "base", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - 
"pygments_lexer": "ipython3", - "version": "3.9.6" - }, - "vscode": { - "interpreter": { - "hash": "c11202d2846b22eec7deaf37ea813ba92a5f75b5344a4d16688175855af7948e" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/archivist/runner.py b/archivist/runner.py index 2e917860..54e34610 100644 --- a/archivist/runner.py +++ b/archivist/runner.py @@ -9,7 +9,7 @@ from logging import getLogger from time import sleep as time_sleep from types import GeneratorType -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from uuid import UUID from .errors import ArchivistError, ArchivistInvalidOperationError @@ -92,15 +92,6 @@ def __init__(self, archivist_instance: "Archivist"): self["COMPOSITE_ESTATE_INFO"] = { "action": self._archivist.composite.estate_info, } - self["COMPLIANCE_POLICIES_CREATE"] = { - "action": self._archivist.compliance_policies.create_from_data, - "delete": self._archivist.compliance_policies.delete, - } - self["COMPLIANCE_COMPLIANT_AT"] = { - "action": self._archivist.compliance.compliant_at, - "keywords": ("report",), - "use_asset_label": "add_arg_identity", - } self["EVENTS_CREATE"] = { "action": self._archivist.events.create_from_data, "keywords": ("confirm",), @@ -190,7 +181,7 @@ def __init__(self, archivist_instance: "Archivist"): "use_subject_label": "add_arg_identity", } - def ops(self, action_name: str) -> "dict[str, Any]": + def ops(self, action_name: str): """ Get valid entry in map """ @@ -199,14 +190,14 @@ def ops(self, action_name: str) -> "dict[str, Any]": raise ArchivistInvalidOperationError(f"Illegal Action '{action_name}'") return ops - def action(self, action_name: str) -> Callable: + def action(self, action_name: str): """ Get valid action in map """ # if an exception occurs here then the dict initialized above is faulty. return self.ops(action_name).get("action") # pyright: ignore - def keywords(self, action_name: str) -> "tuple | None": + def keywords(self, action_name: str): """ Get keywords in map """ @@ -218,7 +209,7 @@ def delete(self, action_name: str): """ return self.ops(action_name).get("delete") - def label(self, noun: str, endpoint: str, action_name: str) -> bool: + def label(self, noun: str, endpoint: str, action_name: str): """ Return whether this action uses or sets label """ @@ -230,7 +221,7 @@ def __init__(self, archivist_instance: "Archivist", **kwargs): super().__init__(**kwargs) self._archivist = archivist_instance self._args: "list[Any]" = [] - self._kwargs: "dict[str, Any]" = {} + self._kwargs = {} self._actions = None self._action = None self._action_name = None @@ -255,7 +246,7 @@ def add_data_identity(self, key, identity): add_data_location_identity = partialmethod(add_data_identity, "location") - def args(self, identity_method, step): + def init_args(self, identity_method, step): """ Add args and kwargs to action. 
""" @@ -330,10 +321,6 @@ def description(self): if description is not None: LOGGER.info(description) - @property - def delete(self): - return self.get("delete") - def print_response(self, response): print_response = self.get("print_response") if print_response: @@ -358,7 +345,11 @@ def actions(self): return self._actions @property - def action(self) -> Callable: + def args(self): + return self._args + + @property + def action(self): if self._action is None: self._action = self.actions.action(self.action_name) @@ -366,16 +357,13 @@ def action(self) -> Callable: @property def delete_method(self): - if self._delete_method is None: - self._delete_method = self.actions.delete(self.action_name) + self._delete_method = self.actions.delete(self.action_name) return self._delete_method @property def keywords(self): - if self._keywords is None: - self._keywords = self.actions.keywords(self.action_name) - + self._keywords = self.actions.keywords(self.action_name) return self._keywords @property @@ -444,7 +432,6 @@ def run_steps(self, config: "dict[str, Any]"): for step in config["steps"]: self.run_step(step) - self.delete() self._archivist.close() def run_step(self, step: "dict[str, Any]"): @@ -461,7 +448,7 @@ def run_step(self, step: "dict[str, Any]"): s.description() # this is a bit clunky... - s.args(self.identity, step) + s.init_args(self.identity, step) # wait for a number of seconds and then execute s.wait_time() @@ -469,27 +456,11 @@ def run_step(self, step: "dict[str, Any]"): s.print_response(response) - if s.delete: - self.set_deletions(response, s.delete_method) - for noun in NOUNS: label = s.get(f"{noun}_label") if s.label("set", noun) and label is not None: self.entities[label] = response - def set_deletions(self, response: "dict[str, Any]", delete_method): - """sets entry to be deleted""" - - if delete_method is not None: - identity = response["identity"] - self.deletions[identity] = delete_method - - def delete(self): - """Deletes all entities""" - for identity, delete_method in self.deletions.items(): - LOGGER.info("Delete %s", identity) - delete_method(identity) - def identity(self, name: str) -> "str|None": """Gets entity id""" diff --git a/docs/code/compliance_policies/compliance.rst b/docs/code/compliance_policies/compliance.rst deleted file mode 100644 index 16c5d908..00000000 --- a/docs/code/compliance_policies/compliance.rst +++ /dev/null @@ -1,11 +0,0 @@ - -.. _complianceref: - -Compliance --------------------- - - -.. automodule:: archivist.compliance - :members: - :private-members: - diff --git a/docs/code/compliance_policies/compliance_policies.rst b/docs/code/compliance_policies/compliance_policies.rst deleted file mode 100644 index 491ea878..00000000 --- a/docs/code/compliance_policies/compliance_policies.rst +++ /dev/null @@ -1,11 +0,0 @@ - -.. _compliance_policies_clientref: - -Compliance Policies Client --------------------------- - - -.. automodule:: archivist.compliance_policies - :members: - :private-members: - diff --git a/docs/code/compliance_policies/compliance_policies_type.rst b/docs/code/compliance_policies/compliance_policies_type.rst deleted file mode 100644 index 6083ee1a..00000000 --- a/docs/code/compliance_policies/compliance_policies_type.rst +++ /dev/null @@ -1,11 +0,0 @@ - -.. _compliance_policies_type: - -Compliance Policy types ------------------------ - - -.. 
automodule:: archivist.compliance_policy_type - :members: - :private-members: - diff --git a/docs/code/compliance_policies/compliance_policy_requests.rst b/docs/code/compliance_policies/compliance_policy_requests.rst deleted file mode 100644 index 4be9d6f0..00000000 --- a/docs/code/compliance_policies/compliance_policy_requests.rst +++ /dev/null @@ -1,11 +0,0 @@ - -.. _compliance_policies_requests: - -Compliance Policy Requests ---------------------------- - - -.. automodule:: archivist.compliance_policy_requests - :members: - :private-members: - diff --git a/docs/code/compliance_policies/index.rst b/docs/code/compliance_policies/index.rst deleted file mode 100644 index 45a8b3ff..00000000 --- a/docs/code/compliance_policies/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. _compliance_policiesindex: - -Compliance Policies -=================== - -.. toctree:: - :maxdepth: 2 - :caption: Contents: - - compliance_policies - compliance_policies_type - compliance_policy_requests - compliance - diff --git a/docs/code/index.rst b/docs/code/index.rst index 893d4da5..24ce42ef 100644 --- a/docs/code/index.rst +++ b/docs/code/index.rst @@ -16,7 +16,6 @@ Shows all code locations attachments tenancies - compliance_policies/index iam/index runner diff --git a/docs/compliance_policies_since.rst b/docs/compliance_policies_since.rst deleted file mode 100644 index 4623d7ed..00000000 --- a/docs/compliance_policies_since.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. _compliance_policies_sinceref: - -Since Compliance Policy -........................ - -.. literalinclude:: ../examples/compliance_policies_since.py - :language: python - - - diff --git a/docs/fixtures.rst b/docs/fixtures.rst index 6e388d18..057f53f7 100644 --- a/docs/fixtures.rst +++ b/docs/fixtures.rst @@ -17,13 +17,13 @@ and locations. with open(".auth_token", mode='r', encoding="utf-8") as tokenfile: authtoken = tokenfile.read().strip() - # Initialize connection to Archivist - for assets on DLT. + # Initialize connection to Archivist - for assets on immutable ledger. ledger = Archivist( "https://app.datatrails.ai", authtoken, ) - # lets define doors in our namespace that reside on the ledger... + # lets define doors in our namespace that reside on the immutable ledger... doors = copy(ledger) doors.fixtures = { "assets": { diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 7e7caefe..79f5ee28 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -27,8 +27,6 @@ See the examples and functests directories. access_policy_create access_policies_filter - compliance_policies_since - runner/index scan_test diff --git a/docs/notebooks.rst b/docs/notebooks.rst index edae7518..9b94ea1f 100644 --- a/docs/notebooks.rst +++ b/docs/notebooks.rst @@ -67,17 +67,3 @@ Download the notebooks into a suitable folder: notebooks/Sharing Album Release Info with User notebooks/Sharing Artist Asset with Record Labels notebooks/Sharing Album Release Info with Record Labels - -.. 
toctree:: - :maxdepth: 2 - :caption: Compliance Policies Examples - - notebooks/Playing Fetch Every 5 Minutes - notebooks/Feeding the Dog - notebooks/Feeding the Dog Hourly - notebooks/Feeding the Dog in a Timely Manner - notebooks/Checking the Dog's Weight - notebooks/Check Asset Compliance using CURRENT OUTSTANDING Policy - notebooks/Check Asset Compliance using SINCE Policy - - diff --git a/docs/runner/components/compliance_compliant_at.rst b/docs/runner/components/compliance_compliant_at.rst deleted file mode 100644 index 5fa2240f..00000000 --- a/docs/runner/components/compliance_compliant_at.rst +++ /dev/null @@ -1,21 +0,0 @@ -.. _compliance_compliant_at_yamlref: - -Compliance Compliant_at Story Runner YAML -........................................... - -Verify an Asset against its compliance policies. - -:code:`asset_label` is required from a previously created asset. The :code:`asset_id` is retrieved and -inserted as the first argument to :code:`compliance.compliant_at()`. - -Setting :code:`report: true` will trigger a report to be printed on the compliance status. - -.. code-block:: yaml - - --- - steps: - - step: - action: COMPLIANCE_COMPLIANT_AT - description: Check Compliance of EV pump 1. - report: true - asset_label: ev pump 1 diff --git a/docs/runner/components/compliance_policies_create.rst b/docs/runner/components/compliance_policies_create.rst deleted file mode 100644 index dd17b324..00000000 --- a/docs/runner/components/compliance_policies_create.rst +++ /dev/null @@ -1,46 +0,0 @@ -.. _compliance_policies_create_yamlref: - -Compliance Policy Create Story Runner YAML -........................................... - -The specific fields required vary according to -:code:`compliance_type` and is documented elsewhere. - -This example is for a :code:`DYNAMIC_TOLERANCE` type policy. - -.. code-block:: yaml - - --- - steps: - - step: - action: COMPLIANCE_POLICIES_CREATE - description: Create a compliance policy that checks an EV pump maintenance requests are serviced within a reasonable time frame. - print_response: true - description: ev maintenance policy - display_name: ev maintenance policy - compliance_type: COMPLIANCE_DYNAMIC_TOLERANCE - asset_filter: - - or: [ "attributes.ev_pump=true" ] - event_display_type: Maintenance Requested - closing_event_display_type: Maintenance Performed - dynamic_window: 700 - dynamic_variability: 1.5 - - -This example is for a :code:`RICHNESS` type policy. - -.. code-block:: yaml - - --- - steps: - - step: - action: COMPLIANCE_POLICIES_CREATE - description: Create a compliance policy that checks the radiation level of radiation bags is less than 7 rads. - print_response: true - description: radiation level safety policy - display_name: radiation safety policy - compliance_type: COMPLIANCE_RICHNESS - asset_filter: - - or: [ "attributes.radioactive=true" ] - richness_assertions: - - or: [ "radiation_level<7" ] diff --git a/docs/runner/components/generic.rst b/docs/runner/components/generic.rst index 83aa0da9..025c0005 100644 --- a/docs/runner/components/generic.rst +++ b/docs/runner/components/generic.rst @@ -33,9 +33,6 @@ Each step follows the same pattern: :wait_time: The story runner will pause for this number of seconds before execution. - Primarily used to demonstrate compliance policy evaluation. One pauses - before creating events and before evaluating compliance to allow - (for example) the asset to become non-compliant. (demonstration) :print_response: Emit JSON representation of response. Useful for debugging purposes. 
diff --git a/docs/runner/components/index.rst b/docs/runner/components/index.rst index 1790d4ed..139767a0 100644 --- a/docs/runner/components/index.rst +++ b/docs/runner/components/index.rst @@ -13,8 +13,6 @@ Story Runner Components assets_create_if_not_exists assets_list assets_wait_for_confirmed - compliance_compliant_at - compliance_policies_create composite_estate_info events_count events_create diff --git a/docs/runner/demos/compliance_policies_demo.rst b/docs/runner/demos/compliance_policies_demo.rst deleted file mode 100644 index cac20e1d..00000000 --- a/docs/runner/demos/compliance_policies_demo.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. _compliance_policies_demoref: - -Compliance Policies Demo -........................... - -An example of using the archivist_runner endpoint to execute a scenario of -using various compliance policies. - -Reference yaml files are available here: - - - :ref:`compliance_policies_demo_dynamic_toleranceref` - - :ref:`compliance_policies_demo_richnessref` - -See :ref:`story_runnerindex` for details of executing the runner from the -command line. diff --git a/docs/runner/demos/compliance_policies_demo_dynamic_tolerance.rst b/docs/runner/demos/compliance_policies_demo_dynamic_tolerance.rst deleted file mode 100644 index 438a0e4f..00000000 --- a/docs/runner/demos/compliance_policies_demo_dynamic_tolerance.rst +++ /dev/null @@ -1,13 +0,0 @@ -.. _compliance_policies_demo_dynamic_toleranceref: - -Compliance Policies Dynamic Tolerance Demo -........................................... - -Code to use this file is found here :ref:`compliance_policies_demoref` - - -.. literalinclude:: ../../../functests/test_resources/dynamic_tolerance_story.yaml - :language: yaml - - - diff --git a/docs/runner/demos/compliance_policies_demo_richness.rst b/docs/runner/demos/compliance_policies_demo_richness.rst deleted file mode 100644 index 0ae94400..00000000 --- a/docs/runner/demos/compliance_policies_demo_richness.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. _compliance_policies_demo_richnessref: - -Compliance Policies Richness Demo -.................................. - -Code to use this file is found here :ref:`compliance_policies_demoref` - -.. 
literalinclude:: ../../../functests/test_resources/richness_story.yaml - :language: yaml - - - diff --git a/docs/runner/demos/index.rst b/docs/runner/demos/index.rst index e2535f8b..1d0a6719 100644 --- a/docs/runner/demos/index.rst +++ b/docs/runner/demos/index.rst @@ -7,9 +7,6 @@ Story Runner Demos :maxdepth: 2 :caption: Contents: - compliance_policies_demo - compliance_policies_demo_dynamic_tolerance - compliance_policies_demo_richness door_entry_demo estate_info_demo synsation_demo diff --git a/docs/runner/execute.rst b/docs/runner/execute.rst index 77072a56..f5571a3b 100644 --- a/docs/runner/execute.rst +++ b/docs/runner/execute.rst @@ -10,5 +10,5 @@ A straightforward incantation of the yaml runner: archivist_runner \ -u https://app.datatrails.ai \ --auth-token credentials/token \ - functests/test_resources/richness_story.yaml + functests/test_resources/subjects_story.yaml diff --git a/docs/runner/index.rst b/docs/runner/index.rst index 7452a0f3..5143a19c 100644 --- a/docs/runner/index.rst +++ b/docs/runner/index.rst @@ -40,7 +40,7 @@ Example usage: -u https://app.datatrails.ai \ --client-id \ --client-secret \ - functests/test_resources/richness_story.yaml + functests/test_resources/subjects_story.yaml For further reading: diff --git a/docs/sbom.xml b/docs/sbom.xml index ad93e985..d9fd7b70 100644 --- a/docs/sbom.xml +++ b/docs/sbom.xml @@ -1,122 +1,89 @@ - - - 2023-10-18T08:27:07.469453+00:00 - - - CycloneDX - cyclonedx-python-lib - 4.2.3 - - - https://github.com/CycloneDX/cyclonedx-python-lib/actions - - - https://pypi.org/project/cyclonedx-python-lib/ - - - https://cyclonedx.github.io/cyclonedx-python-lib/ - - - https://github.com/CycloneDX/cyclonedx-python-lib/issues - - - https://github.com/CycloneDX/cyclonedx-python-lib/blob/main/LICENSE - - - https://github.com/CycloneDX/cyclonedx-python-lib/blob/main/CHANGELOG.md - - - https://github.com/CycloneDX/cyclonedx-python-lib - - - https://cyclonedx.org - - - - - - - - Jinja2 - 3.1.2 - - - MarkupSafe - 2.1.3 - - - PyYAML - 6.0.1 - - - backoff - 2.2.1 - - - certifi - 2023.7.22 - - - charset-normalizer - 3.3.0 - - - flatten-dict - 0.4.2 - - - idna - 3.4 - - - iso8601 - 2.1.0 - - - pyaml-env - 1.2.1 - - - requests - 2.31.0 - - - requests-toolbelt - 1.0.0 - - - rfc3339 - 6.2 - - - six - 1.16.0 - - - urllib3 - 2.0.7 - - - xmltodict - 0.13.0 - - - - - - - - - - - - - - - - - - - - - + + + 2025-03-27T17:51:38.265997+00:00 + + + + Jinja2 + 3.1.6 + + + MarkupSafe + 3.0.2 + + + PyYAML + 6.0.2 + + + backoff + 2.2.1 + + + certifi + 2025.1.31 + + + charset-normalizer + 3.4.1 + + + flatten-dict + 0.4.2 + + + idna + 3.10 + + + iso8601 + 2.1.0 + + + pyaml_env + 1.2.2 + + + requests + 2.32.3 + + + requests-toolbelt + 1.0.0 + + + rfc3339 + 6.2 + + + six + 1.17.0 + + + urllib3 + 2.3.0 + + + xmltodict + 0.14.2 + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/compliance_policies_since.py b/examples/compliance_policies_since.py deleted file mode 100644 index 5260a066..00000000 --- a/examples/compliance_policies_since.py +++ /dev/null @@ -1,157 +0,0 @@ -"""Define a compliance policy that alerts when an asset has expired. - -Main function parses in a url to the Archivist and client credentials , which is -a user authorization. The main function would initialize an archivist connection -using the url and the credentials, called "arch", then call arch.access_policies.list() -with suitable properties and attributes. 
- -""" - -from json import dumps as json_dumps -from os import getenv -from time import sleep -from uuid import uuid4 -from warnings import filterwarnings - -from archivist.archivist import Archivist -from archivist.compliance_policy_requests import ( - CompliancePolicySince, -) -from archivist.utils import get_auth - -filterwarnings("ignore", message="Unverified HTTPS request") - - -def get_archivist(): - """Create Archivist endpoint.""" - - # client id and client secret is obtained from the appidp endpoint - see the - # application registrations example code in examples/applications_registration.py - # - # client id is an environment variable. client_secret is stored in a file in a - # directory that has 0700 permissions. The location of this file is set in - # the client_secret_file environment variable. - # - auth = get_auth( - auth_token=getenv("DATATRAILS_AUTHTOKEN"), - auth_token_filename=getenv("DATATRAILS_AUTHTOKEN_FILENAME"), - client_id=getenv("DATATRAILS_APPREG_CLIENT"), - client_secret=getenv("DATATRAILS_APPREG_SECRET"), - client_secret_filename=getenv("DATATRAILS_APPREG_SECRET_FILENAME"), - ) - - # Initialize connection to Archivist - arch = Archivist( - "https://app.datatrails.ai", - auth, - ) - return arch - - -def create_compliance_policy(arch, tag): - """Compliance policy which expires 10 seconds after a - Maintenance Performed event on a 'Traffic Light' has occurred. - - Usually the expiry time is on the order of days or weeks.. - - Additionally the use of tag is simply to make this example - repeatable. - """ - compliance_policy = arch.compliance_policies.create( - CompliancePolicySince( - description="Maintenance should be performed every 10 seconds", - display_name="Regular Maintenance of Traffic light", - asset_filter=[ - ["attributes.arc_display_type=Traffic Light"], - ], - event_display_type=f"Maintenance Performed {tag}", - time_period_seconds=10, # very short so we can test - ) - ) - print("SINCE_POLICY:", json_dumps(compliance_policy, indent=4)) - return compliance_policy - - -def create_traffic_light(arch): - """ - Creates a traffic light. - - Note that arc_display_type siginfies a Traffic Light - """ - - traffic_light = arch.assets.create( - attrs={ - "arc_display_name": "Traffic light model 54", - "arc_description": "Traffic flow control light at A603 North East", - "arc_display_type": "Traffic Light", - }, - ) - print("TRAFFIC_LIGHT:", json_dumps(traffic_light, indent=4)) - return traffic_light - - -def perform_maintenance(arch, traffic_light, tag): - """ - Perform maintenance on traffic light - """ - maintenance_performed = arch.events.create( - traffic_light["identity"], - { - "operation": "Record", - "behaviour": "RecordEvidence", - }, - { - "arc_description": "Maintenance performed on traffic light", - "arc_display_type": f"Maintenance Performed {tag}", - }, - ) - print("MAINTENANCE_PERFORMED:", json_dumps(maintenance_performed, indent=4)) - - -def main(): - """ - Connect to archivist, create an asset, create a compliance policy - execute an event on the asset and check if the asset has expired - """ - # first get Archivist connection. - arch = get_archivist() - - tag = uuid4() # make this example repeatable - - # make a SINCE compliance policy that alerts when the - # maintenance performed event has expired. - compliance_policy = create_compliance_policy(arch, tag) - - # create an asset that matches the assets_filter field in the - # compliance policy. 
- traffic_light = create_traffic_light(arch) - - # perform maintenance on the asset which is valid for 10 seconds. - perform_maintenance(arch, traffic_light, tag) - - # and check compliance - should be OK. - print("Sleep 1 second...") - sleep(1) - compliance = arch.compliance.compliant_at( - traffic_light["identity"], - ) - print("COMPLIANCE (true):", json_dumps(compliance, indent=4)) - - # however waiting long enough (> 10s) will cause the asset to - # become non-compliant... - print("Sleep 15 seconds...") - sleep(15) - compliance = arch.compliance.compliant_at( - traffic_light["identity"], - ) - print("COMPLIANCE (false):", json_dumps(compliance, indent=4)) - - # finally delete the compliance_policy - arch.compliance_policies.delete( - compliance_policy["identity"], - ) - arch.close() - - -if __name__ == "__main__": - main() diff --git a/functests/execcompliance_policies.py b/functests/execcompliance_policies.py deleted file mode 100644 index 2137eac4..00000000 --- a/functests/execcompliance_policies.py +++ /dev/null @@ -1,486 +0,0 @@ -""" -Test compliance policies -""" - -from json import dumps as json_dumps -from os import getenv -from time import sleep -from uuid import uuid4 - -from archivist import logger -from archivist.archivist import Archivist -from archivist.compliance_policy_requests import ( - CompliancePolicyCurrentOutstanding, - CompliancePolicyDynamicTolerance, - CompliancePolicyPeriodOutstanding, - CompliancePolicyRichness, - CompliancePolicySince, -) -from archivist.compliance_policy_type import CompliancePolicyType -from archivist.utils import get_auth - -from .constants import ( - PARTNER_ID_VALUE, - USER_AGENT_VALUE, - TestCase, -) - -# pylint: disable=fixme -# pylint: disable=missing-docstring -# pylint: disable=unused-variable - -if getenv("DATATRAILS_LOGLEVEL") is not None: - logger.set_logger(getenv("DATATRAILS_LOGLEVEL")) - -LOGGER = logger.LOGGER - -# Ridiculaously short maintenance period for test purposes -SINCE_POLICY = CompliancePolicySince( - description="Maintenance should be performed every 10 seconds", - display_name="Regular Maintenance of Traffic light", - asset_filter=[ - ["attributes.arc_display_type=Traffic Light"], - ], - event_display_type="Maintenance Performed", - time_period_seconds=10, # very short so we can test -) - -CURRENT_OUTSTANDING_POLICY = CompliancePolicyCurrentOutstanding( - description="Maintenance should be performed every 10 seconds", - display_name="Regular Maintenance of Traffic light", - asset_filter=[ - ["attributes.arc_display_type=Traffic Light"], - ], - event_display_type="Maintenance Request", - closing_event_display_type="Maintenance Performed", -) - -PERIOD_OUTSTANDING_POLICY = CompliancePolicyPeriodOutstanding( - description="period_outstanding description", - display_name="period_outstanding display_name", - asset_filter=[ - ["attributes.radioactive=true"], - ], - event_display_type="period_outstanding event_display_type", - closing_event_display_type="period_outstanding closing_event_display_type", - time_period_seconds=10, -) -DYNAMIC_TOLERANCE_POLICY = CompliancePolicyDynamicTolerance( - description="dynamic_tolerance description", - display_name="dynamic_tolerance display_name", - asset_filter=[ - ["attributes.radioactive=true"], - ], - event_display_type="dynamic_tolerance event_display_type", - closing_event_display_type="dynamic_tolerance closing_event_display_type", - dynamic_window=86400, - dynamic_variability=0.5, -) -RICHNESS_POLICY = CompliancePolicyRichness( - description="richness description", - 
display_name="richness display_name", - asset_filter=[ - ["attributes.radioactive=true"], - ], - richness_assertions=[ - ["rad<7"], - ], -) - - -class TestCompliancePoliciesBase(TestCase): - """ - Test Archivist CompliancePolicies Create method - """ - - maxDiff = None - - def setUp(self): - auth = get_auth( - client_filename=getenv("DATATRAILS_APPREG_CLIENT_FILENAME"), - client_secret_filename=getenv("DATATRAILS_APPREG_SECRET_FILENAME"), - ) - self.arch = Archivist( - getenv("DATATRAILS_URL"), - auth, - partner_id=PARTNER_ID_VALUE, - ) - self.arch.user_agent = USER_AGENT_VALUE - self.identities = [] - - def tearDown(self): - if self.identities: - for identity in self.identities: - LOGGER.debug("Delete %s", identity) - self.arch.compliance_policies.delete(identity) - - self.arch.close() - - -class TestCompliancePolicies(TestCompliancePoliciesBase): - def test_compliancepolicies_create_since(self): - """ - Test compliance_policies creation - """ - compliance_policy = self.arch.compliance_policies.create( - SINCE_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - self.assertEqual( - compliance_policy["display_name"], - SINCE_POLICY.display_name, - msg="Incorrect display name", - ) - LOGGER.debug("SINCE_POLICY: %s", json_dumps(compliance_policy, indent=4)) - - def test_compliancepolicies_create_richness(self): - """ - Test compliance_policies creation - """ - compliance_policy = self.arch.compliance_policies.create( - RICHNESS_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - self.assertEqual( - compliance_policy["display_name"], - RICHNESS_POLICY.display_name, - msg="Incorrect display name", - ) - LOGGER.debug("RICHNESS_POLICY: %s", json_dumps(compliance_policy, indent=4)) - - def test_compliancepolicies_create_dynamic_tolerance(self): - """ - Test compliance_policies creation - """ - compliance_policy = self.arch.compliance_policies.create( - DYNAMIC_TOLERANCE_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - self.assertEqual( - compliance_policy["display_name"], - DYNAMIC_TOLERANCE_POLICY.display_name, - msg="Incorrect display name", - ) - LOGGER.debug( - "DYNAMIC_TOLERANCE_POLICY: %s", json_dumps(compliance_policy, indent=4) - ) - - def test_compliancepolicies_create_current_outstanding(self): - """ - Test compliance_policies creation - """ - compliance_policy = self.arch.compliance_policies.create( - CURRENT_OUTSTANDING_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - self.assertEqual( - compliance_policy["display_name"], - CURRENT_OUTSTANDING_POLICY.display_name, - msg="Incorrect display name", - ) - LOGGER.debug( - "CURRENT_OUTSTANDING_POLICY: %s", json_dumps(compliance_policy, indent=4) - ) - - def test_compliancepolicies_create_period_understanding(self): - """ - Test compliance_policies creation - """ - compliance_policy = self.arch.compliance_policies.create( - PERIOD_OUTSTANDING_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - self.assertEqual( - compliance_policy["display_name"], - PERIOD_OUTSTANDING_POLICY.display_name, - msg="Incorrect display name", - ) - LOGGER.debug( - "PERIOD_OUTSTANDING_POLICY: %s", json_dumps(compliance_policy, indent=4) - ) - - def test_compliance_policies_list(self): - """ - Test compliance_policy list - """ - compliance_policy = self.arch.compliance_policies.create( - SINCE_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - compliance_policy = self.arch.compliance_policies.create( - PERIOD_OUTSTANDING_POLICY, - ) - 
self.identities.append(compliance_policy["identity"]) - compliance_policy = self.arch.compliance_policies.create( - CURRENT_OUTSTANDING_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - compliance_policy = self.arch.compliance_policies.create( - DYNAMIC_TOLERANCE_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - - compliance_policies = list(self.arch.compliance_policies.list()) - for i, compliance_policy in enumerate(compliance_policies): - LOGGER.debug("%d: %s", i, json_dumps(compliance_policy, indent=4)) - self.assertGreater( - len(compliance_policy["display_name"]), - 0, - msg="Incorrect display name", - ) - - def test_compliance_policies_count(self): - """ - Test compliance_policy count - """ - compliance_policy = self.arch.compliance_policies.create( - SINCE_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - compliance_policy = self.arch.compliance_policies.create( - PERIOD_OUTSTANDING_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - compliance_policy = self.arch.compliance_policies.create( - CURRENT_OUTSTANDING_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - compliance_policy = self.arch.compliance_policies.create( - DYNAMIC_TOLERANCE_POLICY, - ) - self.identities.append(compliance_policy["identity"]) - - count = self.arch.compliance_policies.count( - props={"compliance_type": CompliancePolicyType.COMPLIANCE_SINCE.name} - ) - LOGGER.debug("No. of 'SINCE' compliance policies: %d", count) - count = self.arch.compliance_policies.count( - props={"compliance_type": CompliancePolicyType.COMPLIANCE_RICHNESS.name} - ) - LOGGER.debug("No. of 'RICHNESS' compliance policies: %d", count) - count = self.arch.compliance_policies.count( - props={ - "compliance_type": CompliancePolicyType.COMPLIANCE_DYNAMIC_TOLERANCE.name - } - ) - LOGGER.debug("No. of 'DYNAMIC_TOLERANCE' compliance policies: %d", count) - count = self.arch.compliance_policies.count( - props={ - "compliance_type": CompliancePolicyType.COMPLIANCE_CURRENT_OUTSTANDING.name - } - ) - LOGGER.debug("No. of 'CURRENT_OUTSTANDING' compliance policies: %d", count) - count = self.arch.compliance_policies.count( - props={ - "compliance_type": CompliancePolicyType.COMPLIANCE_PERIOD_OUTSTANDING.name - } - ) - LOGGER.debug("No. 
of 'PERIOD_OUTSTANDING' compliance policies: %d", count) - - -TRAFFIC_LIGHT = { - "arc_display_name": "Traffic light model 54", - "arc_description": "Traffic flow control light at A603 North East", - "arc_display_type": "Traffic Light", -} - -PROPS = { - "operation": "Record", - "behaviour": "RecordEvidence", -} - - -class TestCompliancePoliciesCompliantAt(TestCompliancePoliciesBase): - def test_compliancepolicies_since(self): - """ - Test compliance_policies creation - """ - tag = uuid4() - compliance_policy = self.arch.compliance_policies.create( - CompliancePolicySince( - description="Maintenance should be performed every 10 seconds", - display_name="Regular Maintenance of Traffic light", - asset_filter=[ - ["attributes.arc_display_type=Traffic Light"], - ], - event_display_type=f"Maintenance Performed {tag}", - time_period_seconds=20, - ) - ) - LOGGER.debug("SINCE_POLICY: %s", json_dumps(compliance_policy, indent=4)) - self.identities.append(compliance_policy["identity"]) - - traffic_light = self.arch.assets.create( - attrs=TRAFFIC_LIGHT, - ) - LOGGER.debug("TRAFFIC_LIGHT: %s", json_dumps(traffic_light, indent=4)) - - maintenance_performed = self.arch.events.create( - traffic_light["identity"], - PROPS, - { - "arc_description": "Maintenance performed on traffic light", - "arc_display_type": f"Maintenance Performed {tag}", - }, - ) - LOGGER.debug( - "MAINTENANCE_PERFORMED: %s", json_dumps(maintenance_performed, indent=4) - ) - - LOGGER.debug("Sleep 10 seconds ...") - sleep(10) - compliance = self.arch.compliance.compliant_at( - traffic_light["identity"], - ) - LOGGER.debug("COMPLIANCE (true): %s", json_dumps(compliance, indent=4)) - - policy_statements = [ - c - for c in compliance["compliance"] - if c["compliance_policy_identity"] == compliance_policy["identity"] - ] - LOGGER.debug("COMPLIANCE (false): %s", json_dumps(policy_statements, indent=4)) - - self.assertEqual( - len(policy_statements), - 1, - msg="Only one policy statement is expected", - ) - - self.assertTrue( - policy_statements[0]["compliant"], - msg="Asset should be compliant", - ) - - LOGGER.debug( - "Sleep 15 seconds so that subsequent falls outside compliance_policy limits ..." 
- ) - sleep(15) - compliance = self.arch.compliance.compliant_at( - traffic_light["identity"], - ) - LOGGER.debug("COMPLIANCE (false): %s", json_dumps(compliance, indent=4)) - - policy_statements = [ - c - for c in compliance["compliance"] - if c["compliance_policy_identity"] == compliance_policy["identity"] - ] - LOGGER.debug("COMPLIANCE (false): %s", json_dumps(policy_statements, indent=4)) - - self.assertEqual( - len(policy_statements), - 1, - msg="Only one policy statement is expected", - ) - - self.assertFalse( - policy_statements[0]["compliant"], - msg=( - "Asset should not be compliant as it was maintained" - "after the compliance policy expired" - ), - ) - - def test_compliancepolicies_current_outstanding(self): - """ - Test compliance_policies creation - """ - tag = uuid4() - compliance_policy = self.arch.compliance_policies.create( - CompliancePolicyCurrentOutstanding( - description="Maintenance should be completed", - display_name="Regular Maintenance of Traffic light", - asset_filter=[ - ["attributes.arc_display_type=Traffic Light"], - ], - event_display_type=f"Maintenance Request {tag}", - closing_event_display_type=f"Maintenance Performed {tag}", - ), - ) - self.identities.append(compliance_policy["identity"]) - LOGGER.debug( - "CURRENT_OUTSTANDING_POLICY: %s", json_dumps(compliance_policy, indent=4) - ) - - traffic_light = self.arch.assets.create( - attrs=TRAFFIC_LIGHT, - ) - LOGGER.debug("TRAFFIC_LIGHT: %s", json_dumps(traffic_light, indent=4)) - - maintenance_request = self.arch.events.create( - traffic_light["identity"], - PROPS, - { - "arc_description": "Maintenance request on traffic light", - "arc_display_type": f"Maintenance Request {tag}", - "arc_correlation_value": str(tag), - }, - ) - LOGGER.debug( - "MAINTENANCE_REQUIRED: %s", json_dumps(maintenance_request, indent=4) - ) - - LOGGER.debug("Sleep 10 seconds ...") - sleep(10) - - compliance = self.arch.compliance.compliant_at( - traffic_light["identity"], - ) - LOGGER.debug("COMPLIANCE (true): %s", json_dumps(compliance, indent=4)) - - policy_statements = [ - c - for c in compliance["compliance"] - if c["compliance_policy_identity"] == compliance_policy["identity"] - ] - LOGGER.debug("COMPLIANCE (false): %s", json_dumps(policy_statements, indent=4)) - - self.assertEqual( - len(policy_statements), - 1, - msg="Only one policy statement is expected", - ) - - self.assertFalse( - policy_statements[0]["compliant"], - msg="Asset should not be compliant", - ) - - maintenance_performed = self.arch.events.create( - traffic_light["identity"], - PROPS, - { - "arc_description": "Maintenance performed on traffic light", - "arc_display_type": f"Maintenance Performed {tag}", - "arc_correlation_value": str(tag), - }, - ) - LOGGER.debug( - "MAINTENANCE_PERFORMED: %s", json_dumps(maintenance_performed, indent=4) - ) - - LOGGER.debug("Sleep 10 seconds ...") - sleep(10) - - compliance = self.arch.compliance.compliant_at( - traffic_light["identity"], - ) - LOGGER.debug("COMPLIANCE (true): %s", json_dumps(compliance, indent=4)) - - policy_statements = [ - c - for c in compliance["compliance"] - if c["compliance_policy_identity"] == compliance_policy["identity"] - ] - LOGGER.debug("COMPLIANCE (true): %s", json_dumps(policy_statements, indent=4)) - - self.assertEqual( - len(policy_statements), - 1, - msg="Only one policy statement is expected", - ) - self.assertTrue( - policy_statements[0]["compliant"], - msg="Asset should be compliant", - ) diff --git a/functests/execnotebooks.py b/functests/execnotebooks.py index 3a58765d..454503c7 100644 
--- a/functests/execnotebooks.py +++ b/functests/execnotebooks.py @@ -161,105 +161,6 @@ def test_create_event_with_verified_domain(self): self.check_notebook_cell(notebook, 11) LOGGER.debug("=================================") - def test_check_asset_compliance_current_outstanding(self): - """ - Test check_asset_compliance_current_outstanding - """ - with testbook( - "archivist/notebooks/Check Asset Compliance using CURRENT OUTSTANDING Policy.ipynb", - execute=True, - ) as notebook: - LOGGER.debug("\ncheck_asset_compliance_current_outstanding") - self.basic_notebook_test(notebook) - self.check_notebook_cell(notebook, 10) - self.check_notebook_cell(notebook, 11) - self.check_notebook_cell(notebook, 12) - self.check_notebook_cell(notebook, 13) - self.check_notebook_cell(notebook, 14) - self.check_notebook_cell(notebook, 15) - self.check_notebook_cell(notebook, 16) - LOGGER.debug("=================================") - - def test_check_asset_compliance_since(self): - """ - Test check_asset_compliance_since - """ - with testbook( - "archivist/notebooks/Check Asset Compliance using SINCE Policy.ipynb", - execute=True, - ) as notebook: - LOGGER.debug("\ncheck_asset_compliance_since") - self.basic_notebook_test(notebook) - self.check_notebook_cell(notebook, 10) - self.check_notebook_cell(notebook, 11) - self.check_notebook_cell(notebook, 12) - self.check_notebook_cell(notebook, 13) - LOGGER.debug("=================================") - - def test_playing_fetch_fiveminutes(self): - """ - Test playing_fetch_fiveminutes - """ - with testbook( - "archivist/notebooks/Playing Fetch Every 5 Minutes.ipynb", execute=True - ) as notebook: - LOGGER.debug("\nplaying_fetch_fiveminutes") - self.basic_notebook_test(notebook) - self.check_notebook_cell(notebook, 7) - LOGGER.debug("=================================") - - def test_feed_the_dog(self): - """ - Test feed_the_dog - """ - with testbook( - "archivist/notebooks/Feeding the Dog.ipynb", - execute=True, - ) as notebook: - LOGGER.debug("\nfeed_the_dog") - self.basic_notebook_test(notebook) - self.check_notebook_cell(notebook, 7) - LOGGER.debug("=================================") - - def test_feed_the_doghourly(self): - """ - Test feed_the_doghourly - """ - with testbook( - "archivist/notebooks/Feeding the Dog Hourly.ipynb", - execute=True, - ) as notebook: - LOGGER.debug("\ncreate_compliance_period_outstanding") - self.basic_notebook_test(notebook) - self.check_notebook_cell(notebook, 7) - LOGGER.debug("=================================") - - def test_check_dogs_weight(self): - """ - Test check_dogs_weight - """ - with testbook( - "archivist/notebooks/Checking the Dog's Weight.ipynb", - execute=True, - ) as notebook: - LOGGER.debug("\ncheck_dogs_weight") - self.basic_notebook_test(notebook) - self.check_notebook_cell(notebook, 7) - LOGGER.debug("=================================") - - def test_feed_dog_timelymanner(self): - """ - Test feed_dog_timelymanner - """ - with testbook( - "archivist/notebooks/Feeding the Dog in a Timely Manner.ipynb", - execute=True, - ) as notebook: - LOGGER.debug("\nfeed_dog_timelymanner") - self.basic_notebook_test(notebook) - self.check_notebook_cell(notebook, 7) - LOGGER.debug("=================================") - @skip("Requires root access credentials - see #7742") def test_share_artist_asset_user(self): """ diff --git a/functests/execrunner.py b/functests/execrunner.py index 58611193..b74b5ff3 100644 --- a/functests/execrunner.py +++ b/functests/execrunner.py @@ -53,27 +53,6 @@ def setUp(self): def tearDown(self): 
self.arch.close() - def test_runner_dynamic_tolerance(self): - """ - Test runner with dynamic tolerance story - - uses DATATRAILS_UNIQUE_ID to set namespace value - - run_steps is used so that exceptions are shown - """ - LOGGER.debug("...") - with open( - "functests/test_resources/dynamic_tolerance_story.yaml", - "r", - encoding="utf-8", - ) as y: - self.arch.runner.run_steps(parse_config(data=y)) - self.assertEqual( - len(self.arch.runner.entities), - 1, - msg="Incorrect number of entities", - ) - def test_runner_synsation(self): """ Test runner with synsation story @@ -115,28 +94,6 @@ def test_runner_synsation(self): msg="Incorrect number of entities", ) - def test_runner_richness(self): - """ - Test runner with richness story - - uses DATATRAILS_UNIQUE_ID to set namespace value - - run_steps is used so that exceptions are shown - """ - - LOGGER.debug("...") - with open( - "functests/test_resources/richness_story.yaml", - "r", - encoding="utf-8", - ) as y: - self.arch.runner.run_steps(parse_config(data=y)) - self.assertEqual( - len(self.arch.runner.entities), - 3, - msg="Incorrect number of entities", - ) - def test_runner_door_entry(self): """ Test runner with door_entry story diff --git a/functests/test_resources/dynamic_tolerance_story.yaml b/functests/test_resources/dynamic_tolerance_story.yaml deleted file mode 100644 index 0a063c1f..00000000 --- a/functests/test_resources/dynamic_tolerance_story.yaml +++ /dev/null @@ -1,146 +0,0 @@ ---- -# Demonstration of applying a Dynamic Tolerance compliance policy to an asset that undergoes -# events that may or may not make the asset compliant or non-compliant. -# -# The step field is a string that represents the method bound to an endpoint. -# -# NB the assets and events endpoints require all values to be strings. Other values may -# be of the correct type such as confirm which is a boolean. -# -# For example attributes.ev_pump is a string - internally archivist will see this -# as a boolean. -steps: - - step: - action: ASSETS_CREATE - description: Create new EV Pump with id 1. - asset_label: ev pump 1 - behaviours: - - RecordEvidence - attributes: - arc_display_name: ev pump 1 - arc_namespace: !ENV ${DATATRAILS_UNIQUE_ID:namespace} - ev_pump: "true" - - # create some policies - - step: - action: COMPLIANCE_POLICIES_CREATE - description: Create a compliance policy that checks an EV pump maintenance requests are serviced within a reasonable time frame. - LOGGER.debug_response: true - delete: true - description: ev maintenance policy - display_name: ev maintenance policy - compliance_type: COMPLIANCE_DYNAMIC_TOLERANCE - asset_filter: - - or: [ "attributes.ev_pump=true" ] - event_display_type: Maintenance Requested - closing_event_display_type: Maintenance Performed - dynamic_window: 700 - dynamic_variability: 1.5 - - # setup the ev pump to have maintenance requests and servicings - - step: - action: EVENTS_CREATE - description: Create Event requesting EV pump 1 needs maintenance. - asset_label: ev pump 1 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_correlation_value: EV Maintenance 1 - arc_description: request maintenance - arc_display_type: Maintenance Requested - - - step: - action: EVENTS_CREATE - description: Create Event after 1 seconds, for EV pump 1, stating maintenance occurred. 
- wait_time: 1 - asset_label: ev pump 1 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_correlation_value: EV Maintenance 1 - arc_description: perform maintenance - arc_display_type: Maintenance Performed - - - step: - action: EVENTS_CREATE - description: Create Event requesting EV pump 1 needs maintenance. - asset_label: ev pump 1 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_correlation_value: EV Maintenance 2 - arc_description: request maintenance - arc_display_type: Maintenance Requested - - - step: - action: EVENTS_CREATE - description: Create Event after 2 seconds, for EV pump 1, stating maintenance occurred. - wait_time: 2 - asset_label: ev pump 1 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_correlation_value: EV Maintenance 2 - arc_description: perform maintenance - arc_display_type: Maintenance Performed - - - step: - action: EVENTS_CREATE - description: Create Event requesting EV pump 1 needs maintenance. - asset_label: ev pump 1 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_correlation_value: EV Maintenance 3 - arc_description: request maintenance - arc_display_type: Maintenance Requested - - - step: - action: EVENTS_CREATE - description: Create Event after 3 seconds, for EV pump 1, stating maintenance occurred. - wait_time: 3 - asset_label: ev pump 1 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_correlation_value: EV Maintenance 3 - arc_description: perform maintenance - arc_display_type: Maintenance Performed - - # check compliance for ev pump - - step: - action: COMPLIANCE_COMPLIANT_AT - description: Check Compliance of EV pump 1. - asset_label: ev pump 1 - - # now create an event that throws the Standard deviation out of whack - - step: - action: EVENTS_CREATE - description: Create Event requesting EV pump 1 needs maintenance. - asset_label: ev pump 1 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_correlation_value: EV Maintenance 4 - arc_description: request maintenance - arc_display_type: Maintenance Requested - - - step: - action: EVENTS_CREATE - description: Create Event after 20 seconds, for EV pump 1, stating maintenance occurred. - wait_time: 20 - asset_label: ev pump 1 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_correlation_value: EV Maintenance 4 - arc_description: perform maintenance - arc_display_type: Maintenance Performed - - # check compliance for ev pump - - step: - action: COMPLIANCE_COMPLIANT_AT - description: Check Compliance of EV pump 1. - asset_label: ev pump 1 - report: true - diff --git a/functests/test_resources/richness_story.yaml b/functests/test_resources/richness_story.yaml deleted file mode 100644 index 391752e2..00000000 --- a/functests/test_resources/richness_story.yaml +++ /dev/null @@ -1,152 +0,0 @@ ---- -# Demonstration of applying a Richness compliance policy to an asset that undergoes -# events that may or may not make the asset compliant or non-compliant. -# -# The step field is a string that represents the method bound to an endpoint. -# -# NB the assets and events endpoints require all values to be strings. Other values may -# be of the correct type such as confirm which is a boolean. -steps: - - step: - action: ASSETS_CREATE - description: Create an empty radiation bag with id 1. 
- asset_label: radiation bag 1 - behaviours: - - RecordEvidence - attributes: - arc_display_name: radiation bag 1 - arc_namespace: !ENV ${DATATRAILS_UNIQUE_ID:namespace} - radioactive: "true" - radiation_level: "0" - weight: "0" - - - step: - action: ASSETS_CREATE - description: Create an empty radiation bag with id 2. - asset_label: radiation bag 2 - behaviours: - - RecordEvidence - attributes: - arc_display_name: radiation bag 2 - arc_namespace: !ENV ${DATATRAILS_UNIQUE_ID:namespace} - radioactive: "true" - radiation_level: "0" - weight: "0" - - - step: - action: ASSETS_CREATE - description: Create an empty radiation bag with id 3. - asset_label: radiation bag 3 - behaviours: - - RecordEvidence - attributes: - arc_display_name: radiation bag 3 - arc_namespace: !ENV ${DATATRAILS_UNIQUE_ID:namespace} - radioactive: "true" - radiation_level: "0" - weight: "0" - - # create some policies - - step: - action: COMPLIANCE_POLICIES_CREATE - description: Create a compliance policy that checks the radiation level of radiation bags is less than 7 rads. - LOGGER.debug_response: true - delete: true - description: radiation level safety policy - display_name: radiation safety policy - compliance_type: COMPLIANCE_RICHNESS - asset_filter: - - or: [ "attributes.radioactive=true" ] - richness_assertions: - - or: [ "radiation_level<7" ] - - - - step: - action: COMPLIANCE_POLICIES_CREATE - description: Create a compliance policy that checks the weight of a radiation bag is less than or equal to 10kg. - LOGGER.debug_response: true - delete: true - description: weight level safety policy - display_name: weight safety policy - compliance_type: COMPLIANCE_RICHNESS - asset_filter: - - or: [ "attributes.radioactive=true" ] - richness_assertions: - - or: [ "weight<=10" ] - - # setup the radiation bags to have a varing amount of radiactive waste - # note the values to the events.create method are string representations of boolean - # and numbers - - step: - action: EVENTS_CREATE - description: Create Event adding 3 rads of radiation to bag 1, increasing its weight by 1kg. - asset_label: radiation bag 1 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_description: add waste to bag - asset_attributes: - radiation_level: "3" - weight: "1" - - - step: - action: EVENTS_CREATE - description: Create Event adding 2 rads of radiation to bag 2, increasing its weight by 5kg. - asset_label: radiation bag 2 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_description: add waste to bag - asset_attributes: - radiation_level: "2" - weight: "5" - - - step: - action: EVENTS_CREATE - description: Create Event adding 5 rads of radiation to bag 3, increasing its weight by 7kg. - asset_label: radiation bag 3 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_description: add waste to bag - asset_attributes: - radiation_level: "5" - weight: "7" - - # check compliance for all 3 bags - - step: - action: COMPLIANCE_COMPLIANT_AT - description: Check Compliance of bag 1. - LOGGER.debug_response: True - asset_label: radiation bag 1 - - - step: - action: COMPLIANCE_COMPLIANT_AT - description: Check Compliance of bag 2. - asset_label: radiation bag 2 - - - step: - action: COMPLIANCE_COMPLIANT_AT - description: Check Compliance of bag 3. - asset_label: radiation bag 3 - - # now attempt to add waste to tip one over the edge - - step: - action: EVENTS_CREATE - description: Now Create Event adding 4 rads of radiation to bag 3 increasing its weight by 1kg. 
- This brings the total radiation level to 9 rads and weight to 8kg. - asset_label: radiation bag 3 - operation: Record - behaviour: RecordEvidence - event_attributes: - arc_description: add waste to bag - asset_attributes: - radiation_level: "9" - weight: "8" - - # check the compliance - - step: - action: COMPLIANCE_COMPLIANT_AT - description: Check Compliance of bag 3. - asset_label: radiation bag 3 - report: true diff --git a/scripts/unittests.sh b/scripts/unittests.sh index 2c3f776a..70f01de0 100755 --- a/scripts/unittests.sh +++ b/scripts/unittests.sh @@ -7,13 +7,13 @@ python3 --version # run single test from cmdline for example: # # all tests in a module (name of file in unittest directory): -# UNITTEST=testcompliance_policy_type task unittests +# UNITTEST=testrunnerstep task unittests # # all tests in a class -# UNITTEST=testcompliance_policy_type.TestCompliancePolicyType task unittests +# UNITTEST=testrunnerstep.TestRunnerStep task unittests # # single test -# UNITTEST=testcompliance_policy_type.TestCompliancePolicyType.test_compliance_policy_type task unittests +# UNITTEST=testrunnerstep.TestRunnerStep.test_runner_step task unittests # export DATATRAILS_ARTIST_ATTACHMENT=archivist/notebooks/test_files/pexels-andrea-turner-707697.jpeg export DATATRAILS_UNIQUE_ID=${SRANDOM} diff --git a/unittests/testarchivist.py b/unittests/testarchivist.py index 884f4962..ddc7eeea 100644 --- a/unittests/testarchivist.py +++ b/unittests/testarchivist.py @@ -105,16 +105,6 @@ def test_archivist(self): "AttachmentsClient(https://app.datatrails.ai)", msg="Incorrect attachments", ) - self.assertEqual( - str(arch.compliance), - "ComplianceClient(https://app.datatrails.ai)", - msg="Incorrect compliance", - ) - self.assertEqual( - str(arch.compliance_policies), - "CompliancePoliciesClient(https://app.datatrails.ai)", - msg="Incorrect compliance_policies", - ) self.assertEqual( str(arch.events), "EventsRestricted(https://app.datatrails.ai)", diff --git a/unittests/testcompliance.py b/unittests/testcompliance.py deleted file mode 100644 index 67263ab3..00000000 --- a/unittests/testcompliance.py +++ /dev/null @@ -1,169 +0,0 @@ -""" -Test compliance -""" - -from logging import getLogger -from os import environ -from unittest import TestCase, mock - -from archivist.about import __version__ as VERSION -from archivist.archivist import Archivist -from archivist.constants import ( - COMPLIANCE_LABEL, - COMPLIANCE_POLICIES_LABEL, - COMPLIANCE_SUBPATH, - ROOT, - USER_AGENT, - USER_AGENT_PREFIX, -) -from archivist.logger import set_logger - -from .mock_response import MockResponse - -# pylint: disable=missing-docstring -# pylint: disable=protected-access -# pylint: disable=unused-variable - -IDENTITY = f"{COMPLIANCE_POLICIES_LABEL}/0000-0000-000000000-00000000" -SUBPATH = f"{COMPLIANCE_SUBPATH}/{COMPLIANCE_LABEL}" -ASSET_ID = "assets/0000-0000-000000000-00000000" - -POLICY_RESPONSE = { - "compliance_policy_identity": IDENTITY, - "compliant": False, - "reason": "reason", -} -POLICY_RESPONSE2 = { - "compliance_policy_identity": IDENTITY, - "compliant": True, - "reason": "", -} -RESPONSE = { - "compliance": [ - POLICY_RESPONSE, - POLICY_RESPONSE2, - ], - "compliant": False, - "compliant_at": "2019-11-27T14:44:19Z", -} - -POLICY = { - "identity": IDENTITY, - "description": "policy description", - "display_name": "policy display_name", - "asset_filter": [ - ["a", "b"], - ["x", "z"], - ], - "event_display_type": "policy event_display_type", - "time_period_seconds": 10, -} - -if "DATATRAILS_LOGLEVEL" in environ and
environ["DATATRAILS_LOGLEVEL"]: - set_logger(environ["DATATRAILS_LOGLEVEL"]) - -LOGGER = getLogger(__name__) - - -class TestCompliance(TestCase): - """ - Test Archivist Compliance - """ - - maxDiff = None - - def setUp(self): - self.arch = Archivist("url", "authauthauth") - - def tearDown(self): - self.arch.close() - - def test_compliance_str(self): - """ - Test compliance str - """ - self.assertEqual( - str(self.arch.compliance), - "ComplianceClient(url)", - msg="Incorrect str", - ) - - def test_compliance_report(self): - """ - Test compliance - """ - with mock.patch.object(self.arch.compliance_policies, "read") as mock_read: - mock_read.return_value = MockResponse(200, **POLICY) - self.arch.compliance.compliant_at_report(RESPONSE) - mock_read.assert_called_once_with(IDENTITY) - - def test_compliance(self): - """ - Test compliance - """ - with mock.patch.object(self.arch.session, "get") as mock_get: - mock_response = MockResponse( - 200, - **RESPONSE, - ) - mock_get.return_value = mock_response - - response = self.arch.compliance.compliant_at( - ASSET_ID, - ) - self.assertEqual( - len(response["compliance"]), - 2, - msg="incorrect number of compliances", - ) - self.assertEqual( - response["compliant"], - False, - msg="Incorrect compliant", - ) - self.assertEqual( - response["compliant_at"], - "2019-11-27T14:44:19Z", - msg="Incorrect compliant_at", - ) - for a in mock_get.call_args_list: - self.assertEqual( - tuple(a), - ( - (f"url/{ROOT}/{SUBPATH}/{ASSET_ID}",), - { - "headers": { - "authorization": "Bearer authauthauth", - USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", - }, - "params": None, - }, - ), - msg="GET method called incorrectly", - ) - - def test_compliance_with_report(self): - """ - Test compliance - """ - with ( - mock.patch.object(self.arch.session, "get") as mock_get, - mock.patch.object(self.arch.compliance_policies, "read") as mock_read, - ): - mock_read.return_value = MockResponse(200, **POLICY) - mock_response = MockResponse( - 200, - **RESPONSE, - ) - mock_get.return_value = mock_response - - response = self.arch.compliance.compliant_at( - ASSET_ID, - report=True, - ) - self.assertEqual( - len(response["compliance"]), - 2, - msg="incorrect number of compliances", - ) - mock_read.assert_called_once_with(IDENTITY) diff --git a/unittests/testcompliance_policies.py b/unittests/testcompliance_policies.py deleted file mode 100644 index 7a08c9b5..00000000 --- a/unittests/testcompliance_policies.py +++ /dev/null @@ -1,379 +0,0 @@ -""" -Test compliance policies -""" - -from unittest import TestCase, mock - -from archivist.about import __version__ as VERSION -from archivist.archivist import Archivist -from archivist.compliance_policies import ( - CompliancePolicy, -) -from archivist.compliance_policy_requests import ( - CompliancePolicySince, -) -from archivist.constants import ( - COMPLIANCE_POLICIES_LABEL, - COMPLIANCE_POLICIES_SUBPATH, - HEADERS_REQUEST_TOTAL_COUNT, - HEADERS_TOTAL_COUNT, - ROOT, - USER_AGENT, - USER_AGENT_PREFIX, -) -from archivist.errors import ArchivistBadRequestError - -from .mock_response import MockResponse - -# pylint: disable=missing-docstring -# pylint: disable=protected-access -# pylint: disable=unused-variable - -SINCE_POLICY = CompliancePolicySince( - description="since description", - display_name="since display_name", - asset_filter=[ - ["a", "b"], - ["x", "z"], - ], - event_display_type="since event_display_type", - time_period_seconds=10, -) -SINCE_POLICY_REQUEST = SINCE_POLICY.dict() - -IDENTITY = f"{COMPLIANCE_POLICIES_LABEL}/xxxxxxxx" 
-SUBPATH = f"{COMPLIANCE_POLICIES_SUBPATH}/{COMPLIANCE_POLICIES_LABEL}" - -SINCE_RESPONSE = { - **SINCE_POLICY_REQUEST, - "identity": IDENTITY, - "compliance_type": "SINCE", -} -SINCE_REQUEST = { - **SINCE_POLICY_REQUEST, -} - - -class TestCompliancePolicy(TestCase): - """ - Test Archivist CompliancePolicy - """ - - maxDiff = None - - def test_compliance_policy(self): - """ - Test compliance_policy - """ - self.assertEqual( - CompliancePolicy(**SINCE_RESPONSE).name, - "since display_name", - msg="Incorrct name property", - ) - - def test_compliance_policy_without_name(self): - """ - Test compliance_policy - """ - self.assertIsNone( - CompliancePolicy(**{"description": "descriptton"}).name, - msg="Incorrct name property", - ) - - -class TestCompliancePolicies(TestCase): - """ - Test Archivist CompliancePolicies Create method - """ - - maxDiff = None - - def setUp(self): - self.arch = Archivist("url", "authauthauth") - - def tearDown(self): - self.arch.close() - - def test_compliance_policies_str(self): - """ - Test compliance_policy str - """ - self.assertEqual( - str(self.arch.compliance_policies), - "CompliancePoliciesClient(url)", - msg="Incorrect str", - ) - - def test_compliance_policies_create(self): - """ - Test compliance_policy creation - """ - with mock.patch.object(self.arch.session, "post") as mock_post: - mock_post.return_value = MockResponse(200, **SINCE_RESPONSE) - - compliance_policy = self.arch.compliance_policies.create( - SINCE_POLICY, - ) - args, kwargs = mock_post.call_args - self.assertEqual( - args, - (f"url/{ROOT}/{SUBPATH}",), - msg="CREATE method args called incorrectly", - ) - self.assertEqual( - kwargs, - { - "json": SINCE_REQUEST, - "headers": { - "authorization": "Bearer authauthauth", - USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", - }, - }, - msg="CREATE method kwargs called incorrectly", - ) - self.assertEqual( - compliance_policy, - SINCE_RESPONSE, - msg="CREATE method called incorrectly", - ) - - def test_compliance_policies_read(self): - """ - Test compliance_policy reading - """ - with mock.patch.object(self.arch.session, "get") as mock_get: - mock_get.return_value = MockResponse(200, **SINCE_RESPONSE) - - self.arch.compliance_policies.read(IDENTITY) - self.assertEqual( - tuple(mock_get.call_args), - ( - ((f"url/{ROOT}/{COMPLIANCE_POLICIES_SUBPATH}/{IDENTITY}"),), - { - "headers": { - "authorization": "Bearer authauthauth", - USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", - }, - "params": None, - }, - ), - msg="GET method called incorrectly", - ) - - def test_compliance_policies_delete(self): - """ - Test compliance_policy deleting - """ - with mock.patch.object(self.arch.session, "delete") as mock_delete: - mock_delete.return_value = MockResponse(200, {}) - - self.arch.compliance_policies.delete(IDENTITY) - self.assertEqual( - tuple(mock_delete.call_args), - ( - ((f"url/{ROOT}/{COMPLIANCE_POLICIES_SUBPATH}/{IDENTITY}"),), - { - "headers": { - "authorization": "Bearer authauthauth", - USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", - }, - }, - ), - msg="DELETE method called incorrectly", - ) - - def test_compliance_policies_read_with_error(self): - """ - Test read method with error - """ - with mock.patch.object(self.arch.session, "get") as mock_get: - mock_get.return_value = MockResponse(400) - with self.assertRaises(ArchivistBadRequestError): - self.arch.compliance_policies.read(IDENTITY) - - def test_compliance_policies_count(self): - """ - Test compliance_policy counting - """ - with mock.patch.object(self.arch.session, "get") as mock_get: - 
mock_get.return_value = MockResponse( - 200, - headers={HEADERS_TOTAL_COUNT: 1}, - compliance_policies=[ - SINCE_RESPONSE, - ], - ) - - count = self.arch.compliance_policies.count() - self.assertEqual( - tuple(mock_get.call_args), - ( - ((f"url/{ROOT}/{SUBPATH}"),), - { - "headers": { - "authorization": "Bearer authauthauth", - HEADERS_REQUEST_TOTAL_COUNT: "true", - USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", - }, - "params": {"page_size": 1}, - }, - ), - msg="GET method called incorrectly", - ) - self.assertEqual( - count, - 1, - msg="Incorrect count", - ) - - def test_compliance_policies_count_by_name(self): - """ - Test compliance_policy counting - """ - with mock.patch.object(self.arch.session, "get") as mock_get: - mock_get.return_value = MockResponse( - 200, - headers={HEADERS_TOTAL_COUNT: 1}, - compliance_policies=[ - SINCE_RESPONSE, - ], - ) - - self.arch.compliance_policies.count( - props={"compliance_type": "SINCE"}, - ) - self.assertEqual( - tuple(mock_get.call_args), - ( - ((f"url/{ROOT}/{SUBPATH}"),), - { - "headers": { - "authorization": "Bearer authauthauth", - HEADERS_REQUEST_TOTAL_COUNT: "true", - USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", - }, - "params": {"page_size": 1, "compliance_type": "SINCE"}, - }, - ), - msg="GET method called incorrectly", - ) - - def test_compliance_policies_list(self): - """ - Test compliance_policy listing - """ - with mock.patch.object(self.arch.session, "get") as mock_get: - mock_get.return_value = MockResponse( - 200, - compliance_policies=[ - SINCE_RESPONSE, - ], - ) - - compliance_policies = list(self.arch.compliance_policies.list()) - self.assertEqual( - len(compliance_policies), - 1, - msg="incorrect number of compliance_policies", - ) - for compliance_policy in compliance_policies: - self.assertEqual( - compliance_policy, - SINCE_RESPONSE, - msg="Incorrect compliance_policy listed", - ) - - for a in mock_get.call_args_list: - self.assertEqual( - tuple(a), - ( - (f"url/{ROOT}/{SUBPATH}",), - { - "headers": { - "authorization": "Bearer authauthauth", - USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", - }, - "params": {}, - }, - ), - msg="GET method called incorrectly", - ) - - def test_compliance_policies_list_by_name(self): - """ - Test compliance_policy listing - """ - with mock.patch.object(self.arch.session, "get") as mock_get: - mock_get.return_value = MockResponse( - 200, - compliance_policies=[ - SINCE_RESPONSE, - ], - ) - - compliance_policies = list( - self.arch.compliance_policies.list( - props={"compliance_type": "SINCE"}, - ) - ) - self.assertEqual( - len(compliance_policies), - 1, - msg="incorrect number of compliance_policies", - ) - for compliance_policy in compliance_policies: - self.assertEqual( - compliance_policy, - SINCE_RESPONSE, - msg="Incorrect compliance_policy listed", - ) - - for a in mock_get.call_args_list: - self.assertEqual( - tuple(a), - ( - ((f"url/{ROOT}/{SUBPATH}"),), - { - "headers": { - "authorization": "Bearer authauthauth", - USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", - }, - "params": {"compliance_type": "SINCE"}, - }, - ), - msg="GET method called incorrectly", - ) - - def test_compliance_policies_read_by_signature(self): - """ - Test compliance policies read_by_signature - """ - with mock.patch.object(self.arch.session, "get") as mock_get: - mock_get.return_value = MockResponse( - 200, - compliance_policies=[ - SINCE_RESPONSE, - ], - ) - - policy = self.arch.compliance_policies.read_by_signature() - self.assertEqual( - policy, - SINCE_RESPONSE, - msg="Incorrect compliance_policy listed", - ) - - 
self.assertEqual( - tuple(mock_get.call_args), - ( - (f"url/{ROOT}/{SUBPATH}",), - { - "headers": { - "authorization": "Bearer authauthauth", - USER_AGENT: f"{USER_AGENT_PREFIX}{VERSION}", - }, - "params": {"page_size": 2}, - }, - ), - msg="GET method called incorrectly", - ) diff --git a/unittests/testcompliance_policy_request.py b/unittests/testcompliance_policy_request.py deleted file mode 100644 index 273b824a..00000000 --- a/unittests/testcompliance_policy_request.py +++ /dev/null @@ -1,171 +0,0 @@ -""" -Test compliance policy request -""" - -# pylint: disable=missing-docstring -# pylint: disable=too-few-public-methods - -from unittest import TestCase - -from archivist import compliance_policy_requests - - -class TestCompliancePolicyRequests(TestCase): - """ - Test CompliancePolicyRequest - """ - - def test_compliance_policy_since(self): - """ - Test CompliancePolicySince - """ - self.assertEqual( - compliance_policy_requests.CompliancePolicySince( - description="since description", - display_name="since display_name", - asset_filter=[ - ["a", "b"], - ["x", "z"], - ], - event_display_type="since event_display_type", - time_period_seconds=10, - ).dict(), - { - "compliance_type": "COMPLIANCE_SINCE", - "description": "since description", - "display_name": "since display_name", - "asset_filter": [ - {"or": ["a", "b"]}, - {"or": ["x", "z"]}, - ], - "event_display_type": "since event_display_type", - "time_period_seconds": 10, - }, - msg="Incorrect dictionary", - ) - - def test_compliance_policy_current_outstanding(self): - """ - Test CompliancePolicyCurrentOutstanding - """ - self.assertEqual( - compliance_policy_requests.CompliancePolicyCurrentOutstanding( - description="current_outstanding description", - display_name="current_outstanding display_name", - asset_filter=[ - ["a", "b"], - ["x", "z"], - ], - event_display_type="current_outstanding event_display_type", - closing_event_display_type="current_outstanding closing_event_display_type", - ).dict(), - { - "compliance_type": "COMPLIANCE_CURRENT_OUTSTANDING", - "description": "current_outstanding description", - "display_name": "current_outstanding display_name", - "asset_filter": [ - {"or": ["a", "b"]}, - {"or": ["x", "z"]}, - ], - "event_display_type": "current_outstanding event_display_type", - "closing_event_display_type": "current_outstanding closing_event_display_type", - }, - msg="Incorrect dictionary", - ) - - def test_compliance_policy_period_outstanding(self): - """ - Test CompliancePolicyequestPeriodOutstanding - """ - self.assertEqual( - compliance_policy_requests.CompliancePolicyPeriodOutstanding( - description="period_outstanding description", - display_name="period_outstanding display_name", - asset_filter=[ - ["a", "b"], - ["x", "z"], - ], - event_display_type="period_outstanding event_display_type", - closing_event_display_type="period_outstanding closing_event_display_type", - time_period_seconds=10, - ).dict(), - { - "compliance_type": "COMPLIANCE_PERIOD_OUTSTANDING", - "description": "period_outstanding description", - "display_name": "period_outstanding display_name", - "asset_filter": [ - {"or": ["a", "b"]}, - {"or": ["x", "z"]}, - ], - "event_display_type": "period_outstanding event_display_type", - "closing_event_display_type": "period_outstanding closing_event_display_type", - "time_period_seconds": 10, - }, - msg="Incorrect dictionary", - ) - - def test_compliance_policy_dynamic_tolerance(self): - """ - Test CompliancePolicyDynamicTolerance - """ - self.assertEqual( - 
compliance_policy_requests.CompliancePolicyDynamicTolerance( - description="dynamic_tolerance description", - display_name="dynamic_tolerance display_name", - asset_filter=[ - ["a", "b"], - ["x", "z"], - ], - event_display_type="dynamic_tolerance event_display_type", - closing_event_display_type="dynamic_tolerance closing_event_display_type", - dynamic_window=86400, - dynamic_variability=0.5, - ).dict(), - { - "compliance_type": "COMPLIANCE_DYNAMIC_TOLERANCE", - "description": "dynamic_tolerance description", - "display_name": "dynamic_tolerance display_name", - "asset_filter": [ - {"or": ["a", "b"]}, - {"or": ["x", "z"]}, - ], - "event_display_type": "dynamic_tolerance event_display_type", - "closing_event_display_type": "dynamic_tolerance closing_event_display_type", - "dynamic_window": 86400, - "dynamic_variability": 0.5, - }, - msg="Incorrect dictionary", - ) - - def test_compliance_policy_richness(self): - """ - Test CompliancePolicyRichness - """ - self.assertEqual( - compliance_policy_requests.CompliancePolicyRichness( - description="richness description", - display_name="richness display_name", - asset_filter=[ - ["a", "b"], - ["x", "z"], - ], - richness_assertions=[ - ["rad<7", "weight>5"], - ["rad>1", "weight<10"], - ], - ).dict(), - { - "compliance_type": "COMPLIANCE_RICHNESS", - "description": "richness description", - "display_name": "richness display_name", - "asset_filter": [ - {"or": ["a", "b"]}, - {"or": ["x", "z"]}, - ], - "richness_assertions": [ - {"or": ["rad<7", "weight>5"]}, - {"or": ["rad>1", "weight<10"]}, - ], - }, - msg="Incorrect dictionary", - ) diff --git a/unittests/testcompliance_policy_type.py b/unittests/testcompliance_policy_type.py deleted file mode 100644 index 1faa93cd..00000000 --- a/unittests/testcompliance_policy_type.py +++ /dev/null @@ -1,71 +0,0 @@ -""" -Test compliance policy type -""" - -# pylint: disable=attribute-defined-outside-init -# pylint: disable=too-few-public-methods - -from unittest import TestCase - -from archivist.compliance_policy_type import CompliancePolicyType - - -class TestCompliancePolicyType(TestCase): - """ - Test compliance policy type - """ - - def test_compliance_policy_type(self): - """ - Test compliance policy type - """ - self.assertEqual( - CompliancePolicyType.COMPLIANCE_SINCE.value, 1, msg="Incorrect value" - ) - self.assertEqual( - CompliancePolicyType.COMPLIANCE_SINCE.name, - "COMPLIANCE_SINCE", - msg="Incorrect name", - ) - - self.assertEqual( - CompliancePolicyType.COMPLIANCE_CURRENT_OUTSTANDING.value, - 2, - msg="Incorrect value", - ) - self.assertEqual( - CompliancePolicyType.COMPLIANCE_CURRENT_OUTSTANDING.name, - "COMPLIANCE_CURRENT_OUTSTANDING", - msg="Incorrect name", - ) - - self.assertEqual( - CompliancePolicyType.COMPLIANCE_PERIOD_OUTSTANDING.value, - 3, - msg="Incorrect value", - ) - self.assertEqual( - CompliancePolicyType.COMPLIANCE_PERIOD_OUTSTANDING.name, - "COMPLIANCE_PERIOD_OUTSTANDING", - msg="Incorrect name", - ) - - self.assertEqual( - CompliancePolicyType.COMPLIANCE_DYNAMIC_TOLERANCE.value, - 4, - msg="Incorrect value", - ) - self.assertEqual( - CompliancePolicyType.COMPLIANCE_DYNAMIC_TOLERANCE.name, - "COMPLIANCE_DYNAMIC_TOLERANCE", - msg="Incorrect name", - ) - - self.assertEqual( - CompliancePolicyType.COMPLIANCE_RICHNESS.value, 5, msg="Incorrect value" - ) - self.assertEqual( - CompliancePolicyType.COMPLIANCE_RICHNESS.name, - "COMPLIANCE_RICHNESS", - msg="Incorrect name", - ) diff --git a/unittests/testnotebooks.py b/unittests/testnotebooks.py index 6ae6ee8a..6866eb21 100644 --- 
a/unittests/testnotebooks.py +++ b/unittests/testnotebooks.py @@ -185,26 +185,6 @@ def test_create_event_with_verified_domain(self): ) as notebook: self.basic_notebook_test(notebook) - def test_check_asset_compliance_current_outstanding(self): - """ - Test check_asset_compliance_current_outstanding - """ - with testbook( - "archivist/notebooks/Check Asset Compliance using CURRENT OUTSTANDING Policy.ipynb", - execute=range(1, 6), - ) as notebook: - self.basic_notebook_test(notebook) - - def test_check_asset_compliance_since(self): - """ - Test check_asset_compliance_since - """ - with testbook( - "archivist/notebooks/Check Asset Compliance using SINCE Policy.ipynb", - execute=range(1, 6), - ) as notebook: - self.basic_notebook_test(notebook) - def test_find_artist_cover_art(self): """ Test find_artist_cover_art @@ -225,56 +205,6 @@ def test_find_asset_addtl_albuminfo(self): ) as notebook: self.basic_notebook_test(notebook) - def test_playing_fetch_fiveminutes(self): - """ - Test playing_fetch_fiveminutes - """ - with testbook( - "archivist/notebooks/Playing Fetch Every 5 Minutes.ipynb", - execute=range(1, 6), - ) as notebook: - self.basic_notebook_test(notebook) - - def test_feed_the_dog(self): - """ - Test feed_the_dog - """ - with testbook( - "archivist/notebooks/Feeding the Dog.ipynb", - execute=range(1, 6), - ) as notebook: - self.basic_notebook_test(notebook) - - def test_feed_the_doghourly(self): - """ - Test feed_the_doghourly - """ - with testbook( - "archivist/notebooks/Feeding the Dog Hourly.ipynb", - execute=range(1, 6), - ) as notebook: - self.basic_notebook_test(notebook) - - def test_checking_dogs_weight(self): - """ - Test checking_dogs_weight - """ - with testbook( - "archivist/notebooks/Checking the Dog's Weight.ipynb", - execute=range(1, 6), - ) as notebook: - self.basic_notebook_test(notebook) - - def test_feed_dog_timelymanner(self): - """ - Test feed_dog_timelymanner - """ - with testbook( - "archivist/notebooks/Feeding the Dog in a Timely Manner.ipynb", - execute=range(1, 6), - ) as notebook: - self.basic_notebook_test(notebook) - def test_share_artist_asset_user(self): """ Test share_artist_asset_user diff --git a/unittests/testrunnercompliance.py b/unittests/testrunnercompliance.py deleted file mode 100644 index cd2aea2c..00000000 --- a/unittests/testrunnercompliance.py +++ /dev/null @@ -1,243 +0,0 @@ -""" -Test runner compliance -""" - -from logging import getLogger -from os import environ -from unittest import TestCase, mock - -# from archivist.errors import ArchivistBadRequestError -# pylint: disable=missing-docstring -# pylint: disable=protected-access -# pylint: disable=unused-variable -from archivist.archivist import Archivist -from archivist.compliance import Compliance -from archivist.compliance_policies import CompliancePolicy -from archivist.logger import set_logger - -if "DATATRAILS_LOGLEVEL" in environ and environ["DATATRAILS_LOGLEVEL"]: - set_logger(environ["DATATRAILS_LOGLEVEL"]) - -LOGGER = getLogger(__name__) - -COMPLIANCE_POLICY_NAME = "ev maintenance policy" -COMPLIANCE_POLICIES_CREATE = { - "display_name": COMPLIANCE_POLICY_NAME, - "description": "ev maintenance policy", - "compliance_type": "COMPLIANCE_DYNAMIC_TOLERANCE", - "asset_filter": [ - {"or": ["attributes.ev_pump=true"]}, - ], - "event_display_type": "Maintenance Requested", - "closing_event_display_type": "Maintenance Performed", - "dynamic_window": 700, - "dynamic_variability": 1.5, -} -IDENTITY = "compliance_policies/c25fb9e7-0a88-4236-8720-1008eb4ddd1d" -COMPLIANCE_POLICIES_RESPONSE = 
{ - "identity": IDENTITY, - "compliance_type": "COMPLIANCE_DYNAMIC_TOLERANCE", - "description": "ev maintenance policy", - "display_name": COMPLIANCE_POLICY_NAME, - "asset_filter": [{"or": ["attributes.ev_pump=true"]}], - "event_display_type": "Maintenance Requested", - "closing_event_display_type": "Maintenance Performed", - "time_period_seconds": "0", - "dynamic_window": "700", - "dynamic_variability": 1.5, - "richness_assertions": [], -} - -COMPLIANCE_COMPLIANT_AT_NAME = "radiation bag 1" -COMPLIANCE_COMPLIANT_AT_ID = "assets/dc0dfc17-1d93-4b7a-8636-f740f40f7f52" -COMPLIANCE_RESPONSE = { - "compliant": True, - "compliance": [ - { - "compliance_policy_identity": ( - "compliance_policies/" "2154d72d-54d2-4da0-b304-3223ab3e09df" - ), - "compliant": True, - "reason": "", - }, - { - "compliance_policy_identity": ( - "compliance_policies/" "460c3071-2435-4b1e-9c93-87b1edf6e5e1" - ), - "compliant": True, - "reason": "", - }, - ], - "next_page_token": "", - "compliant_at": "2022-01-28T09:01:27Z", -} -COMPLIANCE_POLICY_NON_COMPLIANT = ( - "compliance_policies/2154d72d-54d2-4da0-b304-3223ab3e09df" -) -COMPLIANCE_FALSE_RESPONSE = { - "compliant": False, - "compliance": [ - { - "compliance_policy_identity": COMPLIANCE_POLICY_NON_COMPLIANT, - "compliant": False, - "reason": "Test reason is non compliant", - }, - { - "compliance_policy_identity": ( - "compliance_policies/460c3071-2435-4b1e-9c93-87b1edf6e5e1" - ), - "compliant": True, - "reason": "", - }, - ], - "next_page_token": "", - "compliant_at": "2022-01-28T09:01:27Z", -} - - -class TestRunnerCompliance(TestCase): - """ - Test Archivist Runner - """ - - maxDiff = None - - def setUp(self): - self.arch = Archivist("url", "authauthauth") - - def tearDown(self): - self.arch.close() - - @mock.patch("archivist.runner.time_sleep") - def test_runner_compliance_policies_create(self, mock_sleep): - """ - Test runner operation - """ - with ( - mock.patch.object( - self.arch.compliance_policies, "create_from_data" - ) as mock_compliance_policies_create, - mock.patch.object( - self.arch.compliance_policies, "delete" - ) as mock_compliance_policies_delete, - ): - mock_compliance_policies_create.return_value = CompliancePolicy( - **COMPLIANCE_POLICIES_RESPONSE - ) - self.arch.runner( - { - "steps": [ - { - "step": { - "action": "COMPLIANCE_POLICIES_CREATE", - "description": "Testing compliance_policies_create", - "print_response": True, - "delete": True, - }, - **COMPLIANCE_POLICIES_CREATE, - } - ], - } - ) - self.assertEqual( - mock_sleep.call_count, - 0, - msg="time_sleep incorrectly called", - ) - mock_compliance_policies_create.assert_called_once_with( - COMPLIANCE_POLICIES_CREATE - ) - self.assertEqual( - self.arch.runner.deletions[IDENTITY], - self.arch.compliance_policies.delete, - msg="Incorrect compliance_policy delete_method", - ) - mock_compliance_policies_delete.assert_called_once() - - @mock.patch("archivist.runner.time_sleep") - def test_runner_compliance_compliant_at(self, mock_sleep): - """ - Test runner operation - """ - with ( - mock.patch.object( - self.arch.compliance, "compliant_at" - ) as mock_compliance_compliant_at, - mock.patch.object(self.arch.runner, "identity") as mock_identity, - ): - mock_identity.return_value = COMPLIANCE_COMPLIANT_AT_ID - mock_compliance_compliant_at.return_value = Compliance( - **COMPLIANCE_RESPONSE - ) - self.arch.runner( - { - "steps": [ - { - "step": { - "action": "COMPLIANCE_COMPLIANT_AT", - "description": "Testing compliance_compliant_at", - "print_response": True, - "asset_label": 
COMPLIANCE_COMPLIANT_AT_NAME, - }, - } - ], - } - ) - self.assertEqual( - mock_sleep.call_count, - 0, - msg="time_sleep incorrectly called", - ) - mock_compliance_compliant_at.assert_called_once_with( - COMPLIANCE_COMPLIANT_AT_ID - ) - self.assertEqual( - len(self.arch.runner.entities), - 0, - msg="Incorrect compliance created", - ) - - @mock.patch("archivist.runner.time_sleep") - def test_runner_compliance_compliant_at_non_compliant(self, mock_sleep): - """ - Test runner operation - """ - with ( - mock.patch.object( - self.arch.compliance, "compliant_at" - ) as mock_compliance_compliant_at, - mock.patch.object(self.arch.runner, "identity") as mock_identity, - ): - mock_identity.return_value = COMPLIANCE_COMPLIANT_AT_ID - mock_compliance_compliant_at.return_value = Compliance( - **COMPLIANCE_FALSE_RESPONSE - ) - self.arch.runner( - { - "steps": [ - { - "step": { - "action": "COMPLIANCE_COMPLIANT_AT", - "description": "Testing compliance_compliant_at", - "print_response": True, - "asset_label": COMPLIANCE_COMPLIANT_AT_NAME, - }, - "report": True, - } - ], - } - ) - self.assertEqual( - mock_sleep.call_count, - 0, - msg="time_sleep incorrectly called", - ) - mock_compliance_compliant_at.assert_called_once_with( - COMPLIANCE_COMPLIANT_AT_ID, - report=True, - ) - self.assertEqual( - len(self.arch.runner.entities), - 0, - msg="Incorrect compliance created", - ) diff --git a/unittests/testrunnerstep.py b/unittests/testrunnerstep.py index 1b889627..2209c77d 100644 --- a/unittests/testrunnerstep.py +++ b/unittests/testrunnerstep.py @@ -44,60 +44,81 @@ def setUp(self): def tearDown(self): self.arch.close() - def test_runner_step_with_delete_method(self): + def test_runner_step_no_kwargs(self): """ Test runner step """ + + # this action has no keywords and this will test the + # keywords is not None clause + steps = { + "action": "ASSETS_ATTACHMENT_INFO", + } step = _Step( self.arch, - **{ - "action": "COMPLIANCE_POLICIES_CREATE", - "wait_time": 10, - "print_response": True, - "description": "Testing runner events list", - "asset_label": "Existing Asset", - "delete": True, - }, - ) - self.assertEqual( - step.action, - self.arch.compliance_policies.create_from_data, - msg="Incorrect action", + **steps, ) - # a second time to prove memoization is working. self.assertEqual( - step.action, - self.arch.compliance_policies.create_from_data, - msg="Incorrect action", + step.args, + [], + msg="Incorrect args", ) + def identity_method(_unused): + return "identity" + + step.init_args(identity_method, steps) self.assertEqual( - step.delete_method, - self.arch.compliance_policies.delete, - msg="Incorrect delete_method", - ) - # a second time to prove memoization is working.
- self.assertEqual( - step.delete_method, - self.arch.compliance_policies.delete, - msg="Incorrect delete_method", + step.args, + [ + steps, + ], + msg="Incorrect args", ) def test_runner_step(self): """ Test runner step """ + steps = { + "action": "EVENTS_LIST", + "wait_time": 10, + "print_response": True, + "description": "Testing runner events list", + "asset_label": "Existing Asset", + "delete": True, + } step = _Step( self.arch, - **{ - "action": "EVENTS_LIST", - "wait_time": 10, - "print_response": True, - "description": "Testing runner events list", - "asset_label": "Existing Asset", - "delete": True, - }, + **steps, ) + self.assertEqual( + step.args, + [], + msg="Incorrect args", + ) + + def identity_method(_unused): + return "identity" + + step.init_args(identity_method, steps) + self.assertEqual( + step.args, + [ + steps, + ], + msg="Incorrect args", + ) + step.add_arg_identity("another_identity") + self.assertEqual( + step.args, + [ + steps, + "another_identity", + ], + msg="Incorrect args", + ) + self.assertEqual( step.action, self.arch.events.list,