From ad40a3bba67d1da390b73c3315e221ed6c56217e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Sat, 29 Mar 2025 12:24:06 +0100 Subject: [PATCH 01/28] chore: update pyright config --- pyrightconfig.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pyrightconfig.json b/pyrightconfig.json index 1247f650..75a75d97 100644 --- a/pyrightconfig.json +++ b/pyrightconfig.json @@ -1,15 +1,16 @@ { "typeCheckingMode": "strict", + "useLibraryCodeForTypes": true, "exclude": [ ".venv/*", "*.egg-info/*", "build/*", "dist/*", ".tox/*", - "PIconnect/_versions.py" + "PIconnect/_version.py" ], "ignore": [ ], - "pythonVersion": "3.10", + "pythonVersion": "3.11", "deprecateTypingAliases": true } From 1dc7c67b6e5befee3b96928055701ea3395474ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 31 Mar 2025 08:56:26 +0200 Subject: [PATCH 02/28] chore: add .gitkeep to docs folders --- docs/api/.gitkeep | 0 docs/explanation/.gitkeep | 0 docs/howto/.gitkeep | 0 docs/tutorials/.gitkeep | 0 4 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 docs/api/.gitkeep create mode 100644 docs/explanation/.gitkeep create mode 100644 docs/howto/.gitkeep create mode 100644 docs/tutorials/.gitkeep diff --git a/docs/api/.gitkeep b/docs/api/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/docs/explanation/.gitkeep b/docs/explanation/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/docs/howto/.gitkeep b/docs/howto/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/docs/tutorials/.gitkeep b/docs/tutorials/.gitkeep new file mode 100644 index 00000000..e69de29b From 079d5cf1321dfef04d9a965e326e42f662ef1e38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 31 Mar 2025 08:57:14 +0200 Subject: [PATCH 03/28] chore: add jupyter checkpoints and notebooks in the root Notebooks in the root folder should only be used for debugging locally. 
Inclusion in the documentation could be valid. --- .gitignore | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.gitignore b/.gitignore index 837941d4..1527800a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +# Jupyter notebooks +*.ipynb_checkpoints +/*.ipynb + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] From 664e2c5f09cfa3164fa93a1e7c2d6235f8b58d25 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 31 Mar 2025 21:57:43 +0200 Subject: [PATCH 04/28] chore: add debug env to pixi config with jupyter lab --- pixi.lock | 1930 +++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 4 + 2 files changed, 1908 insertions(+), 26 deletions(-) diff --git a/pixi.lock b/pixi.lock index c1d4e431..f9a35207 100644 --- a/pixi.lock +++ b/pixi.lock @@ -55,6 +55,306 @@ environments: - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/b0/79/f0f1ca286b78f6f33c521a36b5cbd5bd697c0d66217d8856f443aeb9dd77/versioneer-0.29-py3-none-any.whl + debug: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + packages: + linux-64: + - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.9.0-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.5-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.3.0-pyh71513ae_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2025.1.31-hbcca054_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.13-py312h2ec8cdc_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh3099207_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.0.2-pyhfb0248b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython_pygments_lexers-1.1.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.10.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.12.0-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.15.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.6-h2dba641_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h767d61c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h767d61c_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.4-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.49.1-hee588c1_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-h8f9b012_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.3-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.6-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.1-h7b32b05_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.7-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.50-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-7.0.0-py312h66e93f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.9-h9e4cc4f_1_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h178313f_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.3.0-py312hbf22597_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8c095d6_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.36.2-pyh29332c3_0.conda + - 
conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.24.0-py312h3b7be25_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.2-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh0d859eb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py312h66e93f0_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241206-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.13.0-h9fa5a19_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.13.0-pyh29332c3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.11.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312h66e93f0_1.conda + - pypi: https://files.pythonhosted.org/packages/9c/c0/06e64a54bced4e8b885c1e7ec03ee1869e52acf69e87da40f92391a214ad/clr_loader-0.2.7.post0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/02/e2/e2cbb8d634151aab9528ef7b8bab52ee4ab10e076509285602c2a3a686e0/numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/cd/f1/bfb6811df4745f92f14c47a29e50e89a36b1533130fcc56452d4660bd2d6/pythonnet-3.0.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: . 
+ win-64: + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.9.0-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py313ha7868ed_5.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.5-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.3.0-pyh71513ae_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py313h5813708_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2025.1.31-h56e8100_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py313ha7868ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/cpython-3.13.2-py313hd8ed1ab_101.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.13-py313h5813708_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh4bbf305_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.0.2-pyhca29cf9_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython_pygments_lexers-1.1.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.10.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py313hfa70ccb_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh5737063_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.12.0-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.15.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.6-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/krb5-1.21.3-hdf4eb48_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.4-he0c23c2_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.6-h537db12_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/liblzma-5.6.4-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libmpdec-4.0.0-h2466b09_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda + - 
conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.49.1-h67fdade_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.2-py313hb4c8b1a_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.3-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.6-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.4.1-ha4e3fda_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.7-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.50-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-7.0.0-py313ha7868ed_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh09c184e_7.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.13.2-h261c0b1_101_cp313.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.13-5_cp313.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pywin32-307-py313h5813708_3.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.15-py313h5813708_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py313hb4c8b1a_2.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.3.0-py313h2100fd5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.36.2-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.24.0-py313h54fc02f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.2-pyhff2d567_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh5737063_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.2-py313ha7868ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241206-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.13.0-h9fa5a19_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.13.0-pyh29332c3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-hbf610ac_24.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.42.34438-hfd919c2_24.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.42.34438-h7142326_24.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.11.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyh7428d3b_8.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/winpty-0.4.3-4.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-ha9f60a1_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py313ha7868ed_1.conda + - pypi: https://files.pythonhosted.org/packages/9c/c0/06e64a54bced4e8b885c1e7ec03ee1869e52acf69e87da40f92391a214ad/clr_loader-0.2.7.post0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/52/17/d0dd10ab6d125c6d11ffb6dfa3423c3571befab8358d4f85cd4471964fcd/numpy-2.2.4-cp313-cp313-win_amd64.whl + - pypi: https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl + - pypi: https://files.pythonhosted.org/packages/cd/f1/bfb6811df4745f92f14c47a29e50e89a36b1533130fcc56452d4660bd2d6/pythonnet-3.0.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/bf/ae/743f16ef8c2e3628df3ddfd652b7d4c555d12c84b53f3d8218498f4ade9b/wrapt-1.17.2-cp313-cp313-win_amd64.whl + - pypi: . 
default: channels: - url: https://conda.anaconda.org/conda-forge/ @@ -807,10 +1107,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.9-h9e4cc4f_1_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8c095d6_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.2-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda @@ -821,10 +1124,7 @@ environments: - pypi: https://files.pythonhosted.org/packages/9c/c0/06e64a54bced4e8b885c1e7ec03ee1869e52acf69e87da40f92391a214ad/clr_loader-0.2.7.post0-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/02/e2/e2cbb8d634151aab9528ef7b8bab52ee4ab10e076509285602c2a3a686e0/numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - pypi: https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: 
https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/cd/f1/bfb6811df4745f92f14c47a29e50e89a36b1533130fcc56452d4660bd2d6/pythonnet-3.0.5-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/eb/38/ac33370d784287baa1c3d538978b5e2ea064d4c1b93ffbd12826c190dd10/pytz-2025.1-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - pypi: . @@ -835,7 +1135,6 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.17.1-py313ha7868ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/clr_loader-0.2.7.post0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/codacy-coverage-1.3.11-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/codecov-2.1.13-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda @@ -846,25 +1145,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2024.2.1-h57928b3_1083.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-31_h641d27c_mkl.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-31_h5e41251_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.4-he0c23c2_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.6-h537db12_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libhwloc-2.11.2-default_ha69328c_1001.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libiconv-1.18-h135ad9c_1.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-31_h1aa476e_mkl.conda - conda: https://conda.anaconda.org/conda-forge/win-64/liblzma-5.6.4-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libmpdec-4.0.0-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.49.1-h67fdade_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libwinpthread-12.0.0.r4.gg4f2fc60ca-h57928b3_9.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/libxml2-2.13.6-he286e8c_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/mkl-2024.2.2-h66d3029_15.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/numpy-2.2.4-py313hefb8edb_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.4.1-ha4e3fda_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pandas-2.2.3-py313hf91d08e_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pip-25.0.1-pyh145f28c_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda @@ -873,13 
+1161,10 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.13.2-h261c0b1_101_cp313.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.13-5_cp313.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/pythonnet-3.0.5-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.13.0-h62715c5_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda @@ -890,6 +1175,11 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.42.34438-hfd919c2_24.conda - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyh7428d3b_8.conda - conda: https://conda.anaconda.org/conda-forge/win-64/zstandard-0.23.0-py313ha7868ed_1.conda + - pypi: https://files.pythonhosted.org/packages/9c/c0/06e64a54bced4e8b885c1e7ec03ee1869e52acf69e87da40f92391a214ad/clr_loader-0.2.7.post0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/52/17/d0dd10ab6d125c6d11ffb6dfa3423c3571befab8358d4f85cd4471964fcd/numpy-2.2.4-cp313-cp313-win_amd64.whl + - pypi: https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl + - pypi: 
https://files.pythonhosted.org/packages/cd/f1/bfb6811df4745f92f14c47a29e50e89a36b1533130fcc56452d4660bd2d6/pythonnet-3.0.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/bf/ae/743f16ef8c2e3628df3ddfd652b7d4c555d12c84b53f3d8218498f4ade9b/wrapt-1.17.2-cp313-cp313-win_amd64.whl - pypi: . packages: @@ -937,6 +1227,121 @@ packages: - pkg:pypi/alabaster?source=hash-mapping size: 18684 timestamp: 1733750512696 +- conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.9.0-pyh29332c3_0.conda + sha256: b28e0f78bb0c7962630001e63af25a89224ff504e135a02e50d4d80b6155d386 + md5: 9749a2c77a7c40d432ea0927662d7e52 + depends: + - exceptiongroup >=1.0.2 + - idna >=2.8 + - python >=3.9 + - sniffio >=1.1 + - typing_extensions >=4.5 + - python + constrains: + - trio >=0.26.1 + - uvloop >=0.21 + license: MIT + license_family: MIT + purls: + - pkg:pypi/anyio?source=hash-mapping + size: 126346 + timestamp: 1742243108743 +- conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda + sha256: 7af62339394986bc470a7a231c7f37ad0173ffb41f6bc0e8e31b0be9e3b9d20f + md5: a7ee488b71c30ada51c48468337b85ba + depends: + - argon2-cffi-bindings + - python >=3.9 + - typing-extensions + constrains: + - argon2_cffi ==999 + license: MIT + license_family: MIT + purls: + - pkg:pypi/argon2-cffi?source=hash-mapping + size: 18594 + timestamp: 1733311166338 +- conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py312h66e93f0_5.conda + sha256: 3cbc3b026f5c3f26de696ead10607db8d80cbb003d87669ac3b02e884f711978 + md5: 1505fc57c305c0a3174ea7aae0a0db25 + depends: + - __glibc >=2.17,<3.0.a0 + - cffi >=1.0.1 + - libgcc >=13 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + license: MIT + license_family: MIT + purls: + - pkg:pypi/argon2-cffi-bindings?source=hash-mapping + size: 
34847 + timestamp: 1725356749774 +- conda: https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-bindings-21.2.0-py313ha7868ed_5.conda + sha256: 36b79f862177b3a104762f68664e445615e7c831ca5fe76dc4596ad531ed46a3 + md5: 6d6dbb065c660e9e358b32bdab9ada31 + depends: + - cffi >=1.0.1 + - python >=3.13.0rc1,<3.14.0a0 + - python_abi 3.13.* *_cp313 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: MIT + license_family: MIT + purls: + - pkg:pypi/argon2-cffi-bindings?source=hash-mapping + size: 34467 + timestamp: 1725357154522 +- conda: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_1.conda + sha256: c4b0bdb3d5dee50b60db92f99da3e4c524d5240aafc0a5fcc15e45ae2d1a3cd1 + md5: 46b53236fdd990271b03c3978d4218a9 + depends: + - python >=3.9 + - python-dateutil >=2.7.0 + - types-python-dateutil >=2.8.10 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/arrow?source=hash-mapping + size: 99951 + timestamp: 1733584345583 +- conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda + sha256: 93b14414b3b3ed91e286e1cbe4e7a60c4e1b1c730b0814d1e452a8ac4b9af593 + md5: 8f587de4bcf981e26228f268df374a9b + depends: + - python >=3.9 + constrains: + - astroid >=2,<4 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/asttokens?source=hash-mapping + size: 28206 + timestamp: 1733250564754 +- conda: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.5-pyh29332c3_0.conda + sha256: 3b7233041e462d9eeb93ea1dfe7b18aca9c358832517072054bb8761df0c324b + md5: d9d0f99095a9bb7e3641bca8c6ad2ac7 + depends: + - python >=3.9 + - typing_extensions >=4.0.0 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/async-lru?source=hash-mapping + size: 17335 + timestamp: 1742153708859 +- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.3.0-pyh71513ae_0.conda + sha256: 99c53ffbcb5dc58084faf18587b215f9ac8ced36bbfb55fa807c00967e419019 + md5: 
a10d11958cadc13fdb43df75f8b1903f + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/attrs?source=compressed-mapping + size: 57181 + timestamp: 1741918625732 - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda sha256: 1c656a35800b7f57f7371605bc6507c8d3ad60fbaaec65876fce7f73df1fc8ac md5: 0a01c169f0ab0f91b26e77a3301fbfe4 @@ -962,6 +1367,30 @@ packages: - pkg:pypi/beautifulsoup4?source=compressed-mapping size: 145482 timestamp: 1738740460562 +- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda + sha256: a05971bb80cca50ce9977aad3f7fc053e54ea7d5321523efc7b9a6e12901d3cd + md5: f0b4c8e370446ef89797608d60a564b3 + depends: + - python >=3.9 + - webencodings + - python + constrains: + - tinycss >=1.1.0,<1.5 + license: Apache-2.0 AND MIT + purls: + - pkg:pypi/bleach?source=hash-mapping + size: 141405 + timestamp: 1737382993425 +- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda + sha256: 0aba699344275b3972bd751f9403316edea2ceb942db12f9f493b63c74774a46 + md5: a30e9406c873940383555af4c873220d + depends: + - bleach ==6.2.0 pyh29332c3_4 + - tinycss2 + license: Apache-2.0 AND MIT + purls: [] + size: 4213 + timestamp: 1737382993425 - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hfdbb021_2.conda sha256: 949913bbd1f74d1af202d3e4bff2e0a4e792ec00271dc4dd08641d4221aa2e12 md5: d21daab070d76490cb39a8f1d1729d79 @@ -1136,6 +1565,28 @@ packages: purls: [] size: 158690 timestamp: 1738298232550 +- conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 + noarch: python + sha256: 561e6660f26c35d137ee150187d89767c988413c978e1b712d53f27ddf70ea17 + md5: 9b347a7ec10940d3f7941ff6c460b551 + depends: + - cached_property >=1.5.2,<1.5.3.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 4134 + timestamp: 1615209571450 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 + sha256: 6dbf7a5070cc43d90a1e4c2ec0c541c69d8e30a0e25f50ce9f6e4a432e42c5d7 + md5: 576d629e47797577ab0f1b351297ef4a + depends: + - python >=3.6 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/cached-property?source=hash-mapping + size: 11065 + timestamp: 1615209567874 - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda sha256: 42a78446da06a2568cb13e69be3355169fbd0ea424b00fc80b7d840f5baaacf3 md5: c207fa5ac7ea99b149344385a9c0880d @@ -1329,6 +1780,18 @@ packages: - pkg:pypi/colorama?source=hash-mapping size: 27011 timestamp: 1733218222191 +- conda: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_1.conda + sha256: 7e87ef7c91574d9fac19faedaaee328a70f718c9b4ddadfdc0ba9ac021bd64af + md5: 74673132601ec2b7fc592755605f4c1b + depends: + - python >=3.9 + - traitlets >=5.3 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/comm?source=hash-mapping + size: 12103 + timestamp: 1733503053903 - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.7.1-py311h2dc5d0c_0.conda sha256: 88eceeaed558d6b313564142a6c013646cbd5289d5f20a61253bcdfe198e6f32 md5: 5f57c67f3880dd62b83b3867ea03d9bc @@ -1419,6 +1882,69 @@ packages: - pkg:pypi/coverage?source=hash-mapping size: 404236 timestamp: 1742157295512 +- conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.13.2-py313hd8ed1ab_101.conda + noarch: generic + sha256: 29bfebfbd410db5e90fa489b239a3a7473bc1ec776bdca24e8c26c68c5654a8c + md5: d6be72c63da6e99ac2a1b87b120d135a + depends: + - python 3.13.2.* + - python_abi * *_cp313 + license: Python-2.0 + purls: [] + size: 47792 + timestamp: 1739800762370 +- conda: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.13-py312h2ec8cdc_0.conda + sha256: 3370f9c9a94146a4136ca57ae6e13b789572ff41804cd949cccad70945ae7fb0 + md5: cfad89e517e83c4927fffdbaaf0a30ef + depends: + - __glibc >=2.17,<3.0.a0 + - 
libgcc >=13 + - libstdcxx >=13 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + license: MIT + license_family: MIT + purls: + - pkg:pypi/debugpy?source=hash-mapping + size: 2650523 + timestamp: 1741148587127 +- conda: https://conda.anaconda.org/conda-forge/win-64/debugpy-1.8.13-py313h5813708_0.conda + sha256: 6aa7d41cd985517e2bff65bbde8c4098e2278d24bbaadc3e3f56bdc8882db903 + md5: b637b3f184d9e40f6a59afcd22ca2d93 + depends: + - python >=3.13,<3.14.0a0 + - python_abi 3.13.* *_cp313 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: MIT + license_family: MIT + purls: + - pkg:pypi/debugpy?source=hash-mapping + size: 3611025 + timestamp: 1741148935247 +- conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda + sha256: c17c6b9937c08ad63cb20a26f403a3234088e57d4455600974a0ce865cb14017 + md5: 9ce473d1d1be1cc3810856a48b3fab32 + depends: + - python >=3.9 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/decorator?source=compressed-mapping + size: 14129 + timestamp: 1740385067843 +- conda: https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2 + sha256: 9717a059677553562a8f38ff07f3b9f61727bd614f505658b0a5ecbcf8df89be + md5: 961b3a227b437d82ad7054484cfa71b2 + depends: + - python >=3.6 + license: PSF-2.0 + license_family: PSF + purls: + - pkg:pypi/defusedxml?source=hash-mapping + size: 24062 + timestamp: 1615232388757 - conda: https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda sha256: fa5966bb1718bbf6967a85075e30e4547901410cc7cb7b16daf68942e9a94823 md5: 24c1ca34138ee57de72a943237cde4cc @@ -1439,6 +1965,41 @@ packages: - pkg:pypi/exceptiongroup?source=hash-mapping size: 20486 timestamp: 1733208916977 +- conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda + sha256: 28d25ea375ebab4bf7479228f8430db20986187b04999136ff5c722ebd32eb60 + md5: ef8b5fca76806159fc25b4f48d8737eb + depends: + - python 
>=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/executing?source=hash-mapping + size: 28348 + timestamp: 1733569440265 +- conda: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda + sha256: 2509992ec2fd38ab27c7cdb42cf6cadc566a1cc0d1021a2673475d9fa87c6276 + md5: d3549fd50d450b6d9e7dddff25dd2110 + depends: + - cached-property >=1.3.0 + - python >=3.9,<4 + license: MPL-2.0 + license_family: MOZILLA + purls: + - pkg:pypi/fqdn?source=hash-mapping + size: 16705 + timestamp: 1733327494780 +- conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_1.conda + sha256: 622516185a7c740d5c7f27016d0c15b45782c1501e5611deec63fd70344ce7c8 + md5: 7ee49e89531c0dcbba9466f6d115d585 + depends: + - python >=3.9 + - typing_extensions + license: MIT + license_family: MIT + purls: + - pkg:pypi/h11?source=hash-mapping + size: 51846 + timestamp: 1733327599467 - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda sha256: 0aa1cdc67a9fe75ea95b5644b734a756200d6ec9d0dff66530aec3d1c1e9df75 md5: b4754fb1bdcb70c8fd54f918301582c6 @@ -1463,12 +2024,43 @@ packages: - pkg:pypi/hpack?source=hash-mapping size: 30731 timestamp: 1737618390337 -- conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda - sha256: 77af6f5fe8b62ca07d09ac60127a30d9069fdc3c68d6b256754d0ffb1f7779f8 - md5: 8e6923fc12f1fe8f8c4e5c9f343256ac +- conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda + sha256: c84d012a245171f3ed666a8bf9319580c269b7843ffa79f26468842da3abd5df + md5: 2ca8e6dbc86525c8b95e3c0ffa26442e depends: + - python >=3.8 + - h11 >=0.13,<0.15 + - h2 >=3,<5 + - sniffio 1.* + - anyio >=3.0,<5.0 + - certifi + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/httpcore?source=hash-mapping + size: 48959 + timestamp: 1731707562362 +- conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + sha256: 
cd0f1de3697b252df95f98383e9edb1d00386bfdd03fdf607fa42fe5fcb09950 + md5: d6989ead454181f4f9bc987d3dc4e285 + depends: + - anyio + - certifi + - httpcore 1.* + - idna - python >=3.9 - license: MIT + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/httpx?source=hash-mapping + size: 63082 + timestamp: 1733663449209 +- conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda + sha256: 77af6f5fe8b62ca07d09ac60127a30d9069fdc3c68d6b256754d0ffb1f7779f8 + md5: 8e6923fc12f1fe8f8c4e5c9f343256ac + depends: + - python >=3.9 + license: MIT license_family: MIT purls: - pkg:pypi/hyperframe?source=hash-mapping @@ -1496,6 +2088,32 @@ packages: - pkg:pypi/imagesize?source=hash-mapping size: 10164 timestamp: 1656939625410 +- conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda + sha256: 598951ebdb23e25e4cec4bbff0ae369cec65ead80b50bc08b441d8e54de5cf03 + md5: f4b39bf00c69f56ac01e020ebfac066c + depends: + - python >=3.9 + - zipp >=0.5 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/importlib-metadata?source=hash-mapping + size: 29141 + timestamp: 1737420302391 +- conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda + sha256: acc1d991837c0afb67c75b77fdc72b4bf022aac71fedd8b9ea45918ac9b08a80 + md5: c85c76dc67d75619a92f51dfbce06992 + depends: + - python >=3.9 + - zipp >=3.1.0 + constrains: + - importlib-resources >=6.5.2,<6.5.3.0a0 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/importlib-resources?source=hash-mapping + size: 33781 + timestamp: 1736252433366 - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda sha256: 0ec8f4d02053cd03b0f3e63168316530949484f80e16f5e2fb199a1d117a89ca md5: 6837f3eff7dcea42ecd714ce1ac2b108 @@ -1515,6 +2133,139 @@ packages: purls: [] size: 1852356 timestamp: 1723739573141 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh3099207_0.conda + sha256: 33cfd339bb4efac56edf93474b37ddc049e08b1b4930cf036c893cc1f5a1f32a + md5: b40131ab6a36ac2c09b7c57d4d3fbf99 + depends: + - __linux + - comm >=0.1.1 + - debugpy >=1.6.5 + - ipython >=7.23.1 + - jupyter_client >=6.1.12 + - jupyter_core >=4.12,!=5.0.* + - matplotlib-inline >=0.1 + - nest-asyncio + - packaging + - psutil + - python >=3.8 + - pyzmq >=24 + - tornado >=6.1 + - traitlets >=5.4.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/ipykernel?source=hash-mapping + size: 119084 + timestamp: 1719845605084 +- conda: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh4bbf305_0.conda + sha256: dc569094125127c0078aa536f78733f383dd7e09507277ef8bcd1789786e7086 + md5: 18df5fc4944a679e085e0e8f31775fc8 + depends: + - __win + - comm >=0.1.1 + - debugpy >=1.6.5 + - ipython >=7.23.1 + - jupyter_client >=6.1.12 + - jupyter_core >=4.12,!=5.0.* + - matplotlib-inline >=0.1 + - nest-asyncio + - packaging + - psutil + - python >=3.8 + - pyzmq >=24 + - tornado >=6.1 + - traitlets >=5.4.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/ipykernel?source=hash-mapping + size: 119853 + timestamp: 1719845858082 +- conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.0.2-pyhca29cf9_0.conda + sha256: 72ad5d59719d7639641f21032de870fadd43ec2349229161728b736f1df720d1 + md5: e5ba968166136311157765e8b2ccb9d0 + depends: + - __win + - colorama + - decorator + - exceptiongroup + - ipython_pygments_lexers + - jedi >=0.16 + - matplotlib-inline + - pickleshare + - prompt-toolkit >=3.0.41,<3.1.0 + - pygments >=2.4.0 + - python >=3.11 + - stack_data + - traitlets >=5.13.0 + - typing_extensions >=4.6 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/ipython?source=hash-mapping + size: 614763 + timestamp: 1741457145171 +- conda: https://conda.anaconda.org/conda-forge/noarch/ipython-9.0.2-pyhfb0248b_0.conda + sha256: 
98f14471e0f492d290c4882f1e2c313fffc67a0f9a3a36e699d7b0c5d42a5196 + md5: b031bcd65b260a0a3353531eab50d465 + depends: + - __unix + - pexpect >4.3 + - decorator + - exceptiongroup + - ipython_pygments_lexers + - jedi >=0.16 + - matplotlib-inline + - pickleshare + - prompt-toolkit >=3.0.41,<3.1.0 + - pygments >=2.4.0 + - python >=3.11 + - stack_data + - traitlets >=5.13.0 + - typing_extensions >=4.6 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/ipython?source=hash-mapping + size: 615519 + timestamp: 1741457126430 +- conda: https://conda.anaconda.org/conda-forge/noarch/ipython_pygments_lexers-1.1.1-pyhd8ed1ab_0.conda + sha256: 894682a42a7d659ae12878dbcb274516a7031bbea9104e92f8e88c1f2765a104 + md5: bd80ba060603cc228d9d81c257093119 + depends: + - pygments + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/ipython-pygments-lexers?source=hash-mapping + size: 13993 + timestamp: 1737123723464 +- conda: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda + sha256: 08e838d29c134a7684bca0468401d26840f41c92267c4126d7b43a6b533b0aed + md5: 0b0154421989637d424ccf0f104be51a + depends: + - arrow >=0.15.0 + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/isoduration?source=hash-mapping + size: 19832 + timestamp: 1733493720346 +- conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda + sha256: 92c4d217e2dc68983f724aa983cca5464dcb929c566627b26a2511159667dba8 + md5: a4f4c5dc9b80bc50e0d3dc4e6e8f1bd9 + depends: + - parso >=0.8.3,<0.9.0 + - python >=3.9 + license: Apache-2.0 AND MIT + purls: + - pkg:pypi/jedi?source=hash-mapping + size: 843646 + timestamp: 1733300981994 - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhd8ed1ab_0.conda sha256: f1ac18b11637ddadc05642e8185a851c7fab5998c6f5470d716812fae943b2af md5: 446bd6c8cb26050d528881df495ce646 @@ -1527,6 +2278,307 @@ packages: - 
pkg:pypi/jinja2?source=compressed-mapping size: 112714 timestamp: 1741263433881 +- conda: https://conda.anaconda.org/conda-forge/noarch/json5-0.10.0-pyhd8ed1ab_1.conda + sha256: 61bca2dac194c44603446944745566d7b4e55407280f6f6cea8bbe4de26b558f + md5: cd170f82d8e5b355dfdea6adab23e4af + depends: + - python >=3.9 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/json5?source=hash-mapping + size: 31573 + timestamp: 1733272196759 +- conda: https://conda.anaconda.org/conda-forge/linux-64/jsonpointer-3.0.0-py312h7900ff3_1.conda + sha256: 76ccb7bffc7761d1d3133ffbe1f7f1710a0f0d9aaa9f7ea522652e799f3601f4 + md5: 6b51f7459ea4073eeb5057207e2e1e3d + depends: + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jsonpointer?source=hash-mapping + size: 17277 + timestamp: 1725303032027 +- conda: https://conda.anaconda.org/conda-forge/win-64/jsonpointer-3.0.0-py313hfa70ccb_1.conda + sha256: a0625cb0e86775b8996b4ee7117f1912b2fa3d76be8d10bf1d7b39578f5d99f7 + md5: 001efbf150f0ca5fd0a0c5e6e713c1d1 + depends: + - python >=3.13.0rc1,<3.14.0a0 + - python_abi 3.13.* *_cp313 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jsonpointer?source=hash-mapping + size: 42805 + timestamp: 1725303293802 +- conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda + sha256: be992a99e589146f229c58fe5083e0b60551d774511c494f91fe011931bd7893 + md5: a3cead9264b331b32fe8f0aabc967522 + depends: + - attrs >=22.2.0 + - importlib_resources >=1.4.0 + - jsonschema-specifications >=2023.03.6 + - pkgutil-resolve-name >=1.3.10 + - python >=3.9 + - referencing >=0.28.4 + - rpds-py >=0.7.1 + license: MIT + license_family: MIT + purls: + - pkg:pypi/jsonschema?source=hash-mapping + size: 74256 + timestamp: 1733472818764 +- conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda + sha256: 
37127133837444cf0e6d1a95ff5a505f8214ed4e89e8e9343284840e674c6891 + md5: 3b519bc21bc80e60b456f1e62962a766 + depends: + - python >=3.9 + - referencing >=0.31.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/jsonschema-specifications?source=hash-mapping + size: 16170 + timestamp: 1733493624968 +- conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_1.conda + sha256: 6e0184530011961a0802fda100ecdfd4b0eca634ed94c37e553b72e21c26627d + md5: a5b1a8065857cc4bd8b7a38d063bb728 + depends: + - fqdn + - idna + - isoduration + - jsonpointer >1.13 + - jsonschema >=4.23.0,<4.23.1.0a0 + - rfc3339-validator + - rfc3986-validator >0.1.0 + - uri-template + - webcolors >=24.6.0 + license: MIT + license_family: MIT + purls: [] + size: 7135 + timestamp: 1733472820035 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_1.conda + sha256: 1565c8b1423a37fca00fe0ab2a17cd8992c2ecf23e7867a1c9f6f86a9831c196 + md5: 0b4c3908e5a38ea22ebb98ee5888c768 + depends: + - importlib-metadata >=4.8.3 + - jupyter_server >=1.1.2 + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyter-lsp?source=hash-mapping + size: 55221 + timestamp: 1733493006611 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda + sha256: 19d8bd5bb2fde910ec59e081eeb59529491995ce0d653a5209366611023a0b3a + md5: 4ebae00eae9705b0c3d6d1018a81d047 + depends: + - importlib-metadata >=4.8.3 + - jupyter_core >=4.12,!=5.0.* + - python >=3.9 + - python-dateutil >=2.8.2 + - pyzmq >=23.0 + - tornado >=6.2 + - traitlets >=5.3 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyter-client?source=hash-mapping + size: 106342 + timestamp: 1733441040958 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda + sha256: 732b1e8536bc22a5a174baa79842d79db2f4956d90293dd82dc1b3f6099bcccd + md5: 0a2980dada0dd7fd0998f0342308b1b1 + depends: + 
- __unix + - platformdirs >=2.5 + - python >=3.8 + - traitlets >=5.3 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyter-core?source=hash-mapping + size: 57671 + timestamp: 1727163547058 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh5737063_1.conda + sha256: 7c903b2d62414c3e8da1f78db21f45b98de387aae195f8ca959794113ba4b3fd + md5: 46d87d1c0ea5da0aae36f77fa406e20d + depends: + - __win + - cpython + - platformdirs >=2.5 + - python >=3.8 + - pywin32 >=300 + - traitlets >=5.3 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyter-core?source=hash-mapping + size: 58269 + timestamp: 1727164026641 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.12.0-pyh29332c3_0.conda + sha256: 37e6ac3ccf7afcc730c3b93cb91a13b9ae827fd306f35dd28f958a74a14878b5 + md5: f56000b36f09ab7533877e695e4e8cb0 + depends: + - jsonschema-with-format-nongpl >=4.18.0 + - packaging + - python >=3.9 + - python-json-logger >=2.0.4 + - pyyaml >=5.3 + - referencing + - rfc3339-validator + - rfc3986-validator >=0.1.1 + - traitlets >=5.3 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyter-events?source=compressed-mapping + size: 23647 + timestamp: 1738765986736 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.15.0-pyhd8ed1ab_0.conda + sha256: be5f9774065d94c4a988f53812b83b67618bec33fcaaa005a98067d506613f8a + md5: 6ba8c206b5c6f52b82435056cf74ee46 + depends: + - anyio >=3.1.0 + - argon2-cffi >=21.1 + - jinja2 >=3.0.3 + - jupyter_client >=7.4.4 + - jupyter_core >=4.12,!=5.0.* + - jupyter_events >=0.11.0 + - jupyter_server_terminals >=0.4.4 + - nbconvert-core >=6.4.4 + - nbformat >=5.3.0 + - overrides >=5.0 + - packaging >=22.0 + - prometheus_client >=0.9 + - python >=3.9 + - pyzmq >=24 + - send2trash >=1.8.2 + - terminado >=0.8.3 + - tornado >=6.2.0 + - traitlets >=5.6.0 + - websocket-client >=1.7 + license: BSD-3-Clause + license_family: BSD + purls: + - 
pkg:pypi/jupyter-server?source=hash-mapping + size: 327747 + timestamp: 1734702771032 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda + sha256: 0890fc79422191bc29edf17d7b42cff44ba254aa225d31eb30819f8772b775b8 + md5: 2d983ff1b82a1ccb6f2e9d8784bdd6bd + depends: + - python >=3.9 + - terminado >=0.8.3 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyter-server-terminals?source=hash-mapping + size: 19711 + timestamp: 1733428049134 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.6-pyhd8ed1ab_0.conda + sha256: cf10c9b4158c4ef2796fde546f2bbe45f43c1402a0c2a175939ebbb308846ada + md5: 8b91a10c966aa65b9ad1a2702e6ef121 + depends: + - async-lru >=1.0.0 + - httpx >=0.25.0 + - importlib-metadata >=4.8.3 + - ipykernel >=6.5.0 + - jinja2 >=3.0.3 + - jupyter-lsp >=2.0.0 + - jupyter_core + - jupyter_server >=2.4.0,<3 + - jupyterlab_server >=2.27.1,<3 + - notebook-shim >=0.2 + - packaging + - python >=3.9 + - setuptools >=40.8.0 + - tomli >=1.2.2 + - tornado >=6.2.0 + - traitlets + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyterlab?source=compressed-mapping + size: 7641308 + timestamp: 1741964212957 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda + sha256: dc24b900742fdaf1e077d9a3458fd865711de80bca95fe3c6d46610c532c6ef0 + md5: fd312693df06da3578383232528c468d + depends: + - pygments >=2.4.1,<3 + - python >=3.9 + constrains: + - jupyterlab >=4.0.8,<5.0.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyterlab-pygments?source=hash-mapping + size: 18711 + timestamp: 1733328194037 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda + sha256: d03d0b7e23fa56d322993bc9786b3a43b88ccc26e58b77c756619a921ab30e86 + md5: 9dc4b2b0f41f0de41d27f3293e319357 + depends: + - babel >=2.10 + - importlib-metadata >=4.8.3 + - jinja2 >=3.0.3 + - json5 >=0.9.0 + 
- jsonschema >=4.18 + - jupyter_server >=1.21,<3 + - packaging >=21.3 + - python >=3.9 + - requests >=2.31 + constrains: + - openapi-core >=0.18.0,<0.19.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyterlab-server?source=hash-mapping + size: 49449 + timestamp: 1733599666357 +- conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 + sha256: 150c05a6e538610ca7c43beb3a40d65c90537497a4f6a5f4d15ec0451b6f5ebb + md5: 30186d27e2c9fa62b45fb1476b7200e3 + depends: + - libgcc-ng >=10.3.0 + license: LGPL-2.1-or-later + purls: [] + size: 117831 + timestamp: 1646151697040 +- conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda + sha256: 99df692f7a8a5c27cd14b5fb1374ee55e756631b9c3d659ed3ee60830249b238 + md5: 3f43953b7d3fb3aaa1d0d0723d91e368 + depends: + - keyutils >=1.6.1,<2.0a0 + - libedit >=3.1.20191231,<3.2.0a0 + - libedit >=3.1.20191231,<4.0a0 + - libgcc-ng >=12 + - libstdcxx-ng >=12 + - openssl >=3.3.1,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 1370023 + timestamp: 1719463201255 +- conda: https://conda.anaconda.org/conda-forge/win-64/krb5-1.21.3-hdf4eb48_0.conda + sha256: 18e8b3430d7d232dad132f574268f56b3eb1a19431d6d5de8c53c29e6c18fa81 + md5: 31aec030344e962fbd7dbbbbd68e60a9 + depends: + - openssl >=3.3.1,<4.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: MIT + license_family: MIT + purls: [] + size: 712034 + timestamp: 1719463874284 - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_4.conda sha256: db73f38155d901a610b2320525b9dd3b31e4949215c870685fd92ea61b5ce472 md5: 01f8d123c96816249efd255a31ad7712 @@ -1570,6 +2622,19 @@ packages: purls: [] size: 3733549 timestamp: 1740088502127 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda + sha256: d789471216e7aba3c184cd054ed61ce3f6dac6f87a50ec69291b9297f8c18724 + md5: 
c277e0a4d549b03ac1e9d6cbbe3d017b + depends: + - ncurses + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - ncurses >=6.5,<7.0a0 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 134676 + timestamp: 1738479519902 - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda sha256: 56541b98447b58e52d824bd59d6382d609e11de1f8adf20b23143e353d2b8d26 md5: db833e03127376d461e1e13e76f09b6c @@ -1748,6 +2813,26 @@ packages: purls: [] size: 33408 timestamp: 1697359010159 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda + sha256: 0105bd108f19ea8e6a78d2d994a6d4a8db16d19a41212070d2d1d48a63c34161 + md5: a587892d3c13b6621a6091be690dbca2 + depends: + - libgcc-ng >=12 + license: ISC + purls: [] + size: 205978 + timestamp: 1716828628198 +- conda: https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.20-hc70643c_0.conda + sha256: 7bcb3edccea30f711b6be9601e083ecf4f435b9407d70fc48fbcf9e5d69a0fc6 + md5: 198bb594f202b205c7d18b936fa4524f + depends: + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: ISC + purls: [] + size: 202344 + timestamp: 1716828757533 - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.49.1-hee588c1_2.conda sha256: a086289bf75c33adc1daed3f1422024504ffb5c3c8b3285c49f025c29708ed16 md5: 962d6ac93c30b1dfc54c9cccafd1003e @@ -1781,6 +2866,16 @@ packages: purls: [] size: 3884556 timestamp: 1740240685253 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_2.conda + sha256: e86f38b007cf97cc2c67cd519f2de12a313c4ee3f5ef11652ad08932a5e34189 + md5: c75da67f045c2627f59e6fcb5f4e3a9b + depends: + - libstdcxx 14.2.0 h8f9b012_2 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 53830 + timestamp: 1740240722530 - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 md5: 
40b61aab5c7ba9ff276c41cfffe6b80b @@ -1853,6 +2948,22 @@ packages: purls: [] size: 55476 timestamp: 1727963768015 +- conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_1.conda + sha256: 4a6bf68d2a2b669fecc9a4a009abd1cf8e72c2289522ff00d81b5a6e51ae78f5 + md5: eb227c3e0bf58f5bd69c0532b157975b + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + constrains: + - jinja2 >=3.0.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/markupsafe?source=hash-mapping + size: 24604 + timestamp: 1733219911494 - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py313h8060acc_1.conda sha256: d812caf52efcea7c9fd0eafb21d45dadfd0516812f667b928bee50e87634fae5 md5: 21b62c55924f01b6eef6827167b46acb @@ -1886,6 +2997,31 @@ packages: - pkg:pypi/markupsafe?source=hash-mapping size: 27930 timestamp: 1733220059655 +- conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda + sha256: 69b7dc7131703d3d60da9b0faa6dd8acbf6f6c396224cf6aef3e855b8c0c41c6 + md5: af6ab708897df59bd6e7283ceab1b56b + depends: + - python >=3.9 + - traitlets + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/matplotlib-inline?source=hash-mapping + size: 14467 + timestamp: 1733417051523 +- conda: https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.3-pyh29332c3_0.conda + sha256: a67484d7dd11e815a81786580f18b6e4aa2392f292f29183631a6eccc8dc37b3 + md5: 7ec6576e328bc128f4982cd646eeba85 + depends: + - python >=3.9 + - typing_extensions + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/mistune?source=hash-mapping + size: 72749 + timestamp: 1742402716323 - conda: https://conda.anaconda.org/conda-forge/win-64/mkl-2024.2.2-h66d3029_15.conda sha256: 20e52b0389586d0b914a49cd286c5ccc9c47949bed60ca6df004d1d295f2edbd md5: 302dff2807f2927b3e9e0d19d60121de @@ -1897,6 +3033,66 @@ packages: purls: [] size: 103106385 
timestamp: 1730232843711 +- conda: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda + sha256: a20cff739d66c2f89f413e4ba4c6f6b59c50d5c30b5f0d840c13e8c9c2df9135 + md5: 6bb0d77277061742744176ab555b723c + depends: + - jupyter_client >=6.1.12 + - jupyter_core >=4.12,!=5.0.* + - nbformat >=5.1 + - python >=3.8 + - traitlets >=5.4 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/nbclient?source=hash-mapping + size: 28045 + timestamp: 1734628936013 +- conda: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.6-pyh29332c3_0.conda + sha256: dcccb07c5a1acb7dc8be94330e62d54754c0e9c9cb2bb6865c8e3cfe44cf5a58 + md5: d24beda1d30748afcc87c429454ece1b + depends: + - beautifulsoup4 + - bleach-with-css !=5.0.0 + - defusedxml + - importlib-metadata >=3.6 + - jinja2 >=3.0 + - jupyter_core >=4.7 + - jupyterlab_pygments + - markupsafe >=2.0 + - mistune >=2.0.3,<4 + - nbclient >=0.5.0 + - nbformat >=5.7 + - packaging + - pandocfilters >=1.4.1 + - pygments >=2.4.1 + - python >=3.9 + - traitlets >=5.1 + - python + constrains: + - pandoc >=2.9.2,<4.0.0 + - nbconvert ==7.16.6 *_0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/nbconvert?source=hash-mapping + size: 200601 + timestamp: 1738067871724 +- conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda + sha256: 7a5bd30a2e7ddd7b85031a5e2e14f290898098dc85bea5b3a5bf147c25122838 + md5: bbe1963f1e47f594070ffe87cdf612ea + depends: + - jsonschema >=2.6 + - jupyter_core >=4.12,!=5.0.* + - python >=3.9 + - python-fastjsonschema >=2.15 + - traitlets >=5.1 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/nbformat?source=hash-mapping + size: 100945 + timestamp: 1733402844974 - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda sha256: 3fde293232fa3fca98635e1167de6b7c7fda83caf24b9d6c91ec9eefb4f4d586 md5: 47e340acb35de30501a76c7c799c41d7 @@ -1907,6 +3103,29 @@ packages: purls: [] size: 
891641 timestamp: 1738195959188 +- conda: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_1.conda + sha256: bb7b21d7fd0445ddc0631f64e66d91a179de4ba920b8381f29b9d006a42788c0 + md5: 598fd7d4d0de2455fb74f56063969a97 + depends: + - python >=3.9 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/nest-asyncio?source=hash-mapping + size: 11543 + timestamp: 1733325673691 +- conda: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_1.conda + sha256: 7b920e46b9f7a2d2aa6434222e5c8d739021dbc5cc75f32d124a8191d86f9056 + md5: e7f89ea5f7ea9401642758ff50a2d9c1 + depends: + - jupyter_server >=1.8,<3 + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/notebook-shim?source=hash-mapping + size: 16817 + timestamp: 1733408419340 - pypi: https://files.pythonhosted.org/packages/02/e2/e2cbb8d634151aab9528ef7b8bab52ee4ab10e076509285602c2a3a686e0/numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl name: numpy version: 2.2.4 @@ -2012,6 +3231,18 @@ packages: purls: [] size: 8515197 timestamp: 1739304103653 +- conda: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_1.conda + sha256: 1840bd90d25d4930d60f57b4f38d4e0ae3f5b8db2819638709c36098c6ba770c + md5: e51f1e4089cad105b6cac64bd8166587 + depends: + - python >=3.9 + - typing_utils + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/overrides?source=hash-mapping + size: 30139 + timestamp: 1734587755455 - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl name: packaging version: '24.2' @@ -2452,10 +3683,54 @@ packages: - pkg:pypi/pandas?source=hash-mapping size: 14215159 timestamp: 1726879653675 +- conda: https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2 + sha256: 2bb9ba9857f4774b85900c2562f7e711d08dd48e2add9bee4e1612fbee27e16f + md5: 
457c2c8c08e54905d6954e79cb5b5db9 + depends: + - python !=3.0,!=3.1,!=3.2,!=3.3 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/pandocfilters?source=hash-mapping + size: 11627 + timestamp: 1631603397334 +- conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda + sha256: 17131120c10401a99205fc6fe436e7903c0fa092f1b3e80452927ab377239bcc + md5: 5c092057b6badd30f75b06244ecd01c9 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/parso?source=hash-mapping + size: 75295 + timestamp: 1733271352153 +- conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda + sha256: 202af1de83b585d36445dc1fda94266697341994d1a3328fabde4989e1b3d07a + md5: d0d408b1f18883a944376da5cf8101ea + depends: + - ptyprocess >=0.5 + - python >=3.9 + license: ISC + purls: + - pkg:pypi/pexpect?source=compressed-mapping + size: 53561 + timestamp: 1733302019362 +- conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda + sha256: e2ac3d66c367dada209fc6da43e645672364b9fd5f9d28b9f016e24b81af475b + md5: 11a9d1d09a3615fc07c3faf79bc0b943 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pickleshare?source=hash-mapping + size: 11748 + timestamp: 1733327448200 - pypi: . 
name: piconnect - version: 0.12.3+5.g8fa1ff4.dirty - sha256: 223321229fde6ca63b82ad450dcb1291f5603baa47a47ee1b5f9832fbce20817 + version: 0.12.3+8.gad40a3b.dirty + sha256: 164cf1763c9eff6525a716677db7139643672af96fe1d4664d1317dd384e8ff5 requires_dist: - pandas>=2,<3 - numpy>=2,<3 @@ -2487,6 +3762,28 @@ packages: - pkg:pypi/pip?source=hash-mapping size: 1256460 timestamp: 1739142857253 +- conda: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda + sha256: adb2dde5b4f7da70ae81309cce6188ed3286ff280355cf1931b45d91164d2ad8 + md5: 5a5870a74432aa332f7d32180633ad05 + depends: + - python >=3.9 + license: MIT AND PSF-2.0 + purls: + - pkg:pypi/pkgutil-resolve-name?source=hash-mapping + size: 10693 + timestamp: 1733344619659 +- conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.7-pyh29332c3_0.conda + sha256: ae7d3e58224d53d6b59e1f5ac5809803bb1972f0ac4fb10cd9b8c87d4122d3e0 + md5: e57da6fe54bb3a5556cf36d199ff07d8 + depends: + - python >=3.9 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/platformdirs?source=compressed-mapping + size: 23291 + timestamp: 1742485085457 - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda sha256: 122433fc5318816b8c69283aaf267c73d87aa2d09ce39f64c9805c9a3b264819 md5: e9dcbce5f45f9ee500e728ae58b605b6 @@ -2498,8 +3795,83 @@ packages: - pkg:pypi/pluggy?source=hash-mapping size: 23595 timestamp: 1733222855563 -- pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - name: pycparser +- conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.1-pyhd8ed1ab_0.conda + sha256: bc8f00d5155deb7b47702cb8370f233935704100dbc23e30747c161d1b6cf3ab + md5: 3e01e386307acc60b2f89af0b2e161aa + depends: + - python >=3.9 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/prometheus-client?source=hash-mapping + size: 49002 + timestamp: 
1733327434163 +- conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.50-pyha770c72_0.conda + sha256: 0749c49a349bf55b8539ce5addce559b77592165da622944a51c630e94d97889 + md5: 7d823138f550b14ecae927a5ff3286de + depends: + - python >=3.9 + - wcwidth + constrains: + - prompt_toolkit 3.0.50 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/prompt-toolkit?source=hash-mapping + size: 271905 + timestamp: 1737453457168 +- conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-7.0.0-py312h66e93f0_0.conda + sha256: 158047d7a80e588c846437566d0df64cec5b0284c7184ceb4f3c540271406888 + md5: 8e30db4239508a538e4a3b3cdf5b9616 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/psutil?source=hash-mapping + size: 466219 + timestamp: 1740663246825 +- conda: https://conda.anaconda.org/conda-forge/win-64/psutil-7.0.0-py313ha7868ed_0.conda + sha256: d8e5d86e939d5f308c7922835a94458afb29d81c90b5d43c43a5537c9c7adbc1 + md5: 3cdf99cf98b01856af9f26c5d8036353 + depends: + - python >=3.13,<3.14.0a0 + - python_abi 3.13.* *_cp313 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/psutil?source=hash-mapping + size: 491314 + timestamp: 1740663777370 +- conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda + sha256: a7713dfe30faf17508ec359e0bc7e0983f5d94682492469bd462cdaae9c64d83 + md5: 7d9daffbb8d8e0af0f769dbbcd173a54 + depends: + - python >=3.9 + license: ISC + purls: + - pkg:pypi/ptyprocess?source=hash-mapping + size: 19457 + timestamp: 1733302371990 +- conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda + sha256: 71bd24600d14bb171a6321d523486f6a06f855e75e547fa0cb2a0953b02047f0 + md5: 3bfdfb8dbcdc4af1ae3f9a8eb3948f04 + depends: + - python >=3.9 + license: MIT + 
license_family: MIT + purls: + - pkg:pypi/pure-eval?source=hash-mapping + size: 16668 + timestamp: 1733569518868 +- pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl + name: pycparser version: '2.22' sha256: c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc requires_python: '>=3.8' @@ -2778,6 +4150,28 @@ packages: - pkg:pypi/python-dateutil?source=hash-mapping size: 222505 timestamp: 1733215763718 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda + sha256: 1b09a28093071c1874862422696429d0d35bd0b8420698003ac004746c5e82a2 + md5: 38e34d2d1d9dca4fb2b9a0a04f604e2c + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/fastjsonschema?source=hash-mapping + size: 226259 + timestamp: 1733236073335 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-json-logger-2.0.7-pyhd8ed1ab_0.conda + sha256: 4790787fe1f4e8da616edca4acf6a4f8ed4e7c6967aa31b920208fc8f95efcca + md5: a61bf9ec79426938ff785eb69dbb1960 + depends: + - python >=3.6 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/python-json-logger?source=hash-mapping + size: 13383 + timestamp: 1677079727691 - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.1-pyhd8ed1ab_0.conda sha256: 1597d6055d34e709ab8915091973552a0b8764c8032ede07c4e99670da029629 md5: 392c91c42edd569a7ec99ed8648f597a @@ -2903,6 +4297,112 @@ packages: - pkg:pypi/pytz?source=hash-mapping size: 186859 timestamp: 1738317649432 +- conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda + sha256: 8d2a8bf110cc1fc3df6904091dead158ba3e614d8402a83e51ed3a8aa93cdeb0 + md5: bc8e3267d44011051f2eb14d22fb0960 + depends: + - python >=3.9 + license: MIT + purls: + - pkg:pypi/pytz?source=compressed-mapping + size: 189015 + timestamp: 1742920947249 +- conda: 
https://conda.anaconda.org/conda-forge/win-64/pywin32-307-py313h5813708_3.conda + sha256: 0a68b324ea47ae720c62522c5d0bb5ea3e4987e1c5870d6490c7f954fbe14cbe + md5: 7113bd6cfe34e80d8211f7c019d14357 + depends: + - python >=3.13,<3.14.0a0 + - python_abi 3.13.* *_cp313 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: PSF-2.0 + license_family: PSF + purls: + - pkg:pypi/pywin32?source=hash-mapping + size: 6060096 + timestamp: 1728636763526 +- conda: https://conda.anaconda.org/conda-forge/win-64/pywinpty-2.0.15-py313h5813708_0.conda + sha256: 4210038442e3f34d67de9aeab2691fa2a6f80dc8c16ab77d5ecbb2b756e04ff0 + md5: cd1fadcdf82a423c2441a95435eeab3c + depends: + - python >=3.13,<3.14.0a0 + - python_abi 3.13.* *_cp313 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + - winpty + license: MIT + license_family: MIT + purls: + - pkg:pypi/pywinpty?source=hash-mapping + size: 217133 + timestamp: 1738661059040 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h178313f_2.conda + sha256: 159cba13a93b3fe084a1eb9bda0a07afc9148147647f0d437c3c3da60980503b + md5: cf2485f39740de96e2a7f2bb18ed2fee + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + - yaml >=0.2.5,<0.3.0a0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pyyaml?source=hash-mapping + size: 206903 + timestamp: 1737454910324 +- conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py313hb4c8b1a_2.conda + sha256: 5b496c96e48f495de41525cb1b603d0147f2079f88a8cf061aaf9e17a2fe1992 + md5: d14f685b5d204b023c641b188a8d0d7c + depends: + - python >=3.13,<3.14.0a0 + - python_abi 3.13.* *_cp313 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + - yaml >=0.2.5,<0.3.0a0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pyyaml?source=hash-mapping + size: 182783 + timestamp: 1737455202579 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.3.0-py312hbf22597_0.conda + sha256: aa96b9d13bc74f514ccbc3ad275d23bb837ec63894e6e7fb43786c7c41959bfd + md5: ec243006dd2b7dc72f1fba385e59f693 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libsodium >=1.0.20,<1.0.21.0a0 + - libstdcxx >=13 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + - zeromq >=4.3.5,<4.4.0a0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/pyzmq?source=hash-mapping + size: 381353 + timestamp: 1741805281237 +- conda: https://conda.anaconda.org/conda-forge/win-64/pyzmq-26.3.0-py313h2100fd5_0.conda + sha256: 899a8beb97f762a2c9326a43cda7434a7b2a9092fa259b2c004d7ff4b036c12a + md5: 6cfc56a59529694b4eb26ed194845523 + depends: + - libsodium >=1.0.20,<1.0.21.0a0 + - python >=3.13,<3.14.0a0 + - python_abi 3.13.* *_cp313 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + - zeromq >=4.3.5,<4.3.6.0a0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/pyzmq?source=hash-mapping + size: 369704 + timestamp: 1741805714688 - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8c095d6_2.conda sha256: 2d6d0c026902561ed77cd646b5021aef2d4db22e57a5b0178dfc669231e06d2c md5: 283b96675859b20a825f8fa30f311446 @@ -2914,6 +4414,21 @@ packages: purls: [] size: 282480 timestamp: 1740379431762 +- conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.36.2-pyh29332c3_0.conda + sha256: e20909f474a6cece176dfc0dc1addac265deb5fa92ea90e975fbca48085b20c3 + md5: 9140f1c09dd5489549c6a33931b943c7 + depends: + - attrs >=22.2.0 + - python >=3.9 + - rpds-py >=0.7.0 + - typing_extensions >=4.4.0 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/referencing?source=hash-mapping + size: 51668 + timestamp: 1737836872415 - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda sha256: d701ca1136197aa121bbbe0e8c18db6b5c94acbd041c2b43c70e5ae104e1d8ad md5: 
a9b9368f3701a417eac9edbcae7cb737 @@ -2931,6 +4446,29 @@ packages: - pkg:pypi/requests?source=hash-mapping size: 58723 timestamp: 1733217126197 +- conda: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_1.conda + sha256: 2e4372f600490a6e0b3bac60717278448e323cab1c0fecd5f43f7c56535a99c5 + md5: 36de09a8d3e5d5e6f4ee63af49e59706 + depends: + - python >=3.9 + - six + license: MIT + license_family: MIT + purls: + - pkg:pypi/rfc3339-validator?source=hash-mapping + size: 10209 + timestamp: 1733600040800 +- conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-validator-0.1.1-pyh9f0ad1d_0.tar.bz2 + sha256: 2a5b495a1de0f60f24d8a74578ebc23b24aa53279b1ad583755f223097c41c37 + md5: 912a71cc01012ee38e6b90ddd561e36f + depends: + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/rfc3986-validator?source=hash-mapping + size: 7818 + timestamp: 1598024297745 - conda: https://conda.anaconda.org/conda-forge/noarch/roman-numerals-py-3.1.0-pyhd8ed1ab_0.conda sha256: 0116a9ca9bf3487e18979b58b2f280116dba55cb53475af7a6d835f7aa133db8 md5: 5f0f24f8032c2c1bb33f59b75974f5fc @@ -2941,6 +4479,38 @@ packages: - pkg:pypi/roman-numerals-py?source=hash-mapping size: 13348 timestamp: 1740240332327 +- conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.24.0-py312h3b7be25_0.conda + sha256: 10dad6a9d40e7c1856cb1f5f941ea06500610e13ee6ec4961fba59fccbaa0dc9 + md5: 5f5c19cbbd3526fad9c8ca0cca3e7346 + depends: + - python + - libgcc >=13 + - __glibc >=2.17,<3.0.a0 + - python_abi 3.12.* *_cp312 + constrains: + - __glibc >=2.17 + license: MIT + purls: + - pkg:pypi/rpds-py?source=hash-mapping + size: 394023 + timestamp: 1743037659894 +- conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.24.0-py313h54fc02f_0.conda + sha256: bcceb24e0462794507642caad40b4e0910942c5b70ba5e8640870157750bad5b + md5: 67eb9aea984cdc3ce949ba23402e8d89 + depends: + - python + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 
+ - vc14_runtime >=14.29.30139 + - ucrt >=10.0.20348.0 + - python_abi 3.13.* *_cp313 + license: MIT + purls: + - pkg:pypi/rpds-py?source=hash-mapping + size: 255547 + timestamp: 1743037492141 - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.11.2-py312hf79aa60_0.conda sha256: 72e1934499126cb9a3a5aa00e535fc430617206f0ecd8f34f5afd6bdb572a6a8 md5: ce118d87ae26bd6204ac95aa7d7bd32e @@ -2973,6 +4543,31 @@ packages: - pkg:pypi/ruff?source=hash-mapping size: 7913132 timestamp: 1742504238312 +- conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_1.conda + sha256: 00926652bbb8924e265caefdb1db100f86a479e8f1066efe395d5552dde54d02 + md5: 938c8de6b9de091997145b3bf25cdbf9 + depends: + - __linux + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/send2trash?source=hash-mapping + size: 22736 + timestamp: 1733322148326 +- conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_1.conda + sha256: ba8b93df52e0d625177907852340d735026c81118ac197f61f1f5baea19071ad + md5: e6a4e906051565caf5fdae5b0415b654 + depends: + - __win + - python >=3.9 + - pywin32 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/send2trash?source=hash-mapping + size: 23359 + timestamp: 1733322590167 - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.2-pyhff2d567_0.conda sha256: 91d664ace7c22e787775069418daa9f232ee8bafdd0a6a080a5ed2395a6fa6b2 md5: 9bddfdbf4e061821a1a443f93223be61 @@ -3000,6 +4595,17 @@ packages: - pkg:pypi/six?source=hash-mapping size: 16385 timestamp: 1733381032766 +- conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda + sha256: c2248418c310bdd1719b186796ae50a8a77ce555228b6acd32768e2543a15012 + md5: bf7a226e58dfb8346c70df36065d86c9 + depends: + - python >=3.9 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/sniffio?source=hash-mapping + size: 15019 + timestamp: 1733244175724 - conda: 
https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2 sha256: a0fd916633252d99efb6223b1050202841fa8d2d53dacca564b0ed77249d3228 md5: 4d22a9315e78c6827f806065957d566e @@ -3121,6 +4727,20 @@ packages: - pkg:pypi/sphinxcontrib-serializinghtml?source=hash-mapping size: 28669 timestamp: 1733750596111 +- conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda + sha256: 570da295d421661af487f1595045760526964f41471021056e993e73089e9c41 + md5: b1b505328da7a6b246787df4b5a49fbc + depends: + - asttokens + - executing + - pure_eval + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/stack-data?source=hash-mapping + size: 26988 + timestamp: 1733569565672 - conda: https://conda.anaconda.org/conda-forge/win-64/tbb-2021.13.0-h62715c5_1.conda sha256: 03cc5442046485b03dd1120d0f49d35a7e522930a2ab82f275e938e17b07b302 md5: 9190dd0a23d925f7602f9628b3aed511 @@ -3134,6 +4754,46 @@ packages: purls: [] size: 151460 timestamp: 1732982860332 +- conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh0d859eb_0.conda + sha256: b300557c0382478cf661ddb520263508e4b3b5871b471410450ef2846e8c352c + md5: efba281bbdae5f6b0a1d53c6d4a97c93 + depends: + - __linux + - ptyprocess + - python >=3.8 + - tornado >=6.1.0 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/terminado?source=hash-mapping + size: 22452 + timestamp: 1710262728753 +- conda: https://conda.anaconda.org/conda-forge/noarch/terminado-0.18.1-pyh5737063_0.conda + sha256: 8cb078291fd7882904e3de594d299c8de16dd3af7405787fce6919a385cfc238 + md5: 4abd500577430a942a995fd0d09b76a2 + depends: + - __win + - python >=3.8 + - pywinpty >=1.1.0 + - tornado >=6.1.0 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/terminado?source=hash-mapping + size: 22883 + timestamp: 1710262943966 +- conda: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda + sha256: 
cad582d6f978276522f84bd209a5ddac824742fe2d452af6acf900f8650a73a2 + md5: f1acf5fdefa8300de697982bcb1761c9 + depends: + - python >=3.5 + - webencodings >=0.4 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/tinycss2?source=hash-mapping + size: 28285 + timestamp: 1729802975370 - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e md5: d453b98d9c83e71da0741bb0ff4d76bc @@ -3179,6 +4839,56 @@ packages: - pkg:pypi/tomli?source=hash-mapping size: 19167 timestamp: 1733256819729 +- conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py312h66e93f0_0.conda + sha256: 062a3a3a37fa8615ce57929ba7e982c76f5a5810bcebd435950f6d6c4147c310 + md5: e417822cb989e80a0d2b1b576fdd1657 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/tornado?source=hash-mapping + size: 840414 + timestamp: 1732616043734 +- conda: https://conda.anaconda.org/conda-forge/win-64/tornado-6.4.2-py313ha7868ed_0.conda + sha256: 062e8b77b825463fc59f373d4033fae7cf65a4170e761814bcbf25cd0627bd1d + md5: 3d63fe6a4757924a085ab10196049854 + depends: + - python >=3.13,<3.14.0a0 + - python_abi 3.13.* *_cp313 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/tornado?source=hash-mapping + size: 865881 + timestamp: 1732616355868 +- conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda + sha256: f39a5620c6e8e9e98357507262a7869de2ae8cc07da8b7f84e517c9fd6c2b959 + md5: 019a7385be9af33791c989871317e1ed + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/traitlets?source=hash-mapping + size: 110051 + timestamp: 1733367480074 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241206-pyhd8ed1ab_0.conda + sha256: 8b98cd9464837174ab58aaa912fc95d5831879864676650a383994033533b8d1 + md5: 1dbc4a115e2ad9fb7f9d5b68397f66f9 + depends: + - python >=3.9 + license: Apache-2.0 AND MIT + purls: + - pkg:pypi/types-python-dateutil?source=hash-mapping + size: 22104 + timestamp: 1733612458611 - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda noarch: python sha256: c8e9c1c467b5f960b627d7adc1c65fece8e929a3de89967e91ef0f726422fd32 @@ -3190,6 +4900,15 @@ packages: purls: [] size: 10075 timestamp: 1733188758872 +- conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.13.0-h9fa5a19_1.conda + sha256: 4dc1002493f05bf4106e09f0de6df57060c9aab97ad709392ab544ceb62faadd + md5: 3fbcc45b908040dca030d3f78ed9a212 + depends: + - typing_extensions ==4.13.0 pyh29332c3_1 + license: PSF-2.0 + purls: [] + size: 89631 + timestamp: 1743201626659 - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda sha256: 337be7af5af8b2817f115b3b68870208b30c31d3439bec07bfb2d8f4823e3568 md5: d17f13df8b65464ca316cbc000a3cb64 @@ -3201,11 +4920,38 @@ packages: - pkg:pypi/typing-extensions?source=hash-mapping size: 39637 timestamp: 1733188758212 +- conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.13.0-pyh29332c3_1.conda + sha256: 18eb76e8f19336ecc9733c02901b30503cdc4c1d8de94f7da7419f89b3ff4c2f + md5: 4c446320a86cc5d48e3b80e332d6ebd7 + depends: + - python >=3.9 + - python + license: PSF-2.0 + purls: + - pkg:pypi/typing-extensions?source=hash-mapping + size: 52077 + timestamp: 1743201626659 +- conda: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda + sha256: 3088d5d873411a56bf988eee774559335749aed6f6c28e07bf933256afb9eb6c + md5: f6d7aa696c67756a650e91e15e88223c + depends: + - python >=3.9 + license: Apache-2.0 + license_family: APACHE + purls: + - 
pkg:pypi/typing-utils?source=hash-mapping + size: 15183 + timestamp: 1733331395943 - pypi: https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl name: tzdata version: '2025.1' sha256: 7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639 requires_python: '>=2' +- pypi: https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl + name: tzdata + version: '2025.2' + sha256: 1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8 + requires_python: '>=2' - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda sha256: c4b1ae8a2931fe9b274c44af29c5475a85b37693999f8c792dad0f8c6734b1de md5: dbcace4706afdfb7eb891f7b37d07c04 @@ -3222,6 +4968,17 @@ packages: purls: [] size: 559710 timestamp: 1728377334097 +- conda: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda + sha256: e0eb6c8daf892b3056f08416a96d68b0a358b7c46b99c8a50481b22631a4dfc0 + md5: e7cb0f5745e4c5035a460248334af7eb + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/uri-template?source=hash-mapping + size: 23990 + timestamp: 1733323714454 - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda sha256: 114919ffa80c328127dab9c8e7a38f9d563c617691fb81fccb11c1e86763727e md5: 32674f8dbfb7b26410ed580dd3c10a29 @@ -3268,6 +5025,60 @@ packages: requires_dist: - tomli ; python_full_version < '3.11' and extra == 'toml' requires_python: '>=3.7' +- conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.42.34438-h7142326_24.conda + sha256: a7104d3d605d191c8ee8d85d4175df3630d61830583494a5d1e62cd9f1260420 + md5: 1dd2e838eb13190ae1f1e2760c036fdc + depends: + - vc14_runtime >=14.42.34438 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 17474 + timestamp: 1741043406612 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda + sha256: f21e63e8f7346f9074fd00ca3b079bd3d2fa4d71f1f89d5b6934bf31446dc2a5 + md5: b68980f2495d096e71c7fd9d7ccf63e6 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/wcwidth?source=hash-mapping + size: 32581 + timestamp: 1733231433877 +- conda: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.11.1-pyhd8ed1ab_0.conda + sha256: 08315dc2e61766a39219b2d82685fc25a56b2817acf84d5b390176080eaacf99 + md5: b49f7b291e15494aafb0a7d74806f337 + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/webcolors?source=hash-mapping + size: 18431 + timestamp: 1733359823938 +- conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda + sha256: 19ff205e138bb056a46f9e3839935a2e60bd1cf01c8241a5e172a422fed4f9c6 + md5: 2841eb5bfc75ce15e9a0054b98dcd64d + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/webencodings?source=hash-mapping + size: 15496 + timestamp: 1733236131358 +- conda: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_1.conda + sha256: 1dd84764424ffc82030c19ad70607e6f9e3b9cb8e633970766d697185652053e + md5: 84f8f77f0a9c6ef401ee96611745da8f + depends: + - python >=3.9 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/websocket-client?source=hash-mapping + size: 46718 + timestamp: 1733157432924 - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda sha256: 1b34021e815ff89a4d902d879c3bd2040bc1bd6169b32e9427497fa05c55f1ce md5: 75cb7132eb58d97896e173ef12ac9986 @@ -3290,6 +5101,13 @@ packages: - pkg:pypi/win-inet-pton?source=hash-mapping size: 9555 timestamp: 1733130678956 +- conda: https://conda.anaconda.org/conda-forge/win-64/winpty-0.4.3-4.tar.bz2 + sha256: 9df10c5b607dd30e05ba08cbd940009305c75db242476f4e845ea06008b0a283 + md5: 1cee351bf20b830d991dbe0bc8cd7dfe 
+ license: MIT + license_family: MIT + purls: [] + size: 1176306 - pypi: https://files.pythonhosted.org/packages/15/06/1dbf478ea45c03e78a6a8c4be4fdc3c3bddea5c8de8a93bc971415e47f0f/wrapt-1.17.2-cp312-cp312-win_amd64.whl name: wrapt version: 1.17.2 @@ -3320,6 +5138,66 @@ packages: version: 1.17.2 sha256: 8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc requires_python: '>=3.8' +- conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 + sha256: a4e34c710eeb26945bdbdaba82d3d74f60a78f54a874ec10d373811a5d217535 + md5: 4cb3ad778ec2d5a7acbdf254eb1c42ae + depends: + - libgcc-ng >=9.4.0 + license: MIT + license_family: MIT + purls: [] + size: 89141 + timestamp: 1641346969816 +- conda: https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2 + sha256: 4e2246383003acbad9682c7c63178e2e715ad0eb84f03a8df1fbfba455dfedc5 + md5: adbfb9f45d1004a26763652246a33764 + depends: + - vc >=14.1,<15.0a0 + - vs2015_runtime >=14.16.27012 + license: MIT + license_family: MIT + purls: [] + size: 63274 + timestamp: 1641347623319 +- conda: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda + sha256: a4dc72c96848f764bb5a5176aa93dd1e9b9e52804137b99daeebba277b31ea10 + md5: 3947a35e916fcc6b9825449affbf4214 + depends: + - __glibc >=2.17,<3.0.a0 + - krb5 >=1.21.3,<1.22.0a0 + - libgcc >=13 + - libsodium >=1.0.20,<1.0.21.0a0 + - libstdcxx >=13 + license: MPL-2.0 + license_family: MOZILLA + purls: [] + size: 335400 + timestamp: 1731585026517 +- conda: https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.5-ha9f60a1_7.conda + sha256: 15cc8e2162d0a33ffeb3f7b7c7883fd830c54a4b1be6a4b8c7ee1f4fef0088fb + md5: e03f2c245a5ee6055752465519363b1c + depends: + - krb5 >=1.21.3,<1.22.0a0 + - libsodium >=1.0.20,<1.0.21.0a0 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: MPL-2.0 + license_family: MOZILLA + purls: [] + size: 2527503 + timestamp: 1731585151036 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda + sha256: 567c04f124525c97a096b65769834b7acb047db24b15a56888a322bf3966c3e1 + md5: 0c3cc595284c5e8f0f9900a9b228a332 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/zipp?source=hash-mapping + size: 21809 + timestamp: 1732827613585 - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py311h9ecbd09_1.conda sha256: 1a824220227f356f35acec5ff6a4418b1ccd0238fd752ceebeb04a0bd37acf0f md5: 6d229edd907b6bb39961b74e3d52de9c diff --git a/pyproject.toml b/pyproject.toml index 0854c044..a5b197d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,6 +96,9 @@ python = "3.13.*" [tool.pixi.feature.py313.tasks] test313 = "pytest" +[tool.pixi.feature.debug.dependencies] +jupyterlab = "*" + [tool.pixi.environments] build = { features = ["build"], no-default-feature = true } test = { features = ["test"], solve-group = "default" } @@ -104,6 +107,7 @@ docs = ["docs"] py311 = ["py311", "test"] py313 = ["py313", "test"] py312 = ["py312", "test"] +debug = { features = ["debug"], solve-group = "default" } [tool.pytest.ini_options] minversion = "6.0" From a19147ed3f5e5bae1b26a8d2433e66a20a443e77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 31 Mar 2025 21:58:55 +0200 Subject: [PATCH 05/28] fix: import fake SDK directly during type checking --- PIconnect/AFSDK.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/PIconnect/AFSDK.py b/PIconnect/AFSDK.py index 33e560b0..b279c252 100644 --- a/PIconnect/AFSDK.py +++ b/PIconnect/AFSDK.py @@ -70,8 +70,8 @@ def __fallback(): if typing.TYPE_CHECKING: # This branch is separate from previous one as otherwise no typechecking takes place # on the main logic. 
- _af, _System, _AF_SDK_version = __fallback() - -AF = _af -System = _System -AF_SDK_VERSION = _AF_SDK_version + from ._typing import AF, AF_SDK_VERSION, System +else: + AF = _af + System = _System + AF_SDK_VERSION = _AF_SDK_version From 31c0dfe94f84034671b2ea67c18f01ebcb601881 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 31 Mar 2025 22:04:05 +0200 Subject: [PATCH 06/28] feat: expand type hints for AF SDK Primarily added the Search module --- PIconnect/_typing/AF.py | 20 ++++++------------ PIconnect/_typing/Asset.py | 29 +++++++++++++++++++++----- PIconnect/_typing/Database.py | 12 +++++++++++ PIconnect/_typing/Search.py | 38 +++++++++++++++++++++++++++++++++++ 4 files changed, 80 insertions(+), 19 deletions(-) create mode 100644 PIconnect/_typing/Database.py create mode 100644 PIconnect/_typing/Search.py diff --git a/PIconnect/_typing/AF.py b/PIconnect/_typing/AF.py index aaa89a93..e2c15b71 100644 --- a/PIconnect/_typing/AF.py +++ b/PIconnect/_typing/AF.py @@ -2,13 +2,15 @@ from collections.abc import Iterator -from . import PI, Asset, Data, EventFrame, Time, UnitsOfMeasure +from . 
import PI, Asset, Data, EventFrame, Search, Time, UnitsOfMeasure +from .Database import AFDatabase __all__ = [ "Asset", "Data", "EventFrame", "PI", + "Search", "Time", "UnitsOfMeasure", "AFDatabase", @@ -28,17 +30,6 @@ def __init__(self, elements: list[AFCategory]) -> None: self._values = elements -class AFDatabase: - """Mock class of the AF.AFDatabase class.""" - - def __init__(self, name: str) -> None: - self.Name = name - self.Elements = Asset.AFElements( - [Asset.AFElement("TestElement"), Asset.AFElement("BaseElement")] - ) - self.Tables = Asset.AFTables([Asset.AFTable("TestTable")]) - - class PISystem: """Mock class of the AF.PISystem class.""" @@ -46,10 +37,11 @@ class InternalDatabases: """Mock class for the AF.PISystem.Databases property.""" def __init__(self) -> None: - self.DefaultDatabase = AFDatabase("TestDatabase") + self.DefaultDatabase: AFDatabase | None = AFDatabase("TestDatabase") def __iter__(self) -> Iterator[AFDatabase]: - return (x for x in [self.DefaultDatabase]) + if self.DefaultDatabase is not None: + yield from [self.DefaultDatabase] def __init__(self, name: str) -> None: self.Name = name diff --git a/PIconnect/_typing/Asset.py b/PIconnect/_typing/Asset.py index fb2cf445..57f0daf5 100644 --- a/PIconnect/_typing/Asset.py +++ b/PIconnect/_typing/Asset.py @@ -23,13 +23,19 @@ class AFAttribute: - def __init__(self, name: str, parent: "AFAttribute | None" = None) -> None: + def __init__( + self, + name: str, + parent: "AFAttribute | None" = None, + _element: "AFBaseElement | None" = None, + ) -> None: self.Attributes: AFAttributes + self.Element: "AFBaseElement" = AFBaseElement(name) if _element is None else _element if parent is None: self.Attributes = AFAttributes( [ - AFAttribute("Attribute1", parent=self), - AFAttribute("Attribute2", parent=self), + AFAttribute("Attribute1", parent=self, _element=self.Element), + AFAttribute("Attribute2", parent=self, _element=self.Element), ] ) self.Data: Data.AFData @@ -44,6 +50,11 @@ def GetValue() -> 
AFValue: """Stub for getting a value.""" return AFValue(0) + @staticmethod + def GetPath() -> str: + """Stub for getting the path.""" + return "Path\\to\\the|attribute" + class AFAttributes(list[AFAttribute]): def __init__(self, elements: list[AFAttribute]) -> None: @@ -58,8 +69,8 @@ class AFBaseElement: def __init__(self, name: str, parent: "AFElement | None" = None) -> None: self.Attributes = AFAttributes( [ - AFAttribute("Attribute1"), - AFAttribute("Attribute2"), + AFAttribute("Attribute1", _element=self), + AFAttribute("Attribute2", _element=self), ] ) self.Categories: AF.AFCategories @@ -100,6 +111,14 @@ class AFElementTemplate: """Mock class of the AF.Asset.AFElementTemplate class.""" +class AFEnumerationValue: + """Mock class of the AF.Asset.AFEnumerationValue class.""" + + def __init__(self, name: str, value: int) -> None: + self.Name = name + self.Value = value + + class AFDataReference: from . import PI diff --git a/PIconnect/_typing/Database.py b/PIconnect/_typing/Database.py new file mode 100644 index 00000000..af4112f7 --- /dev/null +++ b/PIconnect/_typing/Database.py @@ -0,0 +1,12 @@ +from . 
import Asset + + +class AFDatabase: + """Mock class of the AF.AFDatabase class.""" + + def __init__(self, name: str) -> None: + self.Name = name + self.Elements = Asset.AFElements( + [Asset.AFElement("TestElement"), Asset.AFElement("BaseElement")] + ) + self.Tables = Asset.AFTables([Asset.AFTable("TestTable")]) diff --git a/PIconnect/_typing/Search.py b/PIconnect/_typing/Search.py new file mode 100644 index 00000000..c042e250 --- /dev/null +++ b/PIconnect/_typing/Search.py @@ -0,0 +1,38 @@ +"""The Search namespace provides query based searches.""" + +from collections.abc import Iterable +from typing import Generic, TypeVar + +from .Asset import AFAttribute, AFElement +from .Database import AFDatabase + +_AFSearchable = TypeVar( + "_AFSearchable", +) + + +class AFSearch(Generic[_AFSearchable]): + """Base class for AFSearch.""" + + def __init__(self, database: AFDatabase, name: str, query: str) -> None: + self.Database = database + self.SearchName = name + self.TokenCollection = query + + def GetTotalCount(self) -> int: + """Return the total count of the search results.""" + return len(self.TokenCollection) + + def FindObjects(self) -> Iterable[_AFSearchable]: + """Return the search results.""" + obs: list[_AFSearchable] = [] + for item in obs: + yield item + + +class AFAttributeSearch(AFSearch[AFAttribute]): + """Search for AF attributes.""" + + +class AFElementSearch(AFSearch[AFElement]): + """Search for AF elements.""" From 31201ad384699c8a4eef4fb6bfea1f690be09f6c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 31 Mar 2025 22:17:18 +0200 Subject: [PATCH 07/28] feat!: rename _time to Time and import AFSDK as SDK Renamed Time module for parallel with module in the SDK. Import the SDK as SDK to prevent name clash with new AF module to mirror the AF module in the SDK. 
--- PIconnect/PIAF.py | 38 ++++++++++++----------- PIconnect/PIAFAttribute.py | 4 +-- PIconnect/PIAFBase.py | 6 ++-- PIconnect/PIData.py | 54 ++++++++++++++++----------------- PIconnect/PIPoint.py | 6 ++-- PIconnect/{_time.py => Time.py} | 26 ++++++++++++++-- PIconnect/__init__.py | 2 +- 7 files changed, 80 insertions(+), 56 deletions(-) rename PIconnect/{_time.py => Time.py} (75%) diff --git a/PIconnect/PIAF.py b/PIconnect/PIAF.py index 7466bb1c..21b90958 100644 --- a/PIconnect/PIAF.py +++ b/PIconnect/PIAF.py @@ -6,7 +6,8 @@ import pandas as pd -from PIconnect import AF, PIAFAttribute, PIAFBase, PIConsts, _time +import PIconnect.AFSDK as SDK +from PIconnect import PIAFBase, PIConsts, Search, Time from PIconnect._utils import InitialisationWarning from PIconnect.AFSDK import System @@ -17,20 +18,20 @@ class PIAFServer: """Reference to a PI AF server and its databases.""" - server: AF.PISystem - databases: dict[str, AF.AFDatabase] = dataclasses.field(default_factory=dict) + server: SDK.AF.PISystem + databases: dict[str, SDK.AF.AFDatabase] = dataclasses.field(default_factory=dict) - def __getitem__(self, attr: str) -> AF.PISystem | dict[str, AF.AFDatabase]: + def __getitem__(self, attr: str) -> SDK.AF.PISystem | dict[str, SDK.AF.AFDatabase]: """Allow access to attributes as if they were dictionary items.""" return getattr(self, attr) -ServerSpec = dict[str, AF.PISystem | dict[str, AF.AFDatabase]] +ServerSpec = dict[str, SDK.AF.PISystem | dict[str, SDK.AF.AFDatabase]] def _lookup_servers() -> dict[str, ServerSpec]: servers: dict[str, PIAFServer] = {} - for s in AF.PISystems(): + for s in SDK.AF.PISystems(): try: servers[s.Name] = server = PIAFServer(s) for d in s.Databases: @@ -61,15 +62,15 @@ def _lookup_servers() -> dict[str, ServerSpec]: def _lookup_default_server() -> ServerSpec | None: servers = _lookup_servers() - if AF.PISystems().DefaultPISystem: - return servers[AF.PISystems().DefaultPISystem.Name] + if SDK.AF.PISystems().DefaultPISystem: + return 
servers[SDK.AF.PISystems().DefaultPISystem.Name] elif len(servers) > 0: return servers[list(_lookup_servers())[0]] else: return None -class PIAFDatabase(object): +class PIAFDatabase: """Context manager for connections to the PI Asset Framework database.""" version = "0.3.0" @@ -79,8 +80,9 @@ class PIAFDatabase(object): def __init__(self, server: str | None = None, database: str | None = None) -> None: server_spec = self._initialise_server(server) - self.server: AF.PISystem = server_spec["server"] # type: ignore - self.database: AF.AFDatabase = self._initialise_database(server_spec, database) + self.server: SDK.AF.PISystem = server_spec["server"] # type: ignore + self.database: SDK.AF.AFDatabase = self._initialise_database(server_spec, database) + self.search = Search.Search(self.database) def _initialise_server(self, server: str | None) -> ServerSpec: if server is None: @@ -99,7 +101,9 @@ def _initialise_server(self, server: str | None) -> ServerSpec: return self.servers[server] - def _initialise_database(self, server: ServerSpec, database: str | None) -> AF.AFDatabase: + def _initialise_database( + self, server: ServerSpec, database: str | None + ) -> SDK.AF.AFDatabase: def default_db(): default = self.server.Databases.DefaultDatabase if default is None: @@ -109,7 +113,7 @@ def default_db(): if database is None: return default_db() - databases = cast(dict[str, AF.AFDatabase], server["databases"]) + databases = cast(dict[str, SDK.AF.AFDatabase], server["databases"]) if database not in databases: message = 'Database "{database}" not found, using the default database.' 
warnings.warn( @@ -185,18 +189,18 @@ def search(self, query: str | list[str]) -> list[PIAFAttribute.PIAFAttribute]: def event_frames( self, - start_time: _time.TimeLike = "", + start_time: Time.TimeLike = "", start_index: int = 0, max_count: int = 1000, search_mode: PIConsts.EventFrameSearchMode = _DEFAULT_EVENTFRAME_SEARCH_MODE, search_full_hierarchy: bool = False, ) -> dict[str, "PIAFEventFrame"]: """Search for event frames in the database.""" - _start_time = _time.to_af_time(start_time) - _search_mode = AF.EventFrame.AFEventFrameSearchMode(int(search_mode)) + _start_time = Time.to_af_time(start_time) + _search_mode = SDK.AF.EventFrame.AFEventFrameSearchMode(int(search_mode)) return { frame.Name: PIAFEventFrame(frame) - for frame in AF.EventFrame.AFEventFrame.FindEventFrames( + for frame in SDK.AF.EventFrame.AFEventFrame.FindEventFrames( self.database, None, _start_time, diff --git a/PIconnect/PIAFAttribute.py b/PIconnect/PIAFAttribute.py index 11cf6f41..3fbfa452 100644 --- a/PIconnect/PIAFAttribute.py +++ b/PIconnect/PIAFAttribute.py @@ -4,7 +4,7 @@ import datetime from typing import Any -from PIconnect import AF, PIData, PIPoint, _time +from PIconnect import AF, PIData, PIPoint, Time from ._typing import AF as _AFtyping @@ -75,7 +75,7 @@ def description(self) -> str: @property def last_update(self) -> datetime.datetime: """Return the time at which the current_value was last updated.""" - return _time.timestamp_to_index(self.attribute.GetValue().Timestamp.UtcTime) + return Time.timestamp_to_index(self.attribute.GetValue().Timestamp.UtcTime) @property def units_of_measurement(self) -> str: diff --git a/PIconnect/PIAFBase.py b/PIconnect/PIAFBase.py index 87a2ece3..a47f9818 100644 --- a/PIconnect/PIAFBase.py +++ b/PIconnect/PIAFBase.py @@ -2,10 +2,10 @@ from typing import Generic, TypeVar +import PIconnect.AFSDK as SDK import PIconnect.PIAFAttribute as PIattr -from PIconnect import AF -ElementType = TypeVar("ElementType", bound=AF.Asset.AFBaseElement) +ElementType 
= TypeVar("ElementType", bound=SDK.AF.Asset.AFBaseElement) class PIAFBaseElement(Generic[ElementType]): @@ -31,7 +31,7 @@ def attributes(self) -> dict[str, PIattr.PIAFAttribute]: return {a.Name: PIattr.PIAFAttribute(self.element, a) for a in self.element.Attributes} @property - def categories(self) -> AF.AFCategories: + def categories(self) -> SDK.AF.AFCategories: """Return the categories of the current element.""" return self.element.Categories diff --git a/PIconnect/PIData.py b/PIconnect/PIData.py index 02b78f71..8d5504d5 100644 --- a/PIconnect/PIData.py +++ b/PIconnect/PIData.py @@ -7,7 +7,7 @@ import pandas as pd import PIconnect._typing.AF as _AFtyping -from PIconnect import AF, PIConsts, _time +from PIconnect import AF, PIConsts, Time __all__ = [ "PISeries", @@ -80,8 +80,8 @@ def _current_value(self) -> Any: def filtered_summaries( self, - start_time: _time.TimeLike, - end_time: _time.TimeLike, + start_time: Time.TimeLike, + end_time: Time.TimeLike, interval: str, filter_expression: str, summary_types: PIConsts.SummaryType, @@ -132,7 +132,7 @@ def filtered_summaries( pandas.DataFrame: Dataframe with the unique timestamps as row index and the summary name as column name. 
""" - time_range = _time.to_af_time_range(start_time, end_time) + time_range = Time.to_af_time_range(start_time, end_time) _interval = AF.Time.AFTimeSpan.Parse(interval) _filter_expression = self._normalize_filter_expression(filter_expression) _summary_types = AF.Data.AFSummaryTypes(int(summary_types)) @@ -155,7 +155,7 @@ def filtered_summaries( key = PIConsts.SummaryType(int(summary.Key)).name timestamps, values = zip( *[ - (_time.timestamp_to_index(value.Timestamp.UtcTime), value.Value) + (Time.timestamp_to_index(value.Timestamp.UtcTime), value.Value) for value in summary.Value ], strict=True, @@ -179,7 +179,7 @@ def _filtered_summaries( ) -> _AFtyping.Data.SummariesDict: pass - def interpolated_value(self, time: _time.TimeLike) -> PISeries: + def interpolated_value(self, time: Time.TimeLike) -> PISeries: """Return a PISeries with an interpolated value at the given time. Parameters @@ -197,7 +197,7 @@ def interpolated_value(self, time: _time.TimeLike) -> PISeries: PISeries: A PISeries with a single row, with the corresponding time as the index """ - from . import _time as time_module + from . 
import Time as time_module _time = time_module.to_af_time(time) pivalue = self._interpolated_value(_time) @@ -214,8 +214,8 @@ def _interpolated_value(self, time: AF.Time.AFTime) -> AF.Asset.AFValue: def interpolated_values( self, - start_time: _time.TimeLike, - end_time: _time.TimeLike, + start_time: Time.TimeLike, + end_time: Time.TimeLike, interval: str, filter_expression: str = "", ) -> PISeries: @@ -254,7 +254,7 @@ def interpolated_values( ------- PISeries: Timeseries of the values returned by the SDK """ - time_range = _time.to_af_time_range(start_time, end_time) + time_range = Time.to_af_time_range(start_time, end_time) _interval = AF.Time.AFTimeSpan.Parse(interval) _filter_expression = self._normalize_filter_expression(filter_expression) pivalues = self._interpolated_values(time_range, _interval, _filter_expression) @@ -262,7 +262,7 @@ def interpolated_values( timestamps: list[datetime.datetime] = [] values: list[Any] = [] for value in pivalues: - timestamps.append(_time.timestamp_to_index(value.Timestamp.UtcTime)) + timestamps.append(Time.timestamp_to_index(value.Timestamp.UtcTime)) values.append(value.Value) return PISeries( # type: ignore tag=self.name, @@ -291,7 +291,7 @@ def _normalize_filter_expression(self, filter_expression: str) -> str: def recorded_value( self, - time: _time.TimeLike, + time: Time.TimeLike, retrieval_mode: PIConsts.RetrievalMode = PIConsts.RetrievalMode.AUTO, ) -> PISeries: """Return a PISeries with the recorded value at or close to the given time. @@ -310,7 +310,7 @@ def recorded_value( PISeries: A PISeries with a single row, with the corresponding time as the index """ - from . import _time as time_module + from . 
import Time as time_module _time = time_module.to_af_time(time) _retrieval_mode = AF.Data.AFRetrievalMode(int(retrieval_mode)) @@ -330,8 +330,8 @@ def _recorded_value( def recorded_values( self, - start_time: _time.TimeLike, - end_time: _time.TimeLike, + start_time: Time.TimeLike, + end_time: Time.TimeLike, boundary_type: str = "inside", filter_expression: str = "", ): @@ -382,7 +382,7 @@ def recorded_values( ValueError: If the provided `boundary_type` is not a valid key a `ValueError` is raised. """ - time_range = _time.to_af_time_range(start_time, end_time) + time_range = Time.to_af_time_range(start_time, end_time) _boundary_type = self.__boundary_types.get(boundary_type.lower()) if _boundary_type is None: raise ValueError( @@ -396,7 +396,7 @@ def recorded_values( timestamps: list[datetime.datetime] = [] values: list[Any] = [] for value in pivalues: - timestamps.append(_time.timestamp_to_index(value.Timestamp.UtcTime)) + timestamps.append(Time.timestamp_to_index(value.Timestamp.UtcTime)) values.append(value.Value) return PISeries( # type: ignore tag=self.name, @@ -422,8 +422,8 @@ def _recorded_values( def summary( self, - start_time: _time.TimeLike, - end_time: _time.TimeLike, + start_time: Time.TimeLike, + end_time: Time.TimeLike, summary_types: PIConsts.SummaryType, calculation_basis: PIConsts.CalculationBasis = PIConsts.CalculationBasis.TIME_WEIGHTED, time_type: PIConsts.TimestampCalculation = PIConsts.TimestampCalculation.AUTO, @@ -456,7 +456,7 @@ def summary( pandas.DataFrame: Dataframe with the unique timestamps as row index and the summary name as column name. 
""" - time_range = _time.to_af_time_range(start_time, end_time) + time_range = Time.to_af_time_range(start_time, end_time) _summary_types = AF.Data.AFSummaryTypes(int(summary_types)) _calculation_basis = AF.Data.AFCalculationBasis(int(calculation_basis)) _time_type = AF.Data.AFTimestampCalculation(int(time_type)) @@ -465,7 +465,7 @@ def summary( for summary in pivalues: key = PIConsts.SummaryType(int(summary.Key)).name value = summary.Value - timestamp = _time.timestamp_to_index(value.Timestamp.UtcTime) + timestamp = Time.timestamp_to_index(value.Timestamp.UtcTime) value = value.Value df = df.join( # type: ignore pd.DataFrame(data={key: value}, index=[timestamp]), how="outer" @@ -484,8 +484,8 @@ def _summary( def summaries( self, - start_time: _time.TimeLike, - end_time: _time.TimeLike, + start_time: Time.TimeLike, + end_time: Time.TimeLike, interval: str, summary_types: PIConsts.SummaryType, calculation_basis: PIConsts.CalculationBasis = PIConsts.CalculationBasis.TIME_WEIGHTED, @@ -522,7 +522,7 @@ def summaries( pandas.DataFrame: Dataframe with the unique timestamps as row index and the summary name as column name. 
""" - time_range = _time.to_af_time_range(start_time, end_time) + time_range = Time.to_af_time_range(start_time, end_time) _interval = AF.Time.AFTimeSpan.Parse(interval) _summary_types = AF.Data.AFSummaryTypes(int(summary_types)) _calculation_basis = AF.Data.AFCalculationBasis(int(calculation_basis)) @@ -535,7 +535,7 @@ def summaries( key = PIConsts.SummaryType(int(summary.Key)).name timestamps, values = zip( *[ - (_time.timestamp_to_index(value.Timestamp.UtcTime), value.Value) + (Time.timestamp_to_index(value.Timestamp.UtcTime), value.Value) for value in summary.Value ], strict=True, @@ -565,7 +565,7 @@ def units_of_measurement(self) -> str | None: def update_value( self, value: Any, - time: _time.TimeLike | None = None, + time: Time.TimeLike | None = None, update_mode: PIConsts.UpdateMode = PIConsts.UpdateMode.NO_REPLACE, buffer_mode: PIConsts.BufferMode = PIConsts.BufferMode.BUFFER_IF_POSSIBLE, ) -> None: @@ -580,7 +580,7 @@ def update_value( You can combine update_mode and time to change already stored value. """ - from . import _time as time_module + from . 
import Time as time_module if time is not None: _value = AF.Asset.AFValue(value, time_module.to_af_time(time)) diff --git a/PIconnect/PIPoint.py b/PIconnect/PIPoint.py index 2d9bf4d8..f9cacf6f 100644 --- a/PIconnect/PIPoint.py +++ b/PIconnect/PIPoint.py @@ -3,7 +3,7 @@ from typing import Any import PIconnect._typing.AF as _AFtyping -from PIconnect import AF, PIData, _time +from PIconnect import AF, PIData, Time class PIPoint(PIData.PISeriesContainer): @@ -33,7 +33,7 @@ def __repr__(self): @property def created(self): """Return the creation datetime of a point.""" - return _time.timestamp_to_index(self.raw_attributes["creationdate"]) + return Time.timestamp_to_index(self.raw_attributes["creationdate"]) @property def description(self): @@ -48,7 +48,7 @@ def description(self): @property def last_update(self): """Return the time at which the last value for this PI Point was recorded.""" - return _time.timestamp_to_index(self.pi_point.CurrentValue().Timestamp.UtcTime) + return Time.timestamp_to_index(self.pi_point.CurrentValue().Timestamp.UtcTime) @property def name(self) -> str: diff --git a/PIconnect/_time.py b/PIconnect/Time.py similarity index 75% rename from PIconnect/_time.py rename to PIconnect/Time.py index 6355f2d6..7159c506 100644 --- a/PIconnect/_time.py +++ b/PIconnect/Time.py @@ -1,13 +1,15 @@ """Time related functions and classes.""" -# pyright: strict import datetime import zoneinfo +import pandas as pd # type: ignore + from PIconnect import AF, PIConfig from PIconnect.AFSDK import System TimeLike = str | datetime.datetime +IntervalLike = str | datetime.timedelta | pd.Timedelta def to_af_time_range(start_time: TimeLike, end_time: TimeLike) -> AF.Time.AFTimeRange: @@ -27,7 +29,7 @@ def to_af_time_range(start_time: TimeLike, end_time: TimeLike) -> AF.Time.AFTime Returns ------- - :afsdk:`AF.Time.AFTimeRange `: + :afsdk:`AF.Time.AFTimeRange `: Time range covered by the start and end time. 
""" if isinstance(start_time, datetime.datetime): @@ -47,7 +49,7 @@ def to_af_time(time: TimeLike) -> AF.Time.AFTime: Returns ------- - :afsdk:`AF.Time.AFTime `: + :afsdk:`AF.Time.AFTime `: AFTime version of time. """ if isinstance(time, datetime.datetime): @@ -56,6 +58,24 @@ def to_af_time(time: TimeLike) -> AF.Time.AFTime: return AF.Time.AFTime(time) +def to_af_time_span(interval: IntervalLike) -> AF.Time.AFTimeSpan: + """Convert a time interval to a AFTimeSpan value. + + Parameters + ---------- + interval (str | datetime.timedelta | pd.Timedelta): Interval to convert to AFTimeSpan. + + Returns + ------- + :afsdk:`AF.Time.AFTimeSpan `: + AFTimeSpan version of interval. + """ + if isinstance(interval, (datetime.timedelta, pd.Timedelta)): + interval = f"{interval.total_seconds()}s" + + return AF.Time.AFTimeSpan.Parse(interval) + + def timestamp_to_index(timestamp: System.DateTime) -> datetime.datetime: """Convert AFTime object to datetime in local timezone. diff --git a/PIconnect/__init__.py b/PIconnect/__init__.py index 8a8c650e..77db809b 100644 --- a/PIconnect/__init__.py +++ b/PIconnect/__init__.py @@ -8,7 +8,7 @@ from . 
import _version __version__ = _version.get_versions()["version"] -__sdk_version = tuple(int(x) for x in AF.PISystems().Version.split(".")) +__sdk_version = tuple(int(x) for x in AF_SDK_VERSION.split(".")) __all__ = [ "AF", From bc83e71ae5ea2f62b12844a248cf211949ab924d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 31 Mar 2025 22:19:32 +0200 Subject: [PATCH 08/28] feat!: move PIAFElement and PIAFTable to Asset, add Search --- PIconnect/AF.py | 114 +++++ PIconnect/Asset.py | 379 ++++++++++++++++ PIconnect/Data.py | 888 +++++++++++++++++++++++++++++++++++++ PIconnect/PIAF.py | 118 +---- PIconnect/PIAFAttribute.py | 52 ++- PIconnect/PIAFBase.py | 109 ++++- PIconnect/Search.py | 145 ++++++ tests/test_PIAF.py | 22 +- 8 files changed, 1699 insertions(+), 128 deletions(-) create mode 100644 PIconnect/AF.py create mode 100644 PIconnect/Asset.py create mode 100644 PIconnect/Data.py create mode 100644 PIconnect/Search.py diff --git a/PIconnect/AF.py b/PIconnect/AF.py new file mode 100644 index 00000000..2911ed93 --- /dev/null +++ b/PIconnect/AF.py @@ -0,0 +1,114 @@ +"""Generics for AF collections.""" + +from collections.abc import Iterable, Iterator, MutableSequence +from typing import Protocol, Self, TypeVar, overload + + +class NamedItem(Protocol): + """Protocol for an item with a name.""" + + @property + def name(self) -> str: + """Return the name of the item.""" + ... + + +NamedItemType = TypeVar("NamedItemType", bound=NamedItem) + + +class NamedItemList(MutableSequence[NamedItemType]): + """A list of items with names. + + This class provides a way to access items by index or by name. + """ + + def __init__(self, elements: MutableSequence[NamedItemType]) -> None: + self._elements = elements + + @overload + def __getitem__(self, index: int | str) -> NamedItemType: ... + @overload + def __getitem__(self, index: slice) -> Self: ... 
+ def __getitem__(self, index: int | str | slice) -> NamedItemType | Self: + """Return the list item at the given index or the list item with the given name.""" + match index: + case int(): + return self._elements[index] + case str(): + for attr in self._elements: + if attr.name == index: + return attr + raise KeyError(f"List item {index} not found.") + case slice(): + return self.__class__(self._elements[index]) + case _: + raise TypeError("Index must be an int, string or slice of int.") # type: ignore + + def __len__(self) -> int: + """Return the number of items in the list.""" + return len(self._elements) + + def __iter__(self) -> Iterator[NamedItemType]: + """Return an iterator over the items in the list.""" + return iter(self._elements) + + @overload + def __setitem__(self, index: int | str, value: NamedItemType) -> None: ... + @overload + def __setitem__(self, index: slice, value: Iterable[NamedItemType]) -> None: ... + def __setitem__( + self, index: int | str | slice, value: NamedItemType | Iterable[NamedItemType] + ) -> None: + """Set the list item at the given index or the list item with the given name.""" + match index: + case int(): + self._elements[index] = value # type: ignore + case str(): + for i, attr in enumerate(self._elements): + if attr.name == index: + self._elements[i] = value # type: ignore + return + raise KeyError(f"List item {index} not found.") + case slice(): + if isinstance(value, Iterable): + self._elements[index] = list(value) + else: + raise TypeError("Value must be an iterable.") + case _: + raise TypeError("Index must be an int or string.") # type: ignore + + def __delitem__(self, index: int | str | slice) -> None: + """Delete the list item at the given index or the list item with the given name.""" + match index: + case int(): + del self._elements[index] + case slice(): + del self._elements[index] + case str(): + for i, attr in enumerate(self._elements): + if attr.name == index: + del self._elements[i] + return + raise 
KeyError(f"List item {index} not found.") + case _: + raise TypeError("Index must be an int or string.") # type: ignore + + def insert(self, index: int, value: NamedItemType) -> None: + """Insert a new item at the given index.""" + self._elements.insert(index, value) + + def append(self, value: NamedItemType) -> None: + """Append a new item to the end of the list.""" + self._elements.append(value) + + def extend(self, values: Iterable[NamedItemType]) -> None: + """Extend the list with a new iterable of items.""" + self._elements.extend(values) + + def __reversed__(self) -> Iterator[NamedItemType]: + """Return a reverse iterator over the items in the list.""" + return reversed(self._elements) + + def __repr__(self) -> str: + """Return the string representation of the list.""" + return f"{self.__class__.__qualname__}({len(self._elements)} items)" diff --git a/PIconnect/Asset.py b/PIconnect/Asset.py new file mode 100644 index 00000000..93f6b78f --- /dev/null +++ b/PIconnect/Asset.py @@ -0,0 +1,379 @@ +"""Mirror of the OSISoft.AF.Asset namespace.""" + +import dataclasses +from typing import Generic, Self, TypeVar, overload + +import pandas as pd # type: ignore + +import PIconnect._typing.AF as _AFtyping +import PIconnect.AF as AF +import PIconnect.AFSDK as SDK +from PIconnect import Data, PIPoint + +__all__ = [ + "AFDataReference", + "AFAttribute", + "AFAttributeList", +] + +T = TypeVar("T") +ElementType = TypeVar("ElementType", bound=SDK.AF.Asset.AFBaseElement) + + +@dataclasses.dataclass +class AFDataReference: + """Reference to the data source of an AF attribute.""" + + data_reference: SDK.AF.Asset.AFDataReference + + @property + def attribute(self) -> "AFAttribute": + """Return the attribute associated with the data reference.""" + return AFAttribute(self.data_reference.Attribute) + + @property + def name(self) -> str: + """Return the name of the data reference.""" + return self.data_reference.Name + + @property + def pi_point(self) -> PIPoint.PIPoint | None: + 
"""Return the PI Point associated with the data reference, if any.""" + if self.data_reference.PIPoint is not None: + return PIPoint.PIPoint(self.data_reference.PIPoint) + + +class AFEnumerationValue: + """Representation of an AF enumeration value.""" + + def __init__(self, value: SDK.AF.Asset.AFEnumerationValue) -> None: + self._value = value + + def __str__(self) -> str: + """Return the string representation of the enumeration value.""" + return self._value.Name + + def __int__(self) -> int: + """Return the integer representation of the enumeration value.""" + return self._value.Value + + def __repr__(self): + """Return the string representation of the enumeration value.""" + return f"{self.__class__.__qualname__}({self._value.Name})" + + @property + def name(self) -> str: + """Return the name of the enumeration value.""" + return self._value.Name + + @property + def value(self) -> int: + """Return the integer value of the enumeration value.""" + return self._value.Value + + @overload + @staticmethod + def wrap_enumeration_value( + value: SDK.AF.Asset.AFEnumerationValue, + ) -> "AFEnumerationValue": ... + @overload + @staticmethod + def wrap_enumeration_value( + value: T, + ) -> T: ... 
+ @staticmethod + def wrap_enumeration_value( + value: T | SDK.AF.Asset.AFEnumerationValue, + ) -> "T | AFEnumerationValue": + """Wrap the value in an AFEnumerationValue if it is an enumeration value.""" + if isinstance(value, SDK.AF.Asset.AFEnumerationValue): + return AFEnumerationValue(value) + return value + + +class AFAttribute(Data.DataContainer): + """Representation of an AF attribute.""" + + def __init__(self, attribute: SDK.AF.Asset.AFAttribute) -> None: + super().__init__() + self.attribute = attribute + + def __repr__(self): + """Return the string representation of the current attribute.""" + description = ", ".join([x for x in [self.name, self.description] if x]) + value = " ".join( + [str(x) for x in [self.current_value, self.units_of_measurement] if x] + ) + return f"{self.__class__.__qualname__}({description}; Current Value: {value})" + + @property + def element(self) -> SDK.AF.Asset.AFBaseElement: + """Return the element to which the attribute belongs.""" + return self.attribute.Element + + @property + def parent(self) -> Self | None: + """Return the parent attribute of the current attribute, or None if it has none.""" + if not self.attribute.Parent: + return None + return self.__class__(self.attribute.Parent) + + @property + def children(self) -> dict[str, Self]: + """Return a dictionary of the direct child attributes of the current attribute.""" + return {a.Name: self.__class__(a) for a in self.attribute.Attributes} + + @property + def path(self) -> str: + """Return the path of the attribute.""" + return self.attribute.GetPath() + + @property + def name(self) -> str: + """Return the name of the attribute.""" + return self.path.split("\\")[-1] + + @property + def data_reference(self) -> AFDataReference: + """Return the data reference of the attribute.""" + return AFDataReference(self.attribute.DataReference) + + @property + def description(self) -> str: + """Return the description of the attribute.""" + return self.attribute.Description + + 
@property + def units_of_measurement(self) -> str: + """Return the units of measurement of the attribute.""" + return str(self.attribute.DefaultUOM or "") + + def _normalize_filter_expression(self, filter_expression: str) -> str: + return super()._normalize_filter_expression( + filter_expression.replace("%attribute%", f"'{self.attribute.Name}'") + ) + + def _current_value(self) -> object: + """Return the current value of the attribute.""" + return AFEnumerationValue.wrap_enumeration_value(self.attribute.GetValue().Value) + + def _filtered_summaries( + self, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + filter_expression: str, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + filter_evaluation: SDK.AF.Data.AFSampleType, + filter_interval: SDK.AF.Time.AFTimeSpan, + time_type: SDK.AF.Data.AFTimestampCalculation, + ) -> _AFtyping.Data.SummariesDict: + return self.attribute.Data.FilteredSummaries( + time_range, + interval, + filter_expression, + summary_types, + calculation_basis, + filter_evaluation, + filter_interval, + time_type, + ) + + def _interpolated_value(self, time: SDK.AF.Time.AFTime): + """Return a single value for this PI Point.""" + return self.attribute.Data.InterpolatedValue(time, self.attribute.DefaultUOM) + + def _recorded_value( + self, time: SDK.AF.Time.AFTime, retrieval_mode: SDK.AF.Data.AFRetrievalMode + ) -> SDK.AF.Asset.AFValue: + """Return a single value for this PI Point.""" + return self.attribute.Data.RecordedValue( + time, retrieval_mode, self.attribute.DefaultUOM + ) + + def _recorded_values( + self, + time_range: SDK.AF.Time.AFTimeRange, + boundary_type: SDK.AF.Data.AFBoundaryType, + filter_expression: str, + ) -> SDK.AF.Asset.AFValues: + include_filtered_values = False + return self.attribute.Data.RecordedValues( + time_range, + boundary_type, + self.attribute.DefaultUOM, + filter_expression, + include_filtered_values, + ) + + def _interpolated_values( + 
self, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + filter_expression: str, + ) -> SDK.AF.Asset.AFValues: + """Query the pi af attribute, internal implementation.""" + include_filtered_values = False + return self.attribute.Data.InterpolatedValues( + time_range, + interval, + self.attribute.DefaultUOM, + filter_expression, + include_filtered_values, + ) + + def _summaries( + self, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + time_type: SDK.AF.Data.AFTimestampCalculation, + ) -> _AFtyping.Data.SummariesDict: + return self.attribute.Data.Summaries( + time_range, interval, summary_types, calculation_basis, time_type + ) + + def _summary( + self, + time_range: SDK.AF.Time.AFTimeRange, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + time_type: SDK.AF.Data.AFTimestampCalculation, + ) -> _AFtyping.Data.SummaryDict: + return self.attribute.Data.Summary( + time_range, summary_types, calculation_basis, time_type + ) + + def _update_value( + self, + value: SDK.AF.Asset.AFValue, + update_mode: SDK.AF.Data.AFUpdateOption, + buffer_mode: SDK.AF.Data.AFBufferOption, + ) -> None: + return self.attribute.Data.UpdateValue( + value, + update_mode, + buffer_mode, + ) + + +class AFAttributeList(Data.DataContainerCollection[AFAttribute]): + """A list of AF attributes.""" + + pass + + +class AFBaseElement(Generic[ElementType]): + """Container for PI AF elements in the database.""" + + version = "0.1.0" + + def __init__(self, element: ElementType) -> None: + self.element = element + + def __repr__(self) -> str: + """Return the string representation of the element.""" + return f"{self.__class__.__qualname__}({self.name})" + + @property + def name(self) -> str: + """Return the name of the current element.""" + return self.element.Name + + @property + def attributes(self) -> 
dict[str, AFAttribute]: + """Return a dictionary of the attributes of the current element.""" + return {a.Name: AFAttribute(a) for a in self.element.Attributes} + + @property + def categories(self) -> SDK.AF.AFCategories: + """Return the categories of the current element.""" + return self.element.Categories + + @property + def description(self) -> str: + """Return the description of the current element.""" + return self.element.Description + + +class AFElement(AFBaseElement[SDK.AF.Asset.AFElement]): + """Container for PI AF elements in the database.""" + + version = "0.1.0" + + @property + def parent(self) -> Self | None: + """Return the parent element of the current element, or None if it has none.""" + if not self.element.Parent: + return None + return self.__class__(self.element.Parent) + + @property + def children(self) -> dict[str, Self]: + """Return a dictionary of the direct child elements of the current element.""" + return {c.Name: self.__class__(c) for c in self.element.Elements} + + def descendant(self, path: str) -> Self: + """Return a descendant of the current element from an exact path.""" + return self.__class__(self.element.Elements.get_Item(path)) + + +class AFElementList(AF.NamedItemList[AFElement]): + """Container for a list of PIAFElement objects.""" + + pass + + +class AFEventFrame(AFBaseElement[SDK.AF.EventFrame.AFEventFrame]): + """Container for PI AF Event Frames in the database.""" + + version = "0.1.0" + + @property + def event_frame(self) -> SDK.AF.EventFrame.AFEventFrame: + """Return the underlying AF Event Frame object.""" + return self.element + + @property + def parent(self) -> Self | None: + """Return the parent element of the current event frame, or None if it has none.""" + if not self.element.Parent: + return None + return self.__class__(self.element.Parent) + + @property + def children(self) -> dict[str, Self]: + """Return a dictionary of the direct child event frames of the current event frame.""" + return {c.Name: 
self.__class__(c) for c in self.element.EventFrames} + + +class AFTable: + """Container for PI AF Tables in the database.""" + + def __init__(self, table: SDK.AF.Asset.AFTable) -> None: + self._table = table + + @property + def columns(self) -> list[str]: + """Return the names of the columns in the table.""" + return [col.ColumnName for col in self._table.Table.Columns] + + @property + def _rows(self) -> list[SDK.System.Data.DataRow]: + return self._table.Table.Rows + + @property + def name(self) -> str: + """Return the name of the table.""" + return self._table.Name + + @property + def shape(self) -> tuple[int, int]: + """Return the shape of the table.""" + return (len(self._rows), len(self.columns)) + + @property + def data(self) -> pd.DataFrame: + """Return the data in the table as a pandas DataFrame.""" + return pd.DataFrame([{col: row[col] for col in self.columns} for row in self._rows]) diff --git a/PIconnect/Data.py b/PIconnect/Data.py new file mode 100644 index 00000000..82fa9354 --- /dev/null +++ b/PIconnect/Data.py @@ -0,0 +1,888 @@ +"""Data access and manipulation classes.""" + +import abc +import datetime +import enum +from collections.abc import Callable +from typing import Any, Concatenate, ParamSpec, TypeVar, cast + +import pandas as pd # type: ignore + +import PIconnect._typing.AF as _AFtyping +import PIconnect.AF as AF +import PIconnect.AFSDK as SDK +from PIconnect import Time + + +class BoundaryType(enum.IntEnum): + """BoundaryType indicates how to handle the boundaries of a time range. + + Detailed information is available at + :afsdk:`AF.Data.AFBoundaryType `. + """ + + #: The first value after the start time and the last value before the end time + INSIDE = 0 + #: The last value before the start time and the first value after the end time + OUTSIDE = 1 + #: Interpolate values to the specified start and end time + INTERPOLATED = 2 + + +class SummaryType(enum.IntFlag): + """SummaryType indicates which types of summary should be calculated. 
+ + `SummaryType`'s are `enum.IntFlag`'s and can be or'ed together to select + multiple summary types. For example: + + >>> SummaryType.MINIMUM | SummaryType.MAXIMUM # Returns minimum and maximum + + + Detailed information is available at + :afsdk:`AF.Data.AFSummaryTypes `. + """ + + #: No summary data + NONE = 0 + #: A total over the time span + TOTAL = 1 + #: Average value over the time span + AVERAGE = 2 + #: The minimum value in the time span + MINIMUM = 4 + #: The maximum value in the time span + MAXIMUM = 8 + #: The range of the values (max-min) in the time span + RANGE = 16 + #: The sample standard deviation of the values over the time span + STD_DEV = 32 + #: The population standard deviation of the values over the time span + POP_STD_DEV = 64 + #: The sum of the event count (when the calculation is event weighted). + #: The sum of the event time duration (when the calculation is time weighted.) + COUNT = 128 + #: The percentage of the data with a good value over the time range. + #: Based on time for time weighted calculations, + #: based on event count for event weigthed calculations. + PERCENT_GOOD = 8192 + #: The total over the time span, + #: with the unit of measurement that's associated with the input + #: (or no units if not defined for the input). + TOTAL_WITH_UOM = 16384 + #: A convenience to retrieve all summary types + ALL = 24831 + #: A convenience to retrieve all summary types for non-numeric data + ALL_FOR_NON_NUMERIC = 8320 + + +class CalculationBasis(enum.IntEnum): + """CalculationBasis indicates how values should be weighted over a time range. + + Detailed information is available at + :afsdk:`AF.Data.AFCalculationBasis `. + """ + + #: Each event is weighted according to the time over which it applies. + TIME_WEIGHTED = 0 + #: Each event is weighted equally. + EVENT_WEIGHTED = 1 + #: Each event is time weighted, but interpolation is always done as if it is + #: continous data. 
+ TIME_WEIGHTED_CONTINUOUS = 2 + #: Each event is time weighted, but interpolation is always done as if it is + #: discrete, stepped, data. + TIME_WEIGHTED_DISCRETE = 3 + #: Each event is weighted equally, except data at the end of the interval is + #: excluded. + EVENT_WEIGHTED_EXCLUDE_MOST_RECENT = 4 + #: Each event is weighted equally, except data at the beginning of the interval + #: is excluded. + EVENT_WEIGHTED_EXCLUDE_EARLIEST = 5 + #: Each event is weighted equally, data at both boundaries of the interval are + #: explicitly included. + EVENT_WEIGHTED_INCLUDE_BOTH_ENDS = 6 + + +class ExpressionSampleType(enum.IntEnum): + """ExpressionSampleType indicates how expressions are evaluated over a time range. + + Detailed information is available at + :afsdk:`AF.Data.AFSampleType `. + """ + + #: The expression is evaluated at each archive event. + EXPRESSION_RECORDED_VALUES = 0 + #: The expression is evaluated at a sampling interval, passed as a separate argument. + INTERVAL = 1 + + +class TimestampCalculation(enum.IntEnum): + """ + TimestampCalculation defines the timestamp returned for a given summary calculation. + + Detailed information is available at + :afsdk:`AF.Data.AFTimeStampCalculation `. + """ + + #: The timestamp is the event time of the minimum or maximum for those summaries + #: or the beginning of the interval otherwise. + AUTO = 0 + #: The timestamp is always the beginning of the interval. + EARLIEST_TIME = 1 + #: The timestamp is always the end of the interval. + MOST_RECENT_TIME = 2 + + +class RetrievalMode(enum.IntEnum): + """RetrievalMode indicates which recorded value should be returned. + + Detailed information is available at + :afsdk:`AF.Data.AFRetrievalMode `. 
+ """ + + #: Autmatic detection + AUTO = 0 + #: At the exact time if available, else the first before the requested time + AT_OR_BEFORE = 1 + #: The first before the requested time + BEFORE = 6 + #: At the exact time if available, else the first after the requested time + AT_OR_AFTER = 2 + #: The first after the requested time + AFTER = 7 + #: At the exact time if available, else return an error + EXACT = 4 + + +class UpdateMode(enum.IntEnum): + """Indicates how to treat duplicate values in the archive. + + Only used when supported by the Data Reference. + + Detailed information is available at + :afsdk:`AF.Data.AFUpdateOption ` + """ + + #: Add the value to the archive. + #: If any values exist at the same time, will overwrite one of them and set its + #: Substituted flag. + REPLACE = 0 + #: Add the value to the archive. Any existing values at the same time are not overwritten. + INSERT = 1 + #: Add the value to the archive only if no value exists at the same time. + #: If a value already exists for that time, the passed value is ignored. + NO_REPLACE = 2 + #: Replace an existing value in the archive at the specified time. + #: If no existing value is found, the passed value is ignored. + REPLACE_ONLY = 3 + #: Add the value to the archive without compression. + #: If this value is written to the snapshot, the previous snapshot value will be written to + #: the archive, + #: without regard to compression settings. + #: Note that if a subsequent snapshot value is written without the InsertNoCompression + #: option, + #: the value added with the InsertNoCompression option is still subject to compression. + INSERT_NO_COMPRESSION = 5 + #: Remove the value from the archive if a value exists at the passed time. + REMOVE = 6 + + +class BufferMode(enum.IntEnum): + """Indicates buffering option in updating values, when supported by the Data Reference. 
+ + Detailed information is available at + :afsdk:`AF.Data.AFBufferOption ` + """ + + #: Updating data reference values without buffer. + DO_NOT_BUFFER = 0 + #: Try updating data reference values with buffer. + #: If fails (e.g. data reference AFDataMethods does not support Buffering, + #: or its Buffering system is not available), + #: then try updating directly without buffer. + BUFFER_IF_POSSIBLE = 1 + # Updating data reference values with buffer. + BUFFER = 2 + + +_DEFAULT_CALCULATION_BASIS = CalculationBasis.TIME_WEIGHTED +_DEFAULT_FILTER_EVALUATION = ExpressionSampleType.EXPRESSION_RECORDED_VALUES +_DEFAULT_TIMESTAMP_CALCULATION = TimestampCalculation.AUTO + + +class DataContainer(abc.ABC): + """Abstract base class for data containers.""" + + @property + @abc.abstractmethod + def name(self) -> str: + """Return the name of the data container.""" + pass + + @property + def current_value(self) -> Any: + """Return the current value of the attribute.""" + return self._current_value() + + @abc.abstractmethod + def _current_value(self) -> Any: + """Return the current value of the attribute.""" + pass + + def filtered_summaries( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + interval: Time.IntervalLike, + filter_expression: str, + summary_types: SummaryType, + calculation_basis: CalculationBasis = _DEFAULT_CALCULATION_BASIS, + filter_evaluation: ExpressionSampleType = _DEFAULT_FILTER_EVALUATION, + filter_interval: Time.IntervalLike | None = None, + time_type: TimestampCalculation = _DEFAULT_TIMESTAMP_CALCULATION, + ) -> pd.DataFrame: + """Return one or more summary values for each interval within a time range. + + Parameters + ---------- + start_time (str or datetime): String containing the date, and possibly time, + from which to retrieve the values. This is parsed, together + with `end_time`, using + :afsdk:`AF.Time.AFTimeRange `. + end_time (str or datetime): String containing the date, and possibly time, + until which to retrieve values. 
This is parsed, together + with `start_time`, using + :afsdk:`AF.Time.AFTimeRange `. + interval (str, datetime.timedelta or pandas.Timedelta): String containing the + interval at which to extract data. This is parsed using + :afsdk:`AF.Time.AFTimeSpan.Parse `. + filter_expression (str, optional): Defaults to ''. Query on which + data to include in the results. See :ref:`filtering_values` + for more information on filter queries. + summary_types (int or PIConsts.SummaryType): Type(s) of summaries + of the data within the requested time range. + calculation_basis (int or PIConsts.CalculationBasis, optional): + Event weighting within an interval. See :ref:`event_weighting` + and :any:`CalculationBasis` for more information. Defaults to + CalculationBasis.TIME_WEIGHTED. + filter_evaluation (int or PIConsts.ExpressionSampleType, optional): + Determines whether the filter is applied to the raw events in + the database, of if it is applied to an interpolated series + with a regular interval. Defaults to + ExpressionSampleType.EXPRESSION_RECORDED_VALUES. + filter_interval (str, optional): String containing the interval at + which to extract apply the filter. This is parsed using + :afsdk:`AF.Time.AFTimeSpan.Parse `. + time_type (int or PIConsts.TimestampCalculation, optional): + Timestamp to return for each of the requested summaries. See + :ref:`summary_timestamps` and :any:`TimestampCalculation` for + more information. Defaults to TimestampCalculation.AUTO. + + Returns + ------- + pandas.DataFrame: Dataframe with the unique timestamps as row index + and the summary name as column name. 
+ """ + time_range = Time.to_af_time_range(start_time, end_time) + _interval = Time.to_af_time_span(interval) + _filter_expression = self._normalize_filter_expression(filter_expression) + _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) + _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) + _filter_evaluation = SDK.AF.Data.AFSampleType(int(filter_evaluation)) + _filter_interval = Time.to_af_time_span(filter_interval) + _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) + pivalues = self._filtered_summaries( + time_range, + _interval, + _filter_expression, + _summary_types, + _calculation_basis, + _filter_evaluation, + _filter_interval, + _time_type, + ) + df = pd.DataFrame() + for summary in pivalues: + key = SummaryType(int(summary.Key)).name + timestamps, values = zip( + *[ + (Time.timestamp_to_index(value.Timestamp.UtcTime), value.Value) + for value in summary.Value + ], + strict=True, + ) + df = df.join( + pd.DataFrame(data={key: values}, index=timestamps), # type: ignore + how="outer", + ) + return df + + @abc.abstractmethod + def _filtered_summaries( + self, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + filter_expression: str, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + filter_evaluation: SDK.AF.Data.AFSampleType, + filter_interval: SDK.AF.Time.AFTimeSpan, + time_type: SDK.AF.Data.AFTimestampCalculation, + ) -> _AFtyping.Data.SummariesDict: + pass + + def interpolated_value(self, time: Time.TimeLike) -> pd.Series: + """Return a pd.Series with an interpolated value at the given time. + + Parameters + ---------- + time (str, datetime): String containing the date, and possibly time, + for which to retrieve the value. This is parsed, using + :ref:`Time.to_af_time`. 
+ + Returns + ------- + pd.Series: A pd.Series with a single row, with the corresponding time as + the index + """ + _time = Time.to_af_time(time) + pivalue = self._interpolated_value(_time) + result = pd.Series( + data=[pivalue.Value], + index=[Time.timestamp_to_index(pivalue.Timestamp.UtcTime)], + name=self.name, + ) + result.attrs["uom"] = self.units_of_measurement + return result + + @abc.abstractmethod + def _interpolated_value(self, time: SDK.AF.Time.AFTime) -> SDK.AF.Asset.AFValue: + pass + + def interpolated_values( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + interval: Time.IntervalLike, + filter_expression: str = "", + ) -> pd.Series: + """Return a pd.Series of interpolated data. + + Data is returned between *start_time* and *end_time* at a fixed + *interval*. All three values are parsed by AF.Time and the first two + allow for time specification relative to "now" by use of the + asterisk. + + *filter_expression* is an optional string to filter the returned + values, see OSIsoft PI documentation for more information. + + The AF SDK allows for inclusion of filtered data, with filtered + values marked as such. At this point PIconnect does not support this + and filtered values are always left out entirely. + + Parameters + ---------- + start_time (str or datetime): Containing the date, and possibly time, + from which to retrieve the values. This is parsed, together + with `end_time`, using :ref:`Time.to_af_time_range`. + end_time (str or datetime): Containing the date, and possibly time, + until which to retrieve values. This is parsed, together + with `start_time`, using :ref:`Time.to_af_time_range`. + interval (str, datetime.timedelta or pd.Timedelta): String containing the interval + at which to extract data. This is parsed using :ref:`Time.to_af_time_span`. + filter_expression (str, optional): Defaults to ''. Query on which + data to include in the results. See :ref:`filtering_values` + for more information on filter queries. 
+ + Returns + ------- + pd.Series: Timeseries of the values returned by the SDK + """ + time_range = Time.to_af_time_range(start_time, end_time) + _interval = Time.to_af_time_span(interval) + _filter_expression = self._normalize_filter_expression(filter_expression) + pivalues = self._interpolated_values(time_range, _interval, _filter_expression) + + timestamps: list[datetime.datetime] = [] + values: list[Any] = [] + for value in pivalues: + timestamps.append(Time.timestamp_to_index(value.Timestamp.UtcTime)) + values.append(value.Value) + result = pd.Series( + data=values, + index=timestamps, + name=self.name, + ) + result.attrs["uom"] = self.units_of_measurement + return result + + @abc.abstractmethod + def _interpolated_values( + self, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + filter_expression: str, + ) -> SDK.AF.Asset.AFValues: + pass + + def _normalize_filter_expression(self, filter_expression: str) -> str: + return filter_expression + + def recorded_value( + self, + time: Time.TimeLike, + retrieval_mode: RetrievalMode = RetrievalMode.AUTO, + ) -> pd.Series: + """Return a pd.Series with the recorded value at or close to the given time. + + Parameters + ---------- + time (str): String containing the date, and possibly time, + for which to retrieve the value. This is parsed, using + :afsdk:`AF.Time.AFTime `. + retrieval_mode (int or :any:`PIConsts.RetrievalMode`): Flag determining + which value to return if no value available at the exact requested + time. + + Returns + ------- + pd.Series: A pd.Series with a single row, with the corresponding time as + the index + """ + from . 
import Time as time_module + + _time = time_module.to_af_time(time) + _retrieval_mode = SDK.AF.Data.AFRetrievalMode(int(retrieval_mode)) + pivalue = self._recorded_value(_time, _retrieval_mode) + result = pd.Series( + data=[pivalue.Value], + index=[Time.timestamp_to_index(pivalue.Timestamp.UtcTime)], + name=self.name, + ) + result.attrs["uom"] = self.units_of_measurement + return result + + @abc.abstractmethod + def _recorded_value( + self, time: SDK.AF.Time.AFTime, retrieval_mode: SDK.AF.Data.AFRetrievalMode + ) -> SDK.AF.Asset.AFValue: + pass + + def recorded_values( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + boundary_type: BoundaryType = BoundaryType.INSIDE, + filter_expression: str = "", + ): + """Return a pd.Series of recorded data. + + Data is returned between the given *start_time* and *end_time*, + inclusion of the boundaries is determined by the *boundary_type* + attribute. Both *start_time* and *end_time* are parsed by AF.Time and + allow for time specification relative to "now" by use of the asterisk. + + By default the *boundary_type* is set to 'inside', which returns from + the first value after *start_time* to the last value before *end_time*. + The other options are 'outside', which returns from the last value + before *start_time* to the first value before *end_time*, and + 'interpolate', which interpolates the first value to the given + *start_time* and the last value to the given *end_time*. + + *filter_expression* is an optional string to filter the returned + values, see OSIsoft PI documentation for more information. + + The AF SDK allows for inclusion of filtered data, with filtered values + marked as such. At this point PIconnect does not support this and + filtered values are always left out entirely. + + Parameters + ---------- + start_time (str or datetime): Containing the date, and possibly time, + from which to retrieve the values. This is parsed, together + with `end_time`, using :ref:`Time.to_af_time_range`. 
+ end_time (str or datetime): Containing the date, and possibly time, + until which to retrieve values. This is parsed, together + with `start_time`, using :ref:`Time.to_af_time_range`. + boundary_type (BoundaryType): Specification for how to handle values near the + specified start and end time. Defaults to `BoundaryType.INSIDE`. + filter_expression (str, optional): Defaults to ''. Query on which + data to include in the results. See :ref:`filtering_values` + for more information on filter queries. + + Returns + ------- + pd.Series: Timeseries of the values returned by the SDK + """ + time_range = Time.to_af_time_range(start_time, end_time) + _boundary_type = SDK.AF.Data.AFBoundaryType(int(boundary_type)) + _filter_expression = self._normalize_filter_expression(filter_expression) + + pivalues = self._recorded_values(time_range, _boundary_type, _filter_expression) + + timestamps: list[datetime.datetime] = [] + values: list[Any] = [] + for value in pivalues: + timestamps.append(Time.timestamp_to_index(value.Timestamp.UtcTime)) + values.append(value.Value) + result = pd.Series( + data=values, + index=timestamps, + name=self.name, + ) + result.attrs["uom"] = self.units_of_measurement + return result + + @abc.abstractmethod + def _recorded_values( + self, + time_range: SDK.AF.Time.AFTimeRange, + boundary_type: SDK.AF.Data.AFBoundaryType, + filter_expression: str, + ) -> SDK.AF.Asset.AFValues: + """Abstract implementation for recorded values. + + The internals for retrieving recorded values from PI and PI-AF are + different and should therefore be implemented by the respective data + containers. + """ + pass + + def summary( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + summary_types: SummaryType, + calculation_basis: CalculationBasis = _DEFAULT_CALCULATION_BASIS, + time_type: TimestampCalculation = _DEFAULT_TIMESTAMP_CALCULATION, + ) -> pd.DataFrame: + """Return one or more summary values over a single time range. 
+ + Parameters + ---------- + start_time (str or datetime): Containing the date, and possibly time, + from which to retrieve the values. This is parsed, together + with `end_time`, using :ref:`Time.to_af_time_range`. + end_time (str or datetime): Containing the date, and possibly time, + until which to retrieve values. This is parsed, together + with `start_time`, using :ref:`Time.to_af_time_range`. + summary_types (int or SummaryType): Type(s) of summaries + of the data within the requested time range. + calculation_basis (int or CalculationBasis, optional): + Event weighting within an interval. See :ref:`event_weighting` + and :any:`CalculationBasis` for more information. Defaults to + CalculationBasis.TIME_WEIGHTED. + time_type (int or TimestampCalculation, optional): + Timestamp to return for each of the requested summaries. See + :ref:`summary_timestamps` and :any:`TimestampCalculation` for + more information. Defaults to TimestampCalculation.AUTO. + + Returns + ------- + pandas.DataFrame: Dataframe with the unique timestamps as row index + and the summary name as column name. 
+ """ + time_range = Time.to_af_time_range(start_time, end_time) + _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) + _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) + _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) + pivalues = self._summary(time_range, _summary_types, _calculation_basis, _time_type) + df = pd.DataFrame() + for summary in pivalues: + key = SummaryType(int(summary.Key)).name + value = summary.Value + timestamp = Time.timestamp_to_index(value.Timestamp.UtcTime) + value = value.Value + df = df.join( + pd.DataFrame(data={key: value}, index=[timestamp]), # type: ignore + how="outer", + ) + return df + + @abc.abstractmethod + def _summary( + self, + time_range: SDK.AF.Time.AFTimeRange, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + time_type: SDK.AF.Data.AFTimestampCalculation, + ) -> _AFtyping.Data.SummaryDict: + pass + + def summaries( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + interval: Time.IntervalLike, + summary_types: SummaryType, + calculation_basis: CalculationBasis = _DEFAULT_CALCULATION_BASIS, + time_type: TimestampCalculation = _DEFAULT_TIMESTAMP_CALCULATION, + ) -> pd.DataFrame: + """Return one or more summary values for each interval within a time range. + + Parameters + ---------- + start_time (str or datetime): Containing the date, and possibly time, + from which to retrieve the values. This is parsed, together + with `end_time`, using :ref:`Time.to_af_time_range`. + end_time (str or datetime): Containing the date, and possibly time, + until which to retrieve values. This is parsed, together + with `start_time`, using :ref:`Time.to_af_time_range`. + interval (str, datetime.timedelta or pd.Timedelta): String containing the interval + at which to extract data. This is parsed using :ref:`Time.to_af_time_span`. 
+ summary_types (int or PIConsts.SummaryType): Type(s) of summaries + of the data within the requested time range. + calculation_basis (int or PIConsts.CalculationBasis, optional): + Event weighting within an interval. See :ref:`event_weighting` + and :any:`CalculationBasis` for more information. Defaults to + CalculationBasis.TIME_WEIGHTED. + time_type (int or PIConsts.TimestampCalculation, optional): + Timestamp to return for each of the requested summaries. See + :ref:`summary_timestamps` and :any:`TimestampCalculation` for + more information. Defaults to TimestampCalculation.AUTO. + + Returns + ------- + pandas.DataFrame: Dataframe with the unique timestamps as row index + and the summary name as column name. + """ + time_range = Time.to_af_time_range(start_time, end_time) + _interval = Time.to_af_time_span(interval) + _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) + _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) + _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) + pivalues = self._summaries( + time_range, _interval, _summary_types, _calculation_basis, _time_type + ) + df = pd.DataFrame() + for summary in pivalues: + key = SummaryType(int(summary.Key)).name + timestamps, values = zip( + *[ + (Time.timestamp_to_index(value.Timestamp.UtcTime), value.Value) + for value in summary.Value + ], + strict=True, + ) + df = df.join( + pd.DataFrame(data={key: values}, index=timestamps), # type: ignore + how="outer", + ) + return df + + @abc.abstractmethod + def _summaries( + self, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + time_type: SDK.AF.Data.AFTimestampCalculation, + ) -> _AFtyping.Data.SummariesDict: + pass + + @property + @abc.abstractmethod + def units_of_measurement(self) -> str | None: + """Return the units of measurement of the values in the current object.""" + pass + + 
def update_value( + self, + value: Any, + time: Time.TimeLike | None = None, + update_mode: UpdateMode = UpdateMode.NO_REPLACE, + buffer_mode: BufferMode = BufferMode.BUFFER_IF_POSSIBLE, + ) -> None: + """Update value for existing PI object. + + Parameters + ---------- + value: value type should be in cohesion with PI object or + it will raise PIException: [-10702] STATE Not Found + time (datetime, optional): it is not possible to set future value, + it raises PIException: [-11046] Target Date in Future. + + You can combine update_mode and time to change already stored value. + """ + from . import Time as time_module + + if time is not None: + _value = SDK.AF.Asset.AFValue(value, time_module.to_af_time(time)) + else: + _value = SDK.AF.Asset.AFValue(value) + + _update_mode = SDK.AF.Data.AFUpdateOption(int(update_mode)) + _buffer_mode = SDK.AF.Data.AFBufferOption(int(buffer_mode)) + self._update_value(_value, _update_mode, _buffer_mode) + + @abc.abstractmethod + def _update_value( + self, + value: SDK.AF.Asset.AFValue, + update_mode: SDK.AF.Data.AFUpdateOption, + buffer_mode: SDK.AF.Data.AFBufferOption, + ) -> None: + pass + + +DataContainerType = TypeVar("DataContainerType", bound=DataContainer) +Parameters = ParamSpec("Parameters") + + +class DataContainerCollection(AF.NamedItemList[DataContainerType]): + """Container for a collection of data containers.""" + + @property + def _element_type(self) -> type[DataContainerType]: + if len(self._elements) == 0: + return cast(type[DataContainerType], DataContainer) + return type(self._elements[0]) + + def _combine_dfs_to_df( + self, + func: Callable[Concatenate[DataContainerType, Parameters], pd.DataFrame], + *args: Parameters.args, + **kwargs: Parameters.kwargs, + ) -> pd.DataFrame: + """Combine the results of a function applied to each element in the collection.""" + df = pd.DataFrame() + for element in self._elements: + result = func(element, *args, **kwargs) + df = pd.concat( + [ + df, + result.set_axis( # type: ignore 
+ pd.MultiIndex.from_product([[element.name], result.columns]), axis=1 + ), + ], + axis=1, + ) + return df + + @property + def current_value(self) -> pd.Series: + """Return the current values of all attributes in the collection.""" + idx, value = zip( + *[(element.name, element.current_value) for element in self._elements], strict=True + ) + return pd.Series(value, index=idx) + + def filtered_summaries( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + interval: Time.IntervalLike, + filter_expression: str, + summary_types: SummaryType, + calculation_basis: CalculationBasis = _DEFAULT_CALCULATION_BASIS, + filter_evaluation: ExpressionSampleType = _DEFAULT_FILTER_EVALUATION, + filter_interval: Time.IntervalLike | None = None, + time_type: TimestampCalculation = _DEFAULT_TIMESTAMP_CALCULATION, + ) -> pd.DataFrame: + """Return one or more summary values for each interval within a time range.""" + return self._combine_dfs_to_df( + self._element_type.filtered_summaries, + start_time, + end_time, + interval, + filter_expression, + summary_types, + calculation_basis, + filter_evaluation, + filter_interval, + time_type, + ) + + def interpolated_value(self, time: Time.TimeLike) -> pd.DataFrame: + """Return a pd.DataFrame with an interpolated value at the given time. + + .. warning:: + Relative times are evaluated for each element in the collection, + resulting in a different time for each element. To overcome this, use + a fixed time, for example using the datetime module: + + >>> import datetime + >>> time = datetime.datetime.now() - datetime.timedelta(days=1) + >>> collection.interpolated_value(time) + + Parameters + ---------- + time (str, datetime): String containing the date, and possibly time, + for which to retrieve the value. This is parsed, using + :ref:`Time.to_af_time`. 
+ + Returns + ------- + pd.Series: A pd.Series with a single row, with the corresponding time as + the index + """ + + def _interpolated_value( + element: DataContainerType, + time: Time.TimeLike, + ) -> pd.DataFrame: + return element.interpolated_value(time).to_frame() + + return self._combine_dfs_to_df( + _interpolated_value, + time, + ).droplevel(0, axis=1) + + def interpolated_values( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + interval: Time.IntervalLike, + filter_expression: str = "", + ) -> pd.DataFrame: + """Return a pd.DataFrame of interpolated data. + + Data is returned between *start_time* and *end_time* at a fixed + *interval*. All three values are parsed by AF.Time and the first two + allow for time specification relative to "now" by use of the + asterisk. + + *filter_expression* is an optional string to filter the returned + values, see OSIsoft PI documentation for more information. + + The AF SDK allows for inclusion of filtered data, with filtered + values marked as such. At this point PIconnect does not support this + and filtered values are always left out entirely. + + .. warning:: + Relative times are evaluated for each element in the collection, + resulting in a different time for each element. To overcome this, use + a fixed time, for example using the datetime module: + + >>> import datetime + >>> time = datetime.datetime.now() - datetime.timedelta(days=1) + >>> collection.interpolated_value(time) + + Parameters + ---------- + start_time (str or datetime): Containing the date, and possibly time, + from which to retrieve the values. This is parsed, together + with `end_time`, using :ref:`Time.to_af_time_range`. + end_time (str or datetime): Containing the date, and possibly time, + until which to retrieve values. This is parsed, together + with `start_time`, using :ref:`Time.to_af_time_range`. + interval (str, datetime.timedelta or pd.Timedelta): String containing the interval + at which to extract data. 
This is parsed using :ref:`Time.to_af_time_span`. + filter_expression (str, optional): Defaults to ''. Query on which + data to include in the results. See :ref:`filtering_values` + for more information on filter queries. + + Returns + ------- + pd.DataFrame: Timeseries of the values returned by the SDK + """ + + def _interpolated_values( + element: DataContainerType, + ) -> pd.DataFrame: + return element.interpolated_values( + start_time, end_time, interval, filter_expression + ).to_frame() + + return self._combine_dfs_to_df(_interpolated_values).droplevel(0, axis=1) diff --git a/PIconnect/PIAF.py b/PIconnect/PIAF.py index 21b90958..dbc4ff9e 100644 --- a/PIconnect/PIAF.py +++ b/PIconnect/PIAF.py @@ -4,8 +4,6 @@ import warnings from typing import Any, cast -import pandas as pd - import PIconnect.AFSDK as SDK from PIconnect import PIAFBase, PIConsts, Search, Time from PIconnect._utils import InitialisationWarning @@ -150,42 +148,18 @@ def database_name(self) -> str: return self.database.Name @property - def children(self) -> dict[str, "PIAFElement"]: + def children(self) -> dict[str, PIAFBase.PIAFElement]: """Return a dictionary of the direct child elements of the database.""" - return {c.Name: PIAFElement(c) for c in self.database.Elements} + return {c.Name: PIAFBase.PIAFElement(c) for c in self.database.Elements} @property - def tables(self) -> dict[str, "PIAFTable"]: + def tables(self) -> dict[str, PIAFBase.PIAFTable]: """Return a dictionary of the tables in the database.""" - return {t.Name: PIAFTable(t) for t in self.database.Tables} + return {t.Name: PIAFBase.PIAFTable(t) for t in self.database.Tables} - def descendant(self, path: str) -> "PIAFElement": + def descendant(self, path: str) -> PIAFBase.PIAFElement: """Return a descendant of the database from an exact path.""" - return PIAFElement(self.database.Elements.get_Item(path)) - - def search(self, query: str | list[str]) -> list[PIAFAttribute.PIAFAttribute]: - """Search PIAFAttributes by element|attribute 
path strings. - - Return a list of PIAFAttributes directly from a list of element|attribute path strings - - like this: - - list("BaseElement/childElement/childElement|Attribute|ChildAttribute|ChildAttribute", - "BaseElement/childElement/childElement|Attribute|ChildAttribute|ChildAttribute") - - """ - attributelist: list[PIAFAttribute.PIAFAttribute] = [] - if isinstance(query, list): - return [y for x in query for y in self.search(x)] - if "|" in query: - splitpath = query.split("|") - elem = self.descendant(splitpath[0]) - attribute = elem.attributes[splitpath[1]] - if len(splitpath) > 2: - for x in range(len(splitpath) - 2): - attribute = attribute.children[splitpath[x + 2]] - attributelist.append(attribute) - return attributelist + return PIAFBase.PIAFElement(self.database.Elements.get_Item(path)) def event_frames( self, @@ -194,12 +168,12 @@ def event_frames( max_count: int = 1000, search_mode: PIConsts.EventFrameSearchMode = _DEFAULT_EVENTFRAME_SEARCH_MODE, search_full_hierarchy: bool = False, - ) -> dict[str, "PIAFEventFrame"]: + ) -> dict[str, PIAFBase.PIAFEventFrame]: """Search for event frames in the database.""" _start_time = Time.to_af_time(start_time) _search_mode = SDK.AF.EventFrame.AFEventFrameSearchMode(int(search_mode)) return { - frame.Name: PIAFEventFrame(frame) + frame.Name: PIAFBase.PIAFEventFrame(frame) for frame in SDK.AF.EventFrame.AFEventFrame.FindEventFrames( self.database, None, @@ -214,79 +188,3 @@ def event_frames( search_full_hierarchy, ) } - - -class PIAFElement(PIAFBase.PIAFBaseElement[AF.Asset.AFElement]): - """Container for PI AF elements in the database.""" - - version = "0.1.0" - - @property - def parent(self) -> "PIAFElement | None": - """Return the parent element of the current element, or None if it has none.""" - if not self.element.Parent: - return None - return self.__class__(self.element.Parent) - - @property - def children(self) -> dict[str, "PIAFElement"]: - """Return a dictionary of the direct child elements of the 
current element.""" - return {c.Name: self.__class__(c) for c in self.element.Elements} - - def descendant(self, path: str) -> "PIAFElement": - """Return a descendant of the current element from an exact path.""" - return self.__class__(self.element.Elements.get_Item(path)) - - -class PIAFEventFrame(PIAFBase.PIAFBaseElement[AF.EventFrame.AFEventFrame]): - """Container for PI AF Event Frames in the database.""" - - version = "0.1.0" - - @property - def event_frame(self) -> AF.EventFrame.AFEventFrame: - """Return the underlying AF Event Frame object.""" - return self.element - - @property - def parent(self) -> "PIAFEventFrame | None": - """Return the parent element of the current event frame, or None if it has none.""" - if not self.element.Parent: - return None - return self.__class__(self.element.Parent) - - @property - def children(self) -> dict[str, "PIAFEventFrame"]: - """Return a dictionary of the direct child event frames of the current event frame.""" - return {c.Name: self.__class__(c) for c in self.element.EventFrames} - - -class PIAFTable: - """Container for PI AF Tables in the database.""" - - def __init__(self, table: AF.Asset.AFTable) -> None: - self._table = table - - @property - def columns(self) -> list[str]: - """Return the names of the columns in the table.""" - return [col.ColumnName for col in self._table.Table.Columns] - - @property - def _rows(self) -> list[System.Data.DataRow]: - return self._table.Table.Rows - - @property - def name(self) -> str: - """Return the name of the table.""" - return self._table.Name - - @property - def shape(self) -> tuple[int, int]: - """Return the shape of the table.""" - return (len(self._rows), len(self.columns)) - - @property - def data(self) -> pd.DataFrame: - """Return the data in the table as a pandas DataFrame.""" - return pd.DataFrame([{col: row[col] for col in self.columns} for row in self._rows]) diff --git a/PIconnect/PIAFAttribute.py b/PIconnect/PIAFAttribute.py index 3fbfa452..b0d2ebcd 100644 --- 
a/PIconnect/PIAFAttribute.py +++ b/PIconnect/PIAFAttribute.py @@ -2,7 +2,8 @@ import dataclasses import datetime -from typing import Any +from collections.abc import Iterator, Sequence +from typing import Any, overload from PIconnect import AF, PIData, PIPoint, Time @@ -31,11 +32,9 @@ class PIAFAttribute(PIData.PISeriesContainer): version = "0.1.0" - def __init__( - self, element: AF.Asset.AFBaseElement, attribute: AF.Asset.AFAttribute - ) -> None: + def __init__(self, attribute: AF.Asset.AFAttribute) -> None: super().__init__() - self.element = element + self.element = attribute.Element self.attribute = attribute def __repr__(self): @@ -60,12 +59,12 @@ def parent(self) -> "PIAFAttribute | None": """Return the parent attribute of the current attribute, or None if it has none.""" if not self.attribute.Parent: return None - return self.__class__(self.element, self.attribute.Parent) + return self.__class__(self.attribute.Parent) @property def children(self) -> dict[str, "PIAFAttribute"]: """Return a dictionary of the direct child attributes of the current attribute.""" - return {a.Name: self.__class__(self.element, a) for a in self.attribute.Attributes} + return {a.Name: self.__class__(a) for a in self.attribute.Attributes} @property def description(self) -> str: @@ -184,3 +183,42 @@ def _update_value( update_mode, buffer_mode, ) + + +class PIAFAttributeList(Sequence[PIAFAttribute]): + def __init__(self, attributes: Sequence[PIAFAttribute]) -> None: + self._attributes = attributes + + @overload + def __getitem__(self, index: int | str) -> PIAFAttribute: ... + @overload + def __getitem__(self, index: slice) -> "PIAFAttributeList": ... 
+ def __getitem__(self, index: int | str | slice) -> "PIAFAttribute | PIAFAttributeList": + """Return the attribute at the given index or the attribute with the given name.""" + match index: + case int(): + return self._attributes[index] + case str(): + for attr in self._attributes: + if attr.name == index: + return attr + raise KeyError(f"Attribute {index} not found.") + case slice(): + return PIAFAttributeList(self._attributes[index]) + case _: + raise TypeError("Index must be an int or a string.") + + def __len__(self) -> int: + """Return the number of attributes in the list.""" + return len(self._attributes) + + def __iter__(self) -> Iterator[PIAFAttribute]: + """Return an iterator over the attributes in the list.""" + return iter(self._attributes) + + def __reversed__(self) -> Iterator[PIAFAttribute]: + return reversed(self._attributes) + + def __repr__(self) -> str: + """Return the string representation of the attribute list.""" + return f"{self.__class__.__qualname__}({len(self._attributes)} attributes)" diff --git a/PIconnect/PIAFBase.py b/PIconnect/PIAFBase.py index a47f9818..ffe66084 100644 --- a/PIconnect/PIAFBase.py +++ b/PIconnect/PIAFBase.py @@ -1,9 +1,13 @@ """Base element class for PI AF elements.""" -from typing import Generic, TypeVar +from collections.abc import Iterator, Sequence +from typing import Generic, Self, TypeVar, overload + +import pandas as pd # type: ignore import PIconnect.AFSDK as SDK import PIconnect.PIAFAttribute as PIattr +from PIconnect.AFSDK import System ElementType = TypeVar("ElementType", bound=SDK.AF.Asset.AFBaseElement) @@ -28,7 +32,7 @@ def name(self) -> str: @property def attributes(self) -> dict[str, PIattr.PIAFAttribute]: """Return a dictionary of the attributes of the current element.""" - return {a.Name: PIattr.PIAFAttribute(self.element, a) for a in self.element.Attributes} + return {a.Name: PIattr.PIAFAttribute(a) for a in self.element.Attributes} @property def categories(self) -> SDK.AF.AFCategories: @@ -39,3 
+43,104 @@ def categories(self) -> SDK.AF.AFCategories: def description(self) -> str: """Return the description of the current element.""" return self.element.Description + + +class PIAFElement(PIAFBaseElement[SDK.AF.Asset.AFElement]): + """Container for PI AF elements in the database.""" + + version = "0.1.0" + + @property + def parent(self) -> "PIAFElement | None": + """Return the parent element of the current element, or None if it has none.""" + if not self.element.Parent: + return None + return self.__class__(self.element.Parent) + + @property + def children(self) -> dict[str, "PIAFElement"]: + """Return a dictionary of the direct child elements of the current element.""" + return {c.Name: self.__class__(c) for c in self.element.Elements} + + def descendant(self, path: str) -> "PIAFElement": + """Return a descendant of the current element from an exact path.""" + return self.__class__(self.element.Elements.get_Item(path)) + + +class PIAFElementList(Sequence[PIAFElement]): + """Container for a list of PIAFElement objects.""" + + def __init__(self, elements: list[PIAFElement]) -> None: + self._elements = elements + + @overload + def __getitem__(self, index: int) -> PIAFElement: ... + @overload + def __getitem__(self, index: slice) -> Self: ... 
+ def __getitem__(self, index: int | slice) -> PIAFElement | Self: + """Return the element at the specified index.""" + if isinstance(index, slice): + return self.__class__(self._elements[index]) + return self._elements[index] + + def __len__(self) -> int: + """Return the number of elements in the list.""" + return len(self._elements) + + def __iter__(self) -> Iterator[PIAFElement]: + """Return an iterator over the elements in the list.""" + yield from self._elements + + +class PIAFEventFrame(PIAFBaseElement[SDK.AF.EventFrame.AFEventFrame]): + """Container for PI AF Event Frames in the database.""" + + version = "0.1.0" + + @property + def event_frame(self) -> SDK.AF.EventFrame.AFEventFrame: + """Return the underlying AF Event Frame object.""" + return self.element + + @property + def parent(self) -> "PIAFEventFrame | None": + """Return the parent element of the current event frame, or None if it has none.""" + if not self.element.Parent: + return None + return self.__class__(self.element.Parent) + + @property + def children(self) -> dict[str, "PIAFEventFrame"]: + """Return a dictionary of the direct child event frames of the current event frame.""" + return {c.Name: self.__class__(c) for c in self.element.EventFrames} + + +class PIAFTable: + """Container for PI AF Tables in the database.""" + + def __init__(self, table: SDK.AF.Asset.AFTable) -> None: + self._table = table + + @property + def columns(self) -> list[str]: + """Return the names of the columns in the table.""" + return [col.ColumnName for col in self._table.Table.Columns] + + @property + def _rows(self) -> list[System.Data.DataRow]: + return self._table.Table.Rows + + @property + def name(self) -> str: + """Return the name of the table.""" + return self._table.Name + + @property + def shape(self) -> tuple[int, int]: + """Return the shape of the table.""" + return (len(self._rows), len(self.columns)) + + @property + def data(self) -> pd.DataFrame: + """Return the data in the table as a pandas 
DataFrame.""" + return pd.DataFrame([{col: row[col] for col in self.columns} for row in self._rows]) diff --git a/PIconnect/Search.py b/PIconnect/Search.py new file mode 100644 index 00000000..d9fe0d60 --- /dev/null +++ b/PIconnect/Search.py @@ -0,0 +1,145 @@ +"""Search the AF database for different objects.""" + +import abc +import warnings +from collections.abc import Iterator, Sequence +from typing import Generic, TypeVar + +import PIconnect.AFSDK as SDK + +from . import Asset + +SearchResultType = TypeVar( + "SearchResultType", + # PIAFBase.PIAFElement, + # PIAFBase.PIAFEventFrame, + # PIAFAttribute.PIAFAttribute, +) +AFSearchResultType = TypeVar("AFSearchResultType", covariant=True) + + +class SearchResult(Generic[AFSearchResultType, SearchResultType], abc.ABC): + """Container for search results.""" + + def __init__( + self, + search: "SDK.AF.Search.AFSearch[AFSearchResultType]", + ) -> None: + self.search = search + self.result_type: type[SearchResultType] + + @property + def name(self) -> str: + """Return the name of the search result.""" + return self.search.SearchName + + @property + def count(self) -> int: + """Return the number of items in the search result.""" + return self.search.GetTotalCount() + + def __iter__(self) -> Iterator[SearchResultType]: + """Return an iterator over the items in the search result.""" + for item in self.search.FindObjects(): + yield self.result_type(item) # type: ignore + + def one(self) -> SearchResultType: + """Return the only item in the search result. + + Raises + ------ + ValueError: If there are no results or more than one result. 
+ """ + if self.count == 0: + raise ValueError("No results found") + if self.count > 1: + raise ValueError(f"More than one result found ({self.count} results)") + return next(iter(self)) + + @abc.abstractmethod + def to_list(self) -> Sequence[SearchResultType]: + """Return all items in the search result in a sequence.""" + pass + + +class AttributeSearchResult(SearchResult[SDK.AF.Asset.AFAttribute, Asset.AFAttribute]): + """Container for attribute search results.""" + + def __init__( + self, + search: SDK.AF.Search.AFAttributeSearch, + ) -> None: + super().__init__(search) + self.result_type = Asset.AFAttribute + + def to_list(self) -> Asset.AFAttributeList: + """Return all items in the search result.""" + return Asset.AFAttributeList(list(self)) + + +class ElementSearchResult(SearchResult[SDK.AF.Asset.AFElement, Asset.AFElement]): + """Container for element search results.""" + + def __init__( + self, + search: SDK.AF.Search.AFElementSearch, + ) -> None: + super().__init__(search) + self.result_type = Asset.AFElement + + def to_list(self) -> Asset.AFElementList: + """Return all items in the search result.""" + return Asset.AFElementList(list(self)) + + +class Search: + """Search the AF database for different objects.""" + + def __init__(self, database: SDK.AF.AFDatabase) -> None: + self.database = database + + def attributes( + self, query: str, query_name: str = "element_search" + ) -> AttributeSearchResult: + """Search for attributes in the AF database.""" + search = SDK.AF.Search.AFAttributeSearch(self.database, query_name, query) + return AttributeSearchResult(search) + + def elements(self, query: str, query_name: str = "element_search") -> ElementSearchResult: + """Search for elements in the AF database.""" + search = SDK.AF.Search.AFElementSearch(self.database, query_name, query) + return ElementSearchResult(search) + + def _descendant(self, path: str) -> Asset.AFElement: + return Asset.AFElement(self.database.Elements.get_Item(path)) + + def __call__(self,
query: str | list[str]) -> Asset.AFAttributeList: + """Search AFAttributes by element|attribute path strings. + + Return a list of AFAttributes directly from a list of element|attribute path strings + + like this: + + list("BaseElement/childElement/childElement|Attribute|ChildAttribute|ChildAttribute", + "BaseElement/childElement/childElement|Attribute|ChildAttribute|ChildAttribute") + + """ + warnings.warn( + """Call to Search.__call__ is deprecated, use Search. instead""", + DeprecationWarning, + stacklevel=2, + ) + attributelist = Asset.AFAttributeList([]) + if isinstance(query, list): + for x in query: + attributelist.extend(self(x)) + return attributelist + if "|" in query: + splitpath = query.split("|") + elem = self._descendant(splitpath[0]) + attribute = elem.attributes[splitpath[1]] + if len(splitpath) > 2: + for x in range(len(splitpath) - 2): + attribute = attribute.children[splitpath[x + 2]] + attributelist.append(attribute) + return attributelist diff --git a/tests/test_PIAF.py b/tests/test_PIAF.py index 8ff5e99e..ab564d3f 100644 --- a/tests/test_PIAF.py +++ b/tests/test_PIAF.py @@ -7,6 +7,7 @@ import PIconnect as PI import PIconnect.AFSDK as AFSDK import PIconnect.PIAF as PIAF +from PIconnect import Asset from PIconnect._typing import AF AFSDK.AF, AFSDK.System, AFSDK.AF_SDK_VERSION = AFSDK.__fallback() @@ -61,18 +62,21 @@ class TestDatabaseSearch: def test_search(self): """Test that calling attributes on the database returns a list of attributes.""" - with PI.PIAFDatabase() as db: - attributes = db.search([r"", r""]) - assert isinstance(attributes, list) + with pytest.warns(DeprecationWarning): + with PI.PIAFDatabase() as db: + attributes = db.search([r"", r""]) + assert isinstance(attributes, Asset.AFAttributeList) def test_split_element_attribute(self): """Test that calling attributes on the database returns a list of attributes.""" - with PI.PIAFDatabase() as db: - attributes = db.search(r"BaseElement|Attribute1") - assert attributes[0].name == 
"Attribute1" + with pytest.warns(DeprecationWarning): + with PI.PIAFDatabase() as db: + attributes = db.search(r"BaseElement|Attribute1") + assert isinstance(attributes[0].name, str) def test_split_element_nested_attribute(self): """Test that calling attributes on the database returns a list of attributes.""" - with PI.PIAFDatabase() as db: - attributes = db.search(r"BaseElement|Attribute1|Attribute2") - assert attributes[0].name == "Attribute2" + with pytest.warns(DeprecationWarning): + with PI.PIAFDatabase() as db: + attributes = db.search(r"BaseElement|Attribute1|Attribute2") + assert isinstance(attributes[0].name, str) From 5c395acb0e3981f60ffbf9cf67c9f8a35fbdff91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Wed, 2 Apr 2025 15:59:23 +0200 Subject: [PATCH 09/28] chore: simplify Search.__call__ --- PIconnect/Search.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/PIconnect/Search.py b/PIconnect/Search.py index d9fe0d60..b384a305 100644 --- a/PIconnect/Search.py +++ b/PIconnect/Search.py @@ -133,8 +133,7 @@ def __call__(self, query: str | list[str]) -> Asset.AFAttributeList: if isinstance(query, list): for x in query: attributelist.extend(self(x)) - return attributelist - if "|" in query: + elif "|" in query: splitpath = query.split("|") elem = self._descendant(splitpath[0]) attribute = elem.attributes[splitpath[1]] From 2f42e5ef6589a4e9bb24931b87c985f55d0173f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Wed, 2 Apr 2025 16:00:05 +0200 Subject: [PATCH 10/28] feat: add AFAttribute.stepped_data and AFBaseElement.path properties --- PIconnect/Asset.py | 10 ++++++++++ PIconnect/Data.py | 6 ++++++ PIconnect/_typing/Asset.py | 6 ++++++ 3 files changed, 22 insertions(+) diff --git a/PIconnect/Asset.py b/PIconnect/Asset.py index 93f6b78f..049f7c68 100644 --- a/PIconnect/Asset.py +++ b/PIconnect/Asset.py @@ -106,6 +106,11 @@ def __repr__(self): ) return f"{self.__class__.__qualname__}({description}; Current 
Value: {value})" + @property + def stepped_data(self) -> bool: + """Return True if the attribute is a stepped data type.""" + return self.attribute.Step + @property def element(self) -> SDK.AF.Asset.AFBaseElement: """Return the element to which the attribute belongs.""" @@ -296,6 +301,11 @@ def description(self) -> str: """Return the description of the current element.""" return self.element.Description + @property + def path(self) -> str: + """Return the path of the current element.""" + return self.element.GetPath() + class AFElement(AFBaseElement[SDK.AF.Asset.AFElement]): """Container for PI AF elements in the database.""" diff --git a/PIconnect/Data.py b/PIconnect/Data.py index 82fa9354..7e11fdad 100644 --- a/PIconnect/Data.py +++ b/PIconnect/Data.py @@ -219,6 +219,12 @@ def name(self) -> str: """Return the name of the data container.""" pass + @property + @abc.abstractmethod + def stepped_data(self) -> bool: + """Return True if the data is stepped, False if it is continuous.""" + pass + @property def current_value(self) -> Any: """Return the current value of the attribute.""" diff --git a/PIconnect/_typing/Asset.py b/PIconnect/_typing/Asset.py index 57f0daf5..acf430a5 100644 --- a/PIconnect/_typing/Asset.py +++ b/PIconnect/_typing/Asset.py @@ -44,6 +44,7 @@ def __init__( self.DefaultUOM = UOM.UOM() self.Name = name self.Parent = parent + self.Step = False @staticmethod def GetValue() -> AFValue: @@ -87,6 +88,11 @@ def __init__(self, name: str, parent: "AFElement | None" = None) -> None: self.Name = name self.Parent = parent + @staticmethod + def GetPath() -> str: + """Stub for getting the path.""" + return "Path\\to\\the|element" + class AFElement(AFBaseElement): """Mock class of the AF.AFElement class.""" From 29c1ae0c49f741d2f9c7c2e3f4e3cab80546a983 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Wed, 2 Apr 2025 16:02:01 +0200 Subject: [PATCH 11/28] feat: complete DataContainerCollection data queries --- PIconnect/Data.py | 295 
+++++++++++++++++++++++++++++++++++++++------- 1 file changed, 251 insertions(+), 44 deletions(-) diff --git a/PIconnect/Data.py b/PIconnect/Data.py index 7e11fdad..f50b2c54 100644 --- a/PIconnect/Data.py +++ b/PIconnect/Data.py @@ -4,7 +4,7 @@ import datetime import enum from collections.abc import Callable -from typing import Any, Concatenate, ParamSpec, TypeVar, cast +from typing import Any, Concatenate, Literal, ParamSpec, TypeVar, cast import pandas as pd # type: ignore @@ -455,9 +455,7 @@ def recorded_value( pd.Series: A pd.Series with a single row, with the corresponding time as the index """ - from . import Time as time_module - - _time = time_module.to_af_time(time) + _time = Time.to_af_time(time) _retrieval_mode = SDK.AF.Data.AFRetrievalMode(int(retrieval_mode)) pivalue = self._recorded_value(_time, _retrieval_mode) result = pd.Series( @@ -735,6 +733,8 @@ def _update_value( DataContainerType = TypeVar("DataContainerType", bound=DataContainer) Parameters = ParamSpec("Parameters") +Align = Literal["auto", "ffill", "bfill", "nearest", "time", False] + class DataContainerCollection(AF.NamedItemList[DataContainerType]): """Container for a collection of data containers.""" @@ -747,24 +747,55 @@ def _element_type(self) -> type[DataContainerType]: def _combine_dfs_to_df( self, - func: Callable[Concatenate[DataContainerType, Parameters], pd.DataFrame], + func: Callable[Concatenate[DataContainerType, Parameters], pd.DataFrame | pd.Series], + _align: Align = False, + _add_name_to_index: bool = False, *args: Parameters.args, **kwargs: Parameters.kwargs, ) -> pd.DataFrame: """Combine the results of a function applied to each element in the collection.""" - df = pd.DataFrame() - for element in self._elements: + + def add_name_to_index(df: pd.DataFrame, element: DataContainerType) -> pd.DataFrame: + if _add_name_to_index: + return df.set_axis( # type: ignore + pd.MultiIndex.from_product([[element.name], df.columns]), axis=1 + ) + return df + + def apply_func(element: 
DataContainerType) -> pd.DataFrame: result = func(element, *args, **kwargs) - df = pd.concat( - [ - df, - result.set_axis( # type: ignore - pd.MultiIndex.from_product([[element.name], result.columns]), axis=1 - ), - ], - axis=1, - ) - return df + match result: + case pd.DataFrame(): + df = result + case pd.Series(): + df = result.to_frame() + return add_name_to_index(df, element) + + def align(df: pd.DataFrame) -> pd.DataFrame: + match _align: + case False: + return df + case "auto": + for col in df.columns.levels[0]: # type: ignore + if self[str(col)].stepped_data: # type: ignore + df[col] = df[col].ffill(axis=0) # type: ignore + else: + df[col] = ( + df[col] + .apply(pd.to_numeric, axis=1, errors="coerce") # type: ignore + .interpolate(method="time", axis=0) # type: ignore + ) + return df + case "ffill": + return df.ffill(axis=0) # type: ignore + case "bfill": + return df.bfill(axis=0) # type: ignore + case "nearest": + return df.interpolate(method="nearest", axis=0) # type: ignore + case "time": + return df.interpolate(method="time", axis=0) # type: ignore + + return align(pd.concat(map(apply_func, self._elements), axis=1)) @property def current_value(self) -> pd.Series: @@ -785,22 +816,25 @@ def filtered_summaries( filter_evaluation: ExpressionSampleType = _DEFAULT_FILTER_EVALUATION, filter_interval: Time.IntervalLike | None = None, time_type: TimestampCalculation = _DEFAULT_TIMESTAMP_CALCULATION, + align: Align = False, ) -> pd.DataFrame: """Return one or more summary values for each interval within a time range.""" return self._combine_dfs_to_df( self._element_type.filtered_summaries, - start_time, - end_time, - interval, - filter_expression, - summary_types, - calculation_basis, - filter_evaluation, - filter_interval, - time_type, + _align=align, + _add_name_to_index=True, + start_time=start_time, + end_time=end_time, + interval=interval, + filter_expression=filter_expression, + summary_types=summary_types, + calculation_basis=calculation_basis, + 
filter_evaluation=filter_evaluation, + filter_interval=filter_interval, + time_type=time_type, ) - def interpolated_value(self, time: Time.TimeLike) -> pd.DataFrame: + def interpolated_value(self, time: Time.TimeLike, align: Align = False) -> pd.DataFrame: """Return a pd.DataFrame with an interpolated value at the given time. .. warning:: @@ -823,17 +857,9 @@ def interpolated_value(self, time: Time.TimeLike) -> pd.DataFrame: pd.Series: A pd.Series with a single row, with the corresponding time as the index """ - - def _interpolated_value( - element: DataContainerType, - time: Time.TimeLike, - ) -> pd.DataFrame: - return element.interpolated_value(time).to_frame() - return self._combine_dfs_to_df( - _interpolated_value, - time, - ).droplevel(0, axis=1) + self._element_type.interpolated_value, _align=align, time=time + ) def interpolated_values( self, @@ -841,6 +867,7 @@ def interpolated_values( end_time: Time.TimeLike, interval: Time.IntervalLike, filter_expression: str = "", + align: Align = False, ) -> pd.DataFrame: """Return a pd.DataFrame of interpolated data. @@ -883,12 +910,192 @@ def interpolated_values( ------- pd.DataFrame: Timeseries of the values returned by the SDK """ + return self._combine_dfs_to_df( + self._element_type.interpolated_values, + _align=align, + start_time=start_time, + end_time=end_time, + interval=interval, + filter_expression=filter_expression, + ) + + def recorded_value( + self, + time: Time.TimeLike, + retrieval_mode: RetrievalMode = RetrievalMode.AUTO, + align: Align = False, + ) -> pd.DataFrame: + """Return a pd.Series with the recorded value at or close to the given time. + + Parameters + ---------- + time (str): String containing the date, and possibly time, + for which to retrieve the value. This is parsed, using + :afsdk:`AF.Time.AFTime `. + retrieval_mode (int or :any:`PIConsts.RetrievalMode`): Flag determining + which value to return if no value available at the exact requested + time. 
+ + Returns + ------- + pd.Series: A pd.Series with a single row, with the corresponding time as + the index + """ + return self._combine_dfs_to_df( + self._element_type.recorded_value, + _align=align, + time=time, + retrieval_mode=retrieval_mode, + ) + + def recorded_values( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + boundary_type: BoundaryType = BoundaryType.INSIDE, + filter_expression: str = "", + align: Align = False, + ) -> pd.DataFrame: + """Return a pd.Series of recorded data. - def _interpolated_values( - element: DataContainerType, - ) -> pd.DataFrame: - return element.interpolated_values( - start_time, end_time, interval, filter_expression - ).to_frame() + Data is returned between the given *start_time* and *end_time*, + inclusion of the boundaries is determined by the *boundary_type* + attribute. Both *start_time* and *end_time* are parsed by AF.Time and + allow for time specification relative to "now" by use of the asterisk. + + By default the *boundary_type* is set to 'inside', which returns from + the first value after *start_time* to the last value before *end_time*. + The other options are 'outside', which returns from the last value + before *start_time* to the first value before *end_time*, and + 'interpolate', which interpolates the first value to the given + *start_time* and the last value to the given *end_time*. + + *filter_expression* is an optional string to filter the returned + values, see OSIsoft PI documentation for more information. - return self._combine_dfs_to_df(_interpolated_values).droplevel(0, axis=1) + The AF SDK allows for inclusion of filtered data, with filtered values + marked as such. At this point PIconnect does not support this and + filtered values are always left out entirely. + + Parameters + ---------- + start_time (str or datetime): Containing the date, and possibly time, + from which to retrieve the values. This is parsed, together + with `end_time`, using :ref:`Time.to_af_time_range`. 
+ end_time (str or datetime): Containing the date, and possibly time, + until which to retrieve values. This is parsed, together + with `start_time`, using :ref:`Time.to_af_time_range`. + boundary_type (BoundaryType): Specification for how to handle values near the + specified start and end time. Defaults to `BoundaryType.INSIDE`. + filter_expression (str, optional): Defaults to ''. Query on which + data to include in the results. See :ref:`filtering_values` + for more information on filter queries. + + Returns + ------- + pd.Series: Timeseries of the values returned by the SDK + """ + return self._combine_dfs_to_df( + self._element_type.recorded_values, + _align=align, + start_time=start_time, + end_time=end_time, + boundary_type=boundary_type, + filter_expression=filter_expression, + ) + + def summary( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + summary_types: SummaryType, + calculation_basis: CalculationBasis = _DEFAULT_CALCULATION_BASIS, + time_type: TimestampCalculation = _DEFAULT_TIMESTAMP_CALCULATION, + align: Align = False, + ) -> pd.DataFrame: + """Return one or more summary values over a single time range. + + Parameters + ---------- + start_time (str or datetime): Containing the date, and possibly time, + from which to retrieve the values. This is parsed, together + with `end_time`, using :ref:`Time.to_af_time_range`. + end_time (str or datetime): Containing the date, and possibly time, + until which to retrieve values. This is parsed, together + with `start_time`, using :ref:`Time.to_af_time_range`. + summary_types (int or SummaryType): Type(s) of summaries + of the data within the requested time range. + calculation_basis (int or CalculationBasis, optional): + Event weighting within an interval. See :ref:`event_weighting` + and :any:`CalculationBasis` for more information. Defaults to + CalculationBasis.TIME_WEIGHTED. + time_type (int or TimestampCalculation, optional): + Timestamp to return for each of the requested summaries. 
See + :ref:`summary_timestamps` and :any:`TimestampCalculation` for + more information. Defaults to TimestampCalculation.AUTO. + + Returns + ------- + pandas.DataFrame: Dataframe with the unique timestamps as row index + and the summary name as column name. + """ + return self._combine_dfs_to_df( + self._element_type.summary, + _align=align, + _add_name_to_index=True, + start_time=start_time, + end_time=end_time, + summary_types=summary_types, + calculation_basis=calculation_basis, + time_type=time_type, + ) + + def summaries( + self, + start_time: Time.TimeLike, + end_time: Time.TimeLike, + interval: Time.IntervalLike, + summary_types: SummaryType, + calculation_basis: CalculationBasis = _DEFAULT_CALCULATION_BASIS, + time_type: TimestampCalculation = _DEFAULT_TIMESTAMP_CALCULATION, + align: Align = False, + ) -> pd.DataFrame: + """Return one or more summary values for each interval within a time range. + + Parameters + ---------- + start_time (str or datetime): Containing the date, and possibly time, + from which to retrieve the values. This is parsed, together + with `end_time`, using :ref:`Time.to_af_time_range`. + end_time (str or datetime): Containing the date, and possibly time, + until which to retrieve values. This is parsed, together + with `start_time`, using :ref:`Time.to_af_time_range`. + interval (str, datetime.timedelta or pd.Timedelta): String containing the interval + at which to extract data. This is parsed using :ref:`Time.to_af_time_span`. + summary_types (int or PIConsts.SummaryType): Type(s) of summaries + of the data within the requested time range. + calculation_basis (int or PIConsts.CalculationBasis, optional): + Event weighting within an interval. See :ref:`event_weighting` + and :any:`CalculationBasis` for more information. Defaults to + CalculationBasis.TIME_WEIGHTED. + time_type (int or PIConsts.TimestampCalculation, optional): + Timestamp to return for each of the requested summaries. 
See + :ref:`summary_timestamps` and :any:`TimestampCalculation` for + more information. Defaults to TimestampCalculation.AUTO. + + Returns + ------- + pandas.DataFrame: Dataframe with the unique timestamps as row index + and the summary name as column name. + """ + return self._combine_dfs_to_df( + self._element_type.summaries, + _align=align, + _add_name_to_index=True, + start_time=start_time, + end_time=end_time, + interval=interval, + summary_types=summary_types, + calculation_basis=calculation_basis, + time_type=time_type, + ) From 3101b198ea2a618f197846bab298b379b4029e62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Fri, 4 Apr 2025 14:10:04 +0200 Subject: [PATCH 12/28] feat: migrate collections to new module, rename PIAF to AF --- PIconnect/AF.py | 246 +++++++++++++++++++++----------------- PIconnect/PIAF.py | 190 ----------------------------- PIconnect/_collections.py | 114 ++++++++++++++++++ 3 files changed, 252 insertions(+), 298 deletions(-) delete mode 100644 PIconnect/PIAF.py create mode 100644 PIconnect/_collections.py diff --git a/PIconnect/AF.py b/PIconnect/AF.py index 2911ed93..568163d9 100644 --- a/PIconnect/AF.py +++ b/PIconnect/AF.py @@ -1,114 +1,144 @@ -"""Generics for AF collections.""" +"""AF - Core containers for connections to the PI Asset Framework.""" + +import warnings +from typing import Any, Self + +import PIconnect.AFSDK as SDK +from PIconnect import Asset, PIConsts, Search, Time +from PIconnect.AFSDK import System + +_DEFAULT_EVENTFRAME_SEARCH_MODE = PIConsts.EventFrameSearchMode.STARTING_AFTER + + +class AFDatabase: + """Context manager for connections to the PI Asset Framework database.""" + + version = "0.3.0" + + @classmethod + def servers(cls) -> dict[str, SDK.AF.PISystem]: + """Return a dictionary of the known servers.""" + return {server.Name: server for server in SDK.AF.PISystems()} + + @classmethod + def default_server(cls) -> SDK.AF.PISystem | None: + """Return the default server.""" + if 
SDK.AF.PISystems().DefaultPISystem: + return SDK.AF.PISystems().DefaultPISystem + servers = SDK.AF.PISystems() + if servers.Count > 0: + return next(iter(servers)) + else: + return None + + def __init__(self, server: str | None = None, database: str | None = None) -> None: + self.server: SDK.AF.PISystem = self._initialise_server(server) + self.database: SDK.AF.AFDatabase = self._initialise_database(database) + self.search = Search.Search(self.database) + + def _initialise_server(self, server: str | None) -> SDK.AF.PISystem: + """Initialise the server connection.""" + default_server = self.default_server() + if server is None: + if default_server is None: + raise ValueError("No server specified and no default server found.") + return default_server + + try: + return SDK.AF.PISystems()[server] + except (Exception, System.Exception): # type: ignore + if default_server is None: + raise ValueError( + f'Server "{server}" not found and no default server found.' + ) from None + message = f'Server "{server}" not found, using the default server.' + warnings.warn(message=message, category=UserWarning, stacklevel=2) + return default_server + + def _initialise_database(self, database: str | None) -> SDK.AF.AFDatabase: + def default_db(): + default = self.server.Databases.DefaultDatabase + if default is None: + raise ValueError("No database specified and no default database found.") + return default + + if database is None: + return default_db() + + try: + return self.server.Databases[database] + except (Exception, System.Exception): # type: ignore + message = f'Database "{database}" not found, using the default database.' 
+ warnings.warn(message=message, category=UserWarning, stacklevel=2) + return default_db() + + def __enter__(self) -> Self: + """Open the PI AF server connection context.""" + self.server.Connect() + return self + + def __exit__( + self, + *args: Any, # type: ignore + ) -> bool: + """Close the PI AF server connection context.""" + return False + # Disabled disconnecting because garbage collection sometimes impedes + # connecting to another server later + # self.server.Disconnect() -from collections.abc import Iterable, Iterator, MutableSequence -from typing import Protocol, Self, TypeVar, overload + def __repr__(self) -> str: + """Return a representation of the PI AF database connection.""" + return f"{self.__class__.__qualname__}(\\\\{self.server_name}\\{self.database_name})" + @property + def server_name(self) -> str: + """Return the name of the connected PI AF server.""" + return self.server.Name -class NamedItem(Protocol): - """Protocol for an item with a name.""" + @property + def database_name(self) -> str: + """Return the name of the connected PI AF database.""" + return self.database.Name @property - def name(self) -> str: - """Return the name of the item.""" - ... - - -NamedItemType = TypeVar("NamedItemType", bound=NamedItem) - - -class NamedItemList(MutableSequence[NamedItemType]): - """A list of items with names. - - This class provides a way to access items by index or by name. - """ - - def __init__(self, elements: MutableSequence[NamedItemType]) -> None: - self._elements = elements - - @overload - def __getitem__(self, index: int | str) -> NamedItemType: ... - @overload - def __getitem__(self, index: slice) -> Self: ... 
- def __getitem__(self, index: int | str | slice) -> NamedItemType | Self: - """Return the list item at the given index or the list item with the given name.""" - match index: - case int(): - return self._elements[index] - case str(): - for attr in self._elements: - if attr.name == index: - return attr - raise KeyError(f"List item {index} not found.") - case slice(): - return self.__class__(self._elements[index]) - case _: - raise TypeError("Index must be an int, string or slice of int.") # type: ignore - - def __len__(self) -> int: - """Return the number of items in the list.""" - return len(self._elements) - - def __iter__(self) -> Iterator[NamedItemType]: - """Return an iterator over the items in the list.""" - return iter(self._elements) - - @overload - def __setitem__(self, index: int | str, value: NamedItemType) -> None: ... - @overload - def __setitem__(self, index: slice, value: Iterable[NamedItemType]) -> None: ... - def __setitem__( - self, index: int | str | slice, value: NamedItemType | Iterable[NamedItemType] - ) -> None: - """Set the list item at the given index or the list item with the given name.""" - match index: - case int(): - self._elements[index] = value # type: ignore - case str(): - for i, attr in enumerate(self._elements): - if attr.name == index: - self._elements[i] = value # type: ignore - return - raise KeyError(f"List item {index} not found.") - case slice(): - if isinstance(value, Iterable): - self._elements[index] = list(value) - else: - raise TypeError("Value must be an iterable.") - case _: - raise TypeError("Index must be an int or string.") # type: ignore - - def __delitem__(self, index: int | str | slice) -> None: - """Delete the list item at the given index or the list item with the given name.""" - match index: - case int(): - del self._elements[index] - case slice(): - del self._elements[index] - case str(): - for i, attr in enumerate(self._elements): - if attr.name == index: - del self._elements[i] - return - raise 
KeyError(f"List item {index} not found.") - case _: - raise TypeError("Index must be an int or string.") # type: ignore - - def insert(self, index: int, value: NamedItemType) -> None: - """Insert a new item at the given index.""" - self._elements.insert(index, value) - - def append(self, value: NamedItemType) -> None: - """Append a new item to the end of the list.""" - self._elements.append(value) - - def extend(self, values: Iterable[NamedItemType]) -> None: - """Extend the list with a new iterable of items.""" - self._elements.extend(values) - - def __reversed__(self) -> Iterator[NamedItemType]: - """Return a reverse iterator over the items in the list.""" - return reversed(self._elements) + def children(self) -> dict[str, Asset.AFElement]: + """Return a dictionary of the direct child elements of the database.""" + return {c.Name: Asset.AFElement(c) for c in self.database.Elements} - def __repr__(self) -> str: - """Return the string representation of the list.""" - return f"{self.__class__.__qualname__}({len(self._elements)} items)" + @property + def tables(self) -> dict[str, Asset.AFTable]: + """Return a dictionary of the tables in the database.""" + return {t.Name: Asset.AFTable(t) for t in self.database.Tables} + + def descendant(self, path: str) -> Asset.AFElement: + """Return a descendant of the database from an exact path.""" + return Asset.AFElement(self.database.Elements.get_Item(path)) + + def event_frames( + self, + start_time: Time.TimeLike = "", + start_index: int = 0, + max_count: int = 1000, + search_mode: PIConsts.EventFrameSearchMode = _DEFAULT_EVENTFRAME_SEARCH_MODE, + search_full_hierarchy: bool = False, + ) -> dict[str, Asset.AFEventFrame]: + """Search for event frames in the database.""" + _start_time = Time.to_af_time(start_time) + _search_mode = SDK.AF.EventFrame.AFEventFrameSearchMode(int(search_mode)) + return { + frame.Name: Asset.AFEventFrame(frame) + for frame in SDK.AF.EventFrame.AFEventFrame.FindEventFrames( + self.database, + None, + 
_start_time, + start_index, + max_count, + _search_mode, + None, + None, + None, + None, + search_full_hierarchy, + ) + } diff --git a/PIconnect/PIAF.py b/PIconnect/PIAF.py deleted file mode 100644 index dbc4ff9e..00000000 --- a/PIconnect/PIAF.py +++ /dev/null @@ -1,190 +0,0 @@ -"""PIAF - Core containers for connections to the PI Asset Framework.""" - -import dataclasses -import warnings -from typing import Any, cast - -import PIconnect.AFSDK as SDK -from PIconnect import PIAFBase, PIConsts, Search, Time -from PIconnect._utils import InitialisationWarning -from PIconnect.AFSDK import System - -_DEFAULT_EVENTFRAME_SEARCH_MODE = PIConsts.EventFrameSearchMode.STARTING_AFTER - - -@dataclasses.dataclass(frozen=True) -class PIAFServer: - """Reference to a PI AF server and its databases.""" - - server: SDK.AF.PISystem - databases: dict[str, SDK.AF.AFDatabase] = dataclasses.field(default_factory=dict) - - def __getitem__(self, attr: str) -> SDK.AF.PISystem | dict[str, SDK.AF.AFDatabase]: - """Allow access to attributes as if they were dictionary items.""" - return getattr(self, attr) - - -ServerSpec = dict[str, SDK.AF.PISystem | dict[str, SDK.AF.AFDatabase]] - - -def _lookup_servers() -> dict[str, ServerSpec]: - servers: dict[str, PIAFServer] = {} - for s in SDK.AF.PISystems(): - try: - servers[s.Name] = server = PIAFServer(s) - for d in s.Databases: - try: - server.databases[d.Name] = d - except (Exception, System.Exception) as e: # type: ignore - warnings.warn( - f"Failed loading database data for {d.Name} on {s.Name} " - f"with error {type(cast(Exception, e)).__qualname__}", - InitialisationWarning, - stacklevel=2, - ) - except (Exception, System.Exception) as e: # type: ignore - warnings.warn( - f"Failed loading server data for {s.Name} " - f"with error {type(cast(Exception, e)).__qualname__}", - InitialisationWarning, - stacklevel=2, - ) - return { - server_name: { - "server": server.server, - "databases": dict(server.databases.items()), - } - for server_name, server 
in servers.items() - } - - -def _lookup_default_server() -> ServerSpec | None: - servers = _lookup_servers() - if SDK.AF.PISystems().DefaultPISystem: - return servers[SDK.AF.PISystems().DefaultPISystem.Name] - elif len(servers) > 0: - return servers[list(_lookup_servers())[0]] - else: - return None - - -class PIAFDatabase: - """Context manager for connections to the PI Asset Framework database.""" - - version = "0.3.0" - - servers: dict[str, ServerSpec] = _lookup_servers() - default_server: ServerSpec | None = _lookup_default_server() - - def __init__(self, server: str | None = None, database: str | None = None) -> None: - server_spec = self._initialise_server(server) - self.server: SDK.AF.PISystem = server_spec["server"] # type: ignore - self.database: SDK.AF.AFDatabase = self._initialise_database(server_spec, database) - self.search = Search.Search(self.database) - - def _initialise_server(self, server: str | None) -> ServerSpec: - if server is None: - if self.default_server is None: - raise ValueError("No server specified and no default server found.") - return self.default_server - - if server not in self.servers: - if self.default_server is None: - raise ValueError(f'Server "{server}" not found and no default server found.') - message = 'Server "{server}" not found, using the default server.' - warnings.warn( - message=message.format(server=server), category=UserWarning, stacklevel=2 - ) - return self.default_server - - return self.servers[server] - - def _initialise_database( - self, server: ServerSpec, database: str | None - ) -> SDK.AF.AFDatabase: - def default_db(): - default = self.server.Databases.DefaultDatabase - if default is None: - raise ValueError("No database specified and no default database found.") - return default - - if database is None: - return default_db() - - databases = cast(dict[str, SDK.AF.AFDatabase], server["databases"]) - if database not in databases: - message = 'Database "{database}" not found, using the default database.' 
- warnings.warn( - message=message.format(database=database), category=UserWarning, stacklevel=2 - ) - return default_db() - - return databases[database] - - def __enter__(self) -> "PIAFDatabase": - """Open the PI AF server connection context.""" - self.server.Connect() - return self - - def __exit__(self, *args: Any) -> None: - """Close the PI AF server connection context.""" - pass - # Disabled disconnecting because garbage collection sometimes impedes - # connecting to another server later - # self.server.Disconnect() - - def __repr__(self) -> str: - """Return a representation of the PI AF database connection.""" - return f"{self.__class__.__qualname__}(\\\\{self.server_name}\\{self.database_name})" - - @property - def server_name(self) -> str: - """Return the name of the connected PI AF server.""" - return self.server.Name - - @property - def database_name(self) -> str: - """Return the name of the connected PI AF database.""" - return self.database.Name - - @property - def children(self) -> dict[str, PIAFBase.PIAFElement]: - """Return a dictionary of the direct child elements of the database.""" - return {c.Name: PIAFBase.PIAFElement(c) for c in self.database.Elements} - - @property - def tables(self) -> dict[str, PIAFBase.PIAFTable]: - """Return a dictionary of the tables in the database.""" - return {t.Name: PIAFBase.PIAFTable(t) for t in self.database.Tables} - - def descendant(self, path: str) -> PIAFBase.PIAFElement: - """Return a descendant of the database from an exact path.""" - return PIAFBase.PIAFElement(self.database.Elements.get_Item(path)) - - def event_frames( - self, - start_time: Time.TimeLike = "", - start_index: int = 0, - max_count: int = 1000, - search_mode: PIConsts.EventFrameSearchMode = _DEFAULT_EVENTFRAME_SEARCH_MODE, - search_full_hierarchy: bool = False, - ) -> dict[str, PIAFBase.PIAFEventFrame]: - """Search for event frames in the database.""" - _start_time = Time.to_af_time(start_time) - _search_mode = 
SDK.AF.EventFrame.AFEventFrameSearchMode(int(search_mode)) - return { - frame.Name: PIAFBase.PIAFEventFrame(frame) - for frame in SDK.AF.EventFrame.AFEventFrame.FindEventFrames( - self.database, - None, - _start_time, - start_index, - max_count, - _search_mode, - None, - None, - None, - None, - search_full_hierarchy, - ) - } diff --git a/PIconnect/_collections.py b/PIconnect/_collections.py new file mode 100644 index 00000000..2911ed93 --- /dev/null +++ b/PIconnect/_collections.py @@ -0,0 +1,114 @@ +"""Generics for AF collections.""" + +from collections.abc import Iterable, Iterator, MutableSequence +from typing import Protocol, Self, TypeVar, overload + + +class NamedItem(Protocol): + """Protocol for an item with a name.""" + + @property + def name(self) -> str: + """Return the name of the item.""" + ... + + +NamedItemType = TypeVar("NamedItemType", bound=NamedItem) + + +class NamedItemList(MutableSequence[NamedItemType]): + """A list of items with names. + + This class provides a way to access items by index or by name. + """ + + def __init__(self, elements: MutableSequence[NamedItemType]) -> None: + self._elements = elements + + @overload + def __getitem__(self, index: int | str) -> NamedItemType: ... + @overload + def __getitem__(self, index: slice) -> Self: ... 
+ def __getitem__(self, index: int | str | slice) -> NamedItemType | Self: + """Return the list item at the given index or the list item with the given name.""" + match index: + case int(): + return self._elements[index] + case str(): + for attr in self._elements: + if attr.name == index: + return attr + raise KeyError(f"List item {index} not found.") + case slice(): + return self.__class__(self._elements[index]) + case _: + raise TypeError("Index must be an int, string or slice of int.") # type: ignore + + def __len__(self) -> int: + """Return the number of items in the list.""" + return len(self._elements) + + def __iter__(self) -> Iterator[NamedItemType]: + """Return an iterator over the items in the list.""" + return iter(self._elements) + + @overload + def __setitem__(self, index: int | str, value: NamedItemType) -> None: ... + @overload + def __setitem__(self, index: slice, value: Iterable[NamedItemType]) -> None: ... + def __setitem__( + self, index: int | str | slice, value: NamedItemType | Iterable[NamedItemType] + ) -> None: + """Set the list item at the given index or the list item with the given name.""" + match index: + case int(): + self._elements[index] = value # type: ignore + case str(): + for i, attr in enumerate(self._elements): + if attr.name == index: + self._elements[i] = value # type: ignore + return + raise KeyError(f"List item {index} not found.") + case slice(): + if isinstance(value, Iterable): + self._elements[index] = list(value) + else: + raise TypeError("Value must be an iterable.") + case _: + raise TypeError("Index must be an int or string.") # type: ignore + + def __delitem__(self, index: int | str | slice) -> None: + """Delete the list item at the given index or the list item with the given name.""" + match index: + case int(): + del self._elements[index] + case slice(): + del self._elements[index] + case str(): + for i, attr in enumerate(self._elements): + if attr.name == index: + del self._elements[i] + return + raise 
KeyError(f"List item {index} not found.") + case _: + raise TypeError("Index must be an int or string.") # type: ignore + + def insert(self, index: int, value: NamedItemType) -> None: + """Insert a new item at the given index.""" + self._elements.insert(index, value) + + def append(self, value: NamedItemType) -> None: + """Append a new item to the end of the list.""" + self._elements.append(value) + + def extend(self, values: Iterable[NamedItemType]) -> None: + """Extend the list with a new iterable of items.""" + self._elements.extend(values) + + def __reversed__(self) -> Iterator[NamedItemType]: + """Return a reverse iterator over the items in the list.""" + return reversed(self._elements) + + def __repr__(self) -> str: + """Return the string representation of the list.""" + return f"{self.__class__.__qualname__}({len(self._elements)} items)" From 39e3851db566b94d5285ec7b0264f2691eda78e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Fri, 4 Apr 2025 14:10:35 +0200 Subject: [PATCH 13/28] feat: add type hints for item getters --- PIconnect/_typing/AF.py | 13 +++++++++++++ PIconnect/_typing/PI.py | 6 ++++++ 2 files changed, 19 insertions(+) diff --git a/PIconnect/_typing/AF.py b/PIconnect/_typing/AF.py index e2c15b71..986ddabf 100644 --- a/PIconnect/_typing/AF.py +++ b/PIconnect/_typing/AF.py @@ -43,6 +43,12 @@ def __iter__(self) -> Iterator[AFDatabase]: if self.DefaultDatabase is not None: yield from [self.DefaultDatabase] + def __getitem__(self, name: str) -> AFDatabase: + """Return the AFDatabase with the given name.""" + if name == self.DefaultDatabase.Name: + return self.DefaultDatabase + raise KeyError(f"AFDatabase {name} not found") + def __init__(self, name: str) -> None: self.Name = name self.Databases = PISystem.InternalDatabases() @@ -64,6 +70,13 @@ class PISystems: def __init__(self) -> None: self.DefaultPISystem = PISystem("TestingAF") + self.Count = 1 def __iter__(self) -> Iterator[PISystem]: return (x for x in 
[self.DefaultPISystem]) + + def __getitem__(self, name: str) -> PISystem: + """Return the PISystem with the given name.""" + if name == self.DefaultPISystem.Name: + return self.DefaultPISystem + raise KeyError(f"PISystem {name} not found") diff --git a/PIconnect/_typing/PI.py b/PIconnect/_typing/PI.py index 1a5a8edf..04f58c87 100644 --- a/PIconnect/_typing/PI.py +++ b/PIconnect/_typing/PI.py @@ -53,6 +53,12 @@ def __init__(self) -> None: def __iter__(self) -> Iterator[PIServer]: return (x for x in [self.DefaultPIServer]) + def __getitem__(self, server: str) -> PIServer: + """Stub to mock getting a server by name.""" + if server == self.DefaultPIServer.Name: + return self.DefaultPIServer + raise KeyError(f"Server '{server}' not found.") + class PIPoint: """Mock class of the AF.PI.PIPoint class.""" From 448799bb3c479acb4de842137c7044f224a6f572 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Fri, 4 Apr 2025 14:11:31 +0200 Subject: [PATCH 14/28] feat: complete migration AF to _collections --- PIconnect/Asset.py | 11 ++++++--- PIconnect/Data.py | 9 ++++--- PIconnect/PI.py | 60 +++++++++++++++++++++++++++++----------------- tests/test_PI.py | 20 +++++++++------- tests/test_PIAF.py | 14 +++++------ 5 files changed, 67 insertions(+), 47 deletions(-) diff --git a/PIconnect/Asset.py b/PIconnect/Asset.py index 049f7c68..53856b98 100644 --- a/PIconnect/Asset.py +++ b/PIconnect/Asset.py @@ -6,9 +6,8 @@ import pandas as pd # type: ignore import PIconnect._typing.AF as _AFtyping -import PIconnect.AF as AF import PIconnect.AFSDK as SDK -from PIconnect import Data, PIPoint +from PIconnect import Data, PIPoint, _collections __all__ = [ "AFDataReference", @@ -329,7 +328,7 @@ def descendant(self, path: str) -> Self: return self.__class__(self.element.Elements.get_Item(path)) -class AFElementList(AF.NamedItemList[AFElement]): +class AFElementList(_collections.NamedItemList[AFElement]): """Container for a list of PIAFElement objects.""" pass @@ -358,6 +357,12 @@ def 
children(self) -> dict[str, Self]: return {c.Name: self.__class__(c) for c in self.element.EventFrames} +class AFEventFrameList(_collections.NamedItemList[AFEventFrame]): + """Container for a list of PIAFEventFrame objects.""" + + pass + + class AFTable: """Container for PI AF Tables in the database.""" diff --git a/PIconnect/Data.py b/PIconnect/Data.py index f50b2c54..9abf3347 100644 --- a/PIconnect/Data.py +++ b/PIconnect/Data.py @@ -9,9 +9,8 @@ import pandas as pd # type: ignore import PIconnect._typing.AF as _AFtyping -import PIconnect.AF as AF import PIconnect.AFSDK as SDK -from PIconnect import Time +from PIconnect import Time, _collections class BoundaryType(enum.IntEnum): @@ -736,7 +735,7 @@ def _update_value( Align = Literal["auto", "ffill", "bfill", "nearest", "time", False] -class DataContainerCollection(AF.NamedItemList[DataContainerType]): +class DataContainerCollection(_collections.NamedItemList[DataContainerType]): """Container for a collection of data containers.""" @property @@ -776,13 +775,13 @@ def align(df: pd.DataFrame) -> pd.DataFrame: case False: return df case "auto": - for col in df.columns.levels[0]: # type: ignore + for col in df.columns.get_level_values(0): # type: ignore if self[str(col)].stepped_data: # type: ignore df[col] = df[col].ffill(axis=0) # type: ignore else: df[col] = ( df[col] - .apply(pd.to_numeric, axis=1, errors="coerce") # type: ignore + .apply(pd.to_numeric, errors="coerce", by_row=False) # type: ignore .interpolate(method="time", axis=0) # type: ignore ) return df diff --git a/PIconnect/PI.py b/PIconnect/PI.py index a8895ec3..35c9e024 100644 --- a/PIconnect/PI.py +++ b/PIconnect/PI.py @@ -3,6 +3,7 @@ import warnings from typing import Any, cast +import PIconnect.AFSDK as SDK import PIconnect.PIPoint as PIPoint_ from PIconnect import AF, PIConsts from PIconnect._utils import InitialisationWarning @@ -14,10 +15,10 @@ _DEFAULT_AUTH_MODE = PIConsts.AuthenticationMode.PI_USER_AUTHENTICATION -def _lookup_servers() -> 
dict[str, AF.PI.PIServer]: - servers: dict[str, AF.PI.PIServer] = {} +def _lookup_servers() -> dict[str, SDK.AF.PI.PIServer]: + servers: dict[str, SDK.AF.PI.PIServer] = {} - for server in AF.PI.PIServers(): + for server in SDK.AF.PI.PIServers(): try: servers[server.Name] = server except (Exception, System.Exception) as e: # type: ignore @@ -30,10 +31,10 @@ def _lookup_servers() -> dict[str, AF.PI.PIServer]: return servers -def _lookup_default_server() -> AF.PI.PIServer | None: +def _lookup_default_server() -> SDK.AF.PI.PIServer | None: default_server = None try: - default_server = AF.PI.PIServers().DefaultPIServer + default_server = SDK.AF.PI.PIServers().DefaultPIServer except Exception: warnings.warn("Could not load the default PI Server", ResourceWarning, stacklevel=2) return default_server @@ -59,9 +60,22 @@ class PIServer(object): # pylint: disable=useless-object-inheritance version = "0.2.2" #: Dictionary of known servers, as reported by the SDK - servers = _lookup_servers() - #: Default server, as reported by the SDK - default_server = _lookup_default_server() + _servers: dict[str, SDK.AF.PI.PIServer] | None = None + _default_server: SDK.AF.PI.PIServer | None = None + + @classmethod + def servers(cls) -> dict[str, SDK.AF.PI.PIServer]: + """Return a dictionary of the known servers.""" + if cls._servers is None: + cls._servers = _lookup_servers() + return cls._servers + + @classmethod + def default_server(cls) -> SDK.AF.PI.PIServer | None: + """Return the default server.""" + if cls._default_server is None: + cls._default_server = _lookup_default_server() + return cls._default_server def __init__( self, @@ -72,22 +86,24 @@ def __init__( authentication_mode: PIConsts.AuthenticationMode = _DEFAULT_AUTH_MODE, timeout: int | None = None, ) -> None: + default_server = self.default_server() if server is None: - if self.default_server is None: + if default_server is None: raise ValueError("No server was specified and no default server was found.") - self.connection = 
self.default_server - elif server not in self.servers: - if self.default_server is None: - raise ValueError( - f"Server '{server}' not found and no default server was found." - ) - message = 'Server "{server}" not found, using the default server.' - warnings.warn( - message=message.format(server=server), category=UserWarning, stacklevel=1 - ) - self.connection = self.default_server + self.connection = default_server else: - self.connection = self.servers[server] + try: + self.connection = SDK.AF.PI.PIServers()[server] + except (Exception, System.Exception): # type: ignore + if default_server is None: + raise ValueError( + f"Server '{server}' not found and no default server was found." + ) from None + message = 'Server "{server}" not found, using the default server.' + warnings.warn( + message=message.format(server=server), category=UserWarning, stacklevel=1 + ) + self.connection = default_server if bool(username) != bool(password): raise ValueError( @@ -162,7 +178,7 @@ def search( # 'got type ' + str(type(query))) return [ PIPoint_.PIPoint(pi_point) - for pi_point in AF.PI.PIPoint.FindPIPoints( + for pi_point in SDK.AF.PI.PIPoint.FindPIPoints( self.connection, str(query), source, None ) ] diff --git a/tests/test_PI.py b/tests/test_PI.py index 4a3d22c1..edd8f0cf 100644 --- a/tests/test_PI.py +++ b/tests/test_PI.py @@ -22,28 +22,30 @@ def test_connection(self): def test_server_name(self): """Test that the server reports the same name as which was connected to.""" - default_server = PI.PIServer.default_server + default_server = PI.PIServer.default_server() if default_server is None: pytest.skip("No default server found.") - servername = default_server.Name - server = PI.PIServer(servername) - assert server.server_name == servername + else: + servername = default_server.Name + server = PI.PIServer(servername) + assert server.server_name == servername def test_warn_unkown_server(self): """Test that the server reports a warning when an unknown host is specified.""" - 
server_names = list(PI.PIServer.servers) + server_names = list(PI.PIServer.servers()) server_name = "__".join(server_names + ["UnknownHostName"]) with pytest.warns(UserWarning): PI.PIServer(server_name) def test_repr(self): """Test that the server representation matches the connected server.""" - default_server = PI.PIServer.default_server + default_server = PI.PIServer.default_server() if default_server is None: pytest.skip("No default server found.") - servername = default_server.Name - server = PI.PIServer(servername) - assert repr(server) == "PIServer(\\\\{})".format(servername) + else: + servername = default_server.Name + server = PI.PIServer(servername) + assert repr(server) == "PIServer(\\\\{})".format(servername) class TestSearchPIPoints: diff --git a/tests/test_PIAF.py b/tests/test_PIAF.py index ab564d3f..bb4fca0f 100644 --- a/tests/test_PIAF.py +++ b/tests/test_PIAF.py @@ -12,8 +12,6 @@ AFSDK.AF, AFSDK.System, AFSDK.AF_SDK_VERSION = AFSDK.__fallback() PI.AF = PIAF.AF = AFSDK.AF -PI.PIAFDatabase.servers = PIAF._lookup_servers() -PI.PIAFDatabase.default_server = PIAF._lookup_default_server() class TestAFDatabase: @@ -34,13 +32,13 @@ def test_server_name(self): def test_unknown_server_name(self): """Test that the server reports a warning for an unknown server.""" - AFserver_name = "__".join(list(PI.PIAFDatabase.servers) + ["UnkownServerName"]) + AFserver_name = "__".join(list(PI.PIAFDatabase.servers()) + ["UnkownServerName"]) with pytest.warns(UserWarning): PI.PIAFDatabase(server=AFserver_name) def test_unknown_database_name(self): """Test that the server reports a warning for an unknown database.""" - server = cast(AF.PISystem, PI.PIAFDatabase.default_server["server"]) # type: ignore + server = cast(AF.PISystem, PI.PIAFDatabase.default_server()) # type: ignore databases = [db.Name for db in server.Databases] AFdatabase_name = "__".join(databases + ["UnkownDatabaseName"]) with pytest.warns(UserWarning): @@ -54,7 +52,7 @@ def test_children(self): """Test that 
calling children on the database returns a dict of child elements.""" with PI.PIAFDatabase() as db: children = db.children - assert isinstance(children, dict) + assert isinstance(children, dict) class TestDatabaseSearch: @@ -65,18 +63,18 @@ def test_search(self): with pytest.warns(DeprecationWarning): with PI.PIAFDatabase() as db: attributes = db.search([r"", r""]) - assert isinstance(attributes, Asset.AFAttributeList) + assert isinstance(attributes, Asset.AFAttributeList) def test_split_element_attribute(self): """Test that calling attributes on the database returns a list of attributes.""" with pytest.warns(DeprecationWarning): with PI.PIAFDatabase() as db: attributes = db.search(r"BaseElement|Attribute1") - assert isinstance(attributes[0].name, str) + assert isinstance(attributes[0].name, str) def test_split_element_nested_attribute(self): """Test that calling attributes on the database returns a list of attributes.""" with pytest.warns(DeprecationWarning): with PI.PIAFDatabase() as db: attributes = db.search(r"BaseElement|Attribute1|Attribute2") - assert isinstance(attributes[0].name, str) + assert isinstance(attributes[0].name, str) From 7361129c4b07a2be05c452eef94783c9e6daf8f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Fri, 4 Apr 2025 14:51:31 +0200 Subject: [PATCH 15/28] feat: add PIPoint.Step --- PIconnect/_typing/PI.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/PIconnect/_typing/PI.py b/PIconnect/_typing/PI.py index 04f58c87..50bbeef8 100644 --- a/PIconnect/_typing/PI.py +++ b/PIconnect/_typing/PI.py @@ -66,6 +66,9 @@ class PIPoint: Name: str = "TestPIPoint" """This property identifies the name of the PIPoint""" + Step: bool = False + """This property indicates whether the PIPoint is a step PIPoint""" + @staticmethod def CurrentValue() -> _values.AFValue: return _values.AFValue(None) From c943797baf895cd831af3aa4e0f1589e6f555330 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Fri, 4 Apr 2025 14:54:11 
+0200 Subject: [PATCH 16/28] chore: AF to SDK.AF --- PIconnect/PIData.py | 105 ++++++++++++++++++++++---------------------- 1 file changed, 53 insertions(+), 52 deletions(-) diff --git a/PIconnect/PIData.py b/PIconnect/PIData.py index 8d5504d5..d79c0300 100644 --- a/PIconnect/PIData.py +++ b/PIconnect/PIData.py @@ -4,10 +4,11 @@ import datetime from typing import Any -import pandas as pd +import pandas as pd # type: ignore import PIconnect._typing.AF as _AFtyping -from PIconnect import AF, PIConsts, Time +import PIconnect.AFSDK as SDK +from PIconnect import PIConsts, Time __all__ = [ "PISeries", @@ -64,9 +65,9 @@ class PISeriesContainer(abc.ABC): version = "0.1.0" __boundary_types = { - "inside": AF.Data.AFBoundaryType.Inside, - "outside": AF.Data.AFBoundaryType.Outside, - "interpolate": AF.Data.AFBoundaryType.Interpolated, + "inside": SDK.AF.Data.AFBoundaryType.Inside, + "outside": SDK.AF.Data.AFBoundaryType.Outside, + "interpolate": SDK.AF.Data.AFBoundaryType.Interpolated, } @property @@ -133,13 +134,13 @@ def filtered_summaries( and the summary name as column name. 
""" time_range = Time.to_af_time_range(start_time, end_time) - _interval = AF.Time.AFTimeSpan.Parse(interval) + _interval = SDK.AF.Time.AFTimeSpan.Parse(interval) _filter_expression = self._normalize_filter_expression(filter_expression) - _summary_types = AF.Data.AFSummaryTypes(int(summary_types)) - _calculation_basis = AF.Data.AFCalculationBasis(int(calculation_basis)) - _filter_evaluation = AF.Data.AFSampleType(int(filter_evaluation)) - _filter_interval = AF.Time.AFTimeSpan.Parse(filter_interval) - _time_type = AF.Data.AFTimestampCalculation(int(time_type)) + _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) + _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) + _filter_evaluation = SDK.AF.Data.AFSampleType(int(filter_evaluation)) + _filter_interval = SDK.AF.Time.AFTimeSpan.Parse(filter_interval) + _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) pivalues = self._filtered_summaries( time_range, _interval, @@ -168,14 +169,14 @@ def filtered_summaries( @abc.abstractmethod def _filtered_summaries( self, - time_range: AF.Time.AFTimeRange, - interval: AF.Time.AFTimeSpan, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, filter_expression: str, - summary_types: AF.Data.AFSummaryTypes, - calculation_basis: AF.Data.AFCalculationBasis, - filter_evaluation: AF.Data.AFSampleType, - filter_interval: AF.Time.AFTimeSpan, - time_type: AF.Data.AFTimestampCalculation, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + filter_evaluation: SDK.AF.Data.AFSampleType, + filter_interval: SDK.AF.Time.AFTimeSpan, + time_type: SDK.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummariesDict: pass @@ -209,7 +210,7 @@ def interpolated_value(self, time: Time.TimeLike) -> PISeries: ) @abc.abstractmethod - def _interpolated_value(self, time: AF.Time.AFTime) -> AF.Asset.AFValue: + def _interpolated_value(self, time: SDK.AF.Time.AFTime) -> SDK.AF.Asset.AFValue: pass 
def interpolated_values( @@ -255,7 +256,7 @@ def interpolated_values( PISeries: Timeseries of the values returned by the SDK """ time_range = Time.to_af_time_range(start_time, end_time) - _interval = AF.Time.AFTimeSpan.Parse(interval) + _interval = SDK.AF.Time.AFTimeSpan.Parse(interval) _filter_expression = self._normalize_filter_expression(filter_expression) pivalues = self._interpolated_values(time_range, _interval, _filter_expression) @@ -274,10 +275,10 @@ def interpolated_values( @abc.abstractmethod def _interpolated_values( self, - time_range: AF.Time.AFTimeRange, - interval: AF.Time.AFTimeSpan, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, filter_expression: str, - ) -> AF.Asset.AFValues: + ) -> SDK.AF.Asset.AFValues: pass @property @@ -313,7 +314,7 @@ def recorded_value( from . import Time as time_module _time = time_module.to_af_time(time) - _retrieval_mode = AF.Data.AFRetrievalMode(int(retrieval_mode)) + _retrieval_mode = SDK.AF.Data.AFRetrievalMode(int(retrieval_mode)) pivalue = self._recorded_value(_time, _retrieval_mode) return PISeries( # type: ignore tag=self.name, @@ -324,8 +325,8 @@ def recorded_value( @abc.abstractmethod def _recorded_value( - self, time: AF.Time.AFTime, retrieval_mode: AF.Data.AFRetrievalMode - ) -> AF.Asset.AFValue: + self, time: SDK.AF.Time.AFTime, retrieval_mode: SDK.AF.Data.AFRetrievalMode + ) -> SDK.AF.Asset.AFValue: pass def recorded_values( @@ -408,10 +409,10 @@ def recorded_values( @abc.abstractmethod def _recorded_values( self, - time_range: AF.Time.AFTimeRange, - boundary_type: AF.Data.AFBoundaryType, + time_range: SDK.AF.Time.AFTimeRange, + boundary_type: SDK.AF.Data.AFBoundaryType, filter_expression: str, - ) -> AF.Asset.AFValues: + ) -> SDK.AF.Asset.AFValues: """Abstract implementation for recorded values. The internals for retrieving recorded values from PI and PI-AF are @@ -457,9 +458,9 @@ def summary( and the summary name as column name. 
""" time_range = Time.to_af_time_range(start_time, end_time) - _summary_types = AF.Data.AFSummaryTypes(int(summary_types)) - _calculation_basis = AF.Data.AFCalculationBasis(int(calculation_basis)) - _time_type = AF.Data.AFTimestampCalculation(int(time_type)) + _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) + _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) + _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) pivalues = self._summary(time_range, _summary_types, _calculation_basis, _time_type) df = pd.DataFrame() for summary in pivalues: @@ -475,10 +476,10 @@ def summary( @abc.abstractmethod def _summary( self, - time_range: AF.Time.AFTimeRange, - summary_types: AF.Data.AFSummaryTypes, - calculation_basis: AF.Data.AFCalculationBasis, - time_type: AF.Data.AFTimestampCalculation, + time_range: SDK.AF.Time.AFTimeRange, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + time_type: SDK.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummaryDict: pass @@ -523,10 +524,10 @@ def summaries( and the summary name as column name. 
""" time_range = Time.to_af_time_range(start_time, end_time) - _interval = AF.Time.AFTimeSpan.Parse(interval) - _summary_types = AF.Data.AFSummaryTypes(int(summary_types)) - _calculation_basis = AF.Data.AFCalculationBasis(int(calculation_basis)) - _time_type = AF.Data.AFTimestampCalculation(int(time_type)) + _interval = SDK.AF.Time.AFTimeSpan.Parse(interval) + _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) + _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) + _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) pivalues = self._summaries( time_range, _interval, _summary_types, _calculation_basis, _time_type ) @@ -548,11 +549,11 @@ def summaries( @abc.abstractmethod def _summaries( self, - time_range: AF.Time.AFTimeRange, - interval: AF.Time.AFTimeSpan, - summary_types: AF.Data.AFSummaryTypes, - calculation_basis: AF.Data.AFCalculationBasis, - time_type: AF.Data.AFTimestampCalculation, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + time_type: SDK.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummariesDict: pass @@ -583,19 +584,19 @@ def update_value( from . 
import Time as time_module if time is not None: - _value = AF.Asset.AFValue(value, time_module.to_af_time(time)) + _value = SDK.AF.Asset.AFValue(value, time_module.to_af_time(time)) else: - _value = AF.Asset.AFValue(value) + _value = SDK.AF.Asset.AFValue(value) - _update_mode = AF.Data.AFUpdateOption(int(update_mode)) - _buffer_mode = AF.Data.AFBufferOption(int(buffer_mode)) + _update_mode = SDK.AF.Data.AFUpdateOption(int(update_mode)) + _buffer_mode = SDK.AF.Data.AFBufferOption(int(buffer_mode)) self._update_value(_value, _update_mode, _buffer_mode) @abc.abstractmethod def _update_value( self, - value: AF.Asset.AFValue, - update_mode: AF.Data.AFUpdateOption, - buffer_mode: AF.Data.AFBufferOption, + value: SDK.AF.Asset.AFValue, + update_mode: SDK.AF.Data.AFUpdateOption, + buffer_mode: SDK.AF.Data.AFBufferOption, ) -> None: pass From 8a323fad0b0da8f57a144e045ab8cbf76ed2127c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Fri, 4 Apr 2025 15:45:41 +0200 Subject: [PATCH 17/28] chore: merge/rename modules PIData -> Data PIPoint +>PI PIAFAttribute +> Asset --- PIconnect/AF.py | 34 ++- PIconnect/Asset.py | 6 +- PIconnect/PI.py | 184 +++++++++++- PIconnect/PIAFAttribute.py | 224 -------------- PIconnect/PIData.py | 602 ------------------------------------- PIconnect/PIPoint.py | 164 ---------- PIconnect/Time.py | 6 +- PIconnect/__init__.py | 5 +- PIconnect/_typing/AF.py | 6 +- PIconnect/_typing/PI.py | 3 +- tests/test_PIAF.py | 28 +- 11 files changed, 224 insertions(+), 1038 deletions(-) delete mode 100644 PIconnect/PIAFAttribute.py delete mode 100644 PIconnect/PIData.py delete mode 100644 PIconnect/PIPoint.py diff --git a/PIconnect/AF.py b/PIconnect/AF.py index 568163d9..cddd79d0 100644 --- a/PIconnect/AF.py +++ b/PIconnect/AF.py @@ -1,12 +1,13 @@ """AF - Core containers for connections to the PI Asset Framework.""" +import logging import warnings from typing import Any, Self import PIconnect.AFSDK as SDK from PIconnect import Asset, PIConsts, Search, 
Time -from PIconnect.AFSDK import System +_logger = logging.getLogger(__name__) _DEFAULT_EVENTFRAME_SEARCH_MODE = PIConsts.EventFrameSearchMode.STARTING_AFTER @@ -38,20 +39,24 @@ def __init__(self, server: str | None = None, database: str | None = None) -> No def _initialise_server(self, server: str | None) -> SDK.AF.PISystem: """Initialise the server connection.""" + _logger.debug(f"Initialising server connection from {server!r}") default_server = self.default_server() if server is None: if default_server is None: raise ValueError("No server specified and no default server found.") + _logger.debug(f"Using default server: {default_server.Name}") return default_server - try: - return SDK.AF.PISystems()[server] - except (Exception, System.Exception): # type: ignore + if (_server := SDK.AF.PISystems()[server]) is not None: + _logger.debug(_server) + return _server + else: if default_server is None: raise ValueError( f'Server "{server}" not found and no default server found.' ) from None message = f'Server "{server}" not found, using the default server.' + _logger.debug(message) warnings.warn(message=message, category=UserWarning, stacklevel=2) return default_server @@ -65,9 +70,10 @@ def default_db(): if database is None: return default_db() - try: - return self.server.Databases[database] - except (Exception, System.Exception): # type: ignore + if (_db := self.server.Databases[database]) is not None: + _logger.debug(_db) + return _db + else: message = f'Database "{database}" not found, using the default database.' 
warnings.warn(message=message, category=UserWarning, stacklevel=2) return default_db() @@ -142,3 +148,17 @@ def event_frames( search_full_hierarchy, ) } + + +class PIAFDatabase(AFDatabase): + """Context manager for connections to the PI Asset Framework database.""" + + version = "0.3.0" + + def __init__(self, server: str | None = None, database: str | None = None) -> None: + warnings.warn( + "PIAFDatabase is deprecated, use AFDatabase instead.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(server=server, database=database) diff --git a/PIconnect/Asset.py b/PIconnect/Asset.py index 53856b98..d8136c8f 100644 --- a/PIconnect/Asset.py +++ b/PIconnect/Asset.py @@ -7,7 +7,7 @@ import PIconnect._typing.AF as _AFtyping import PIconnect.AFSDK as SDK -from PIconnect import Data, PIPoint, _collections +from PIconnect import PI, Data, _collections __all__ = [ "AFDataReference", @@ -36,10 +36,10 @@ def name(self) -> str: return self.data_reference.Name @property - def pi_point(self) -> PIPoint.PIPoint | None: + def pi_point(self) -> PI.PIPoint | None: """Return the PI Point associated with the data reference, if any.""" if self.data_reference.PIPoint is not None: - return PIPoint.PIPoint(self.data_reference.PIPoint) + return PI.PIPoint(self.data_reference.PIPoint) class AFEnumerationValue: diff --git a/PIconnect/PI.py b/PIconnect/PI.py index 35c9e024..64e175d3 100644 --- a/PIconnect/PI.py +++ b/PIconnect/PI.py @@ -3,15 +3,14 @@ import warnings from typing import Any, cast +import PIconnect._typing.AF as _AFtyping import PIconnect.AFSDK as SDK -import PIconnect.PIPoint as PIPoint_ -from PIconnect import AF, PIConsts +from PIconnect import Data, PIConsts, Time from PIconnect._utils import InitialisationWarning from PIconnect.AFSDK import System __all__ = ["PIServer", "PIPoint"] -PIPoint = PIPoint_.PIPoint _DEFAULT_AUTH_MODE = PIConsts.AuthenticationMode.PI_USER_AUTHENTICATION @@ -40,6 +39,171 @@ def _lookup_default_server() -> SDK.AF.PI.PIServer | None: return 
default_server +class PIPoint(Data.DataContainer): + """Reference to a PI Point to get data and corresponding metadata from the server. + + Parameters + ---------- + pi_point (AF.PI.PIPoint): Reference to a PIPoint as returned by the SDK + """ + + version = "0.3.0" + + def __init__(self, pi_point: SDK.AF.PI.PIPoint) -> None: + super().__init__() + self.pi_point = pi_point + self.tag = pi_point.Name + self.__attributes_loaded = False + self.__raw_attributes = {} + + def __repr__(self): + """Return the string representation of the PI Point.""" + return ( + f"{self.__class__.__qualname__}({self.tag}, {self.description}; " + f"Current Value: {self.current_value} {self.units_of_measurement})" + ) + + @property + def created(self): + """Return the creation datetime of a point.""" + return Time.timestamp_to_index(self.raw_attributes["creationdate"]) + + @property + def description(self): + """Return the description of the PI Point. + + .. todo:: + + Add setter to alter displayed description + """ + return self.raw_attributes["descriptor"] + + @property + def last_update(self): + """Return the time at which the last value for this PI Point was recorded.""" + return Time.timestamp_to_index(self.pi_point.CurrentValue().Timestamp.UtcTime) + + @property + def name(self) -> str: + """Return the name of the PI Point.""" + return self.tag + + @property + def raw_attributes(self) -> dict[str, Any]: + """Return a dictionary of the raw attributes of the PI Point.""" + self.__load_attributes() + return self.__raw_attributes + + @property + def units_of_measurement(self) -> str | None: + """Return the units of measument in which values for this PI Point are reported.""" + return self.raw_attributes["engunits"] + + @property + def stepped_data(self) -> bool: + """Return False when the PIPoint contains continuous data or True when stepped data.""" + return self.pi_point.Step + + def __load_attributes(self) -> None: + """Load the raw attributes of the PI Point from the server.""" + if 
not self.__attributes_loaded: + self.pi_point.LoadAttributes([]) + self.__attributes_loaded = True + self.__raw_attributes = {att.Key: att.Value for att in self.pi_point.GetAttributes([])} + + def _current_value(self) -> Any: + """Return the last recorded value for this PI Point (internal use only).""" + return self.pi_point.CurrentValue().Value + + def _filtered_summaries( + self, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + filter_expression: str, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + filter_evaluation: SDK.AF.Data.AFSampleType, + filter_interval: SDK.AF.Time.AFTimeSpan, + time_type: SDK.AF.Data.AFTimestampCalculation, + ) -> _AFtyping.Data.SummariesDict: + return self.pi_point.FilteredSummaries( + time_range, + interval, + filter_expression, + summary_types, + calculation_basis, + filter_evaluation, + filter_interval, + time_type, + ) + + def _interpolated_value(self, time: SDK.AF.Time.AFTime) -> SDK.AF.Asset.AFValue: + """Return a single value for this PI Point.""" + return self.pi_point.InterpolatedValue(time) + + def _interpolated_values( + self, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + filter_expression: str, + ) -> SDK.AF.Asset.AFValues: + include_filtered_values = False + return self.pi_point.InterpolatedValues( + time_range, interval, filter_expression, include_filtered_values + ) + + def _normalize_filter_expression(self, filter_expression: str) -> str: + return filter_expression.replace("%tag%", self.tag) + + def _recorded_value( + self, time: SDK.AF.Time.AFTime, retrieval_mode: SDK.AF.Data.AFRetrievalMode + ) -> SDK.AF.Asset.AFValue: + """Return a single recorded value for this PI Point.""" + return self.pi_point.RecordedValue( + time, SDK.AF.Data.AFRetrievalMode(int(retrieval_mode)) + ) + + def _recorded_values( + self, + time_range: SDK.AF.Time.AFTimeRange, + boundary_type: SDK.AF.Data.AFBoundaryType, + filter_expression: 
str, + ) -> SDK.AF.Asset.AFValues: + include_filtered_values = False + return self.pi_point.RecordedValues( + time_range, boundary_type, filter_expression, include_filtered_values + ) + + def _summary( + self, + time_range: SDK.AF.Time.AFTimeRange, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + time_type: SDK.AF.Data.AFTimestampCalculation, + ) -> _AFtyping.Data.SummaryDict: + return self.pi_point.Summary(time_range, summary_types, calculation_basis, time_type) + + def _summaries( + self, + time_range: SDK.AF.Time.AFTimeRange, + interval: SDK.AF.Time.AFTimeSpan, + summary_types: SDK.AF.Data.AFSummaryTypes, + calculation_basis: SDK.AF.Data.AFCalculationBasis, + time_type: SDK.AF.Data.AFTimestampCalculation, + ) -> _AFtyping.Data.SummariesDict: + return self.pi_point.Summaries( + time_range, interval, summary_types, calculation_basis, time_type + ) + + def _update_value( + self, + value: SDK.AF.Asset.AFValue, + update_mode: SDK.AF.Data.AFUpdateOption, + buffer_mode: SDK.AF.Data.AFBufferOption, + ) -> None: + return self.pi_point.UpdateValue(value, update_mode, buffer_mode) + + class PIServer(object): # pylint: disable=useless-object-inheritance """PIServer is a connection to an OSIsoft PI Server. @@ -92,9 +256,9 @@ def __init__( raise ValueError("No server was specified and no default server was found.") self.connection = default_server else: - try: - self.connection = SDK.AF.PI.PIServers()[server] - except (Exception, System.Exception): # type: ignore + if (_server := SDK.AF.PI.PIServers()[server]) is not None: + self.connection = _server + else: if default_server is None: raise ValueError( f"Server '{server}' not found and no default server was found." 
@@ -121,7 +285,7 @@ def __init__( cred = (username, secure_pass) + ((domain,) if domain else ()) self._credentials = ( System.Net.NetworkCredential(cred[0], cred[1], *cred[2:]), - AF.PI.PIAuthenticationMode(int(authentication_mode)), + SDK.AF.PI.PIAuthenticationMode(int(authentication_mode)), ) else: self._credentials = None @@ -153,9 +317,7 @@ def server_name(self): """Name of the connected server.""" return self.connection.Name - def search( - self, query: str | list[str], source: str | None = None - ) -> list[PIPoint_.PIPoint]: + def search(self, query: str | list[str], source: str | None = None) -> list[PIPoint]: """Search PIPoints on the PIServer. Parameters @@ -177,7 +339,7 @@ def search( # raise TypeError('Argument query must be either a string or a list of strings,' + # 'got type ' + str(type(query))) return [ - PIPoint_.PIPoint(pi_point) + PIPoint(pi_point) for pi_point in SDK.AF.PI.PIPoint.FindPIPoints( self.connection, str(query), source, None ) diff --git a/PIconnect/PIAFAttribute.py b/PIconnect/PIAFAttribute.py deleted file mode 100644 index b0d2ebcd..00000000 --- a/PIconnect/PIAFAttribute.py +++ /dev/null @@ -1,224 +0,0 @@ -"""Module for the PIAFAttribute class.""" - -import dataclasses -import datetime -from collections.abc import Iterator, Sequence -from typing import Any, overload - -from PIconnect import AF, PIData, PIPoint, Time - -from ._typing import AF as _AFtyping - -__all__ = ["PIAFAttribute"] - - -@dataclasses.dataclass -class AFDataReference: - attribute: AF.Asset.AFAttribute - data_reference: AF.Asset.AFDataReference - - @property - def name(self) -> str: - return self.data_reference.Name - - @property - def pi_point(self) -> PIPoint.PIPoint | None: - if self.data_reference.PIPoint is not None: - return PIPoint.PIPoint(self.data_reference.PIPoint) - - -class PIAFAttribute(PIData.PISeriesContainer): - """Container for attributes of PI AF elements in the database.""" - - version = "0.1.0" - - def __init__(self, attribute: 
AF.Asset.AFAttribute) -> None: - super().__init__() - self.element = attribute.Element - self.attribute = attribute - - def __repr__(self): - """Return the string representation of the current attribute.""" - return ( - f"{self.__class__.__qualname__}({self.name}, {self.description}; " - f"Current Value: {self.current_value} {self.units_of_measurement}" - ) - - @property - def data_reference(self) -> AFDataReference: - """Return the data reference of the current attribute.""" - return AFDataReference(self.attribute, self.attribute.DataReference) - - @property - def name(self) -> str: - """Return the name of the current attribute.""" - return self.attribute.Name - - @property - def parent(self) -> "PIAFAttribute | None": - """Return the parent attribute of the current attribute, or None if it has none.""" - if not self.attribute.Parent: - return None - return self.__class__(self.attribute.Parent) - - @property - def children(self) -> dict[str, "PIAFAttribute"]: - """Return a dictionary of the direct child attributes of the current attribute.""" - return {a.Name: self.__class__(a) for a in self.attribute.Attributes} - - @property - def description(self) -> str: - """Return the description of the PI Point.""" - return self.attribute.Description - - @property - def last_update(self) -> datetime.datetime: - """Return the time at which the current_value was last updated.""" - return Time.timestamp_to_index(self.attribute.GetValue().Timestamp.UtcTime) - - @property - def units_of_measurement(self) -> str: - """Return the units of measurement in which values for this element are reported.""" - return str(self.attribute.DefaultUOM) - - def _current_value(self) -> Any: - return self.attribute.GetValue().Value - - def _filtered_summaries( - self, - time_range: AF.Time.AFTimeRange, - interval: AF.Time.AFTimeSpan, - filter_expression: str, - summary_types: AF.Data.AFSummaryTypes, - calculation_basis: AF.Data.AFCalculationBasis, - filter_evaluation: AF.Data.AFSampleType, - 
filter_interval: AF.Time.AFTimeSpan, - time_type: AF.Data.AFTimestampCalculation, - ) -> _AFtyping.Data.SummariesDict: - return self.attribute.Data.FilteredSummaries( - time_range, - interval, - filter_expression, - summary_types, - calculation_basis, - filter_evaluation, - filter_interval, - time_type, - ) - - def _interpolated_value(self, time: AF.Time.AFTime): - """Return a single value for this PI Point.""" - return self.attribute.Data.InterpolatedValue(time, self.attribute.DefaultUOM) - - def _recorded_value( - self, time: AF.Time.AFTime, retrieval_mode: AF.Data.AFRetrievalMode - ) -> AF.Asset.AFValue: - """Return a single value for this PI Point.""" - return self.attribute.Data.RecordedValue( - time, retrieval_mode, self.attribute.DefaultUOM - ) - - def _recorded_values( - self, - time_range: AF.Time.AFTimeRange, - boundary_type: AF.Data.AFBoundaryType, - filter_expression: str, - ) -> AF.Asset.AFValues: - include_filtered_values = False - return self.attribute.Data.RecordedValues( - time_range, - boundary_type, - self.attribute.DefaultUOM, - filter_expression, - include_filtered_values, - ) - - def _interpolated_values( - self, - time_range: AF.Time.AFTimeRange, - interval: AF.Time.AFTimeSpan, - filter_expression: str, - ) -> AF.Asset.AFValues: - """Query the pi af attribute, internal implementation.""" - include_filtered_values = False - return self.attribute.Data.InterpolatedValues( - time_range, - interval, - self.attribute.DefaultUOM, - filter_expression, - include_filtered_values, - ) - - def _summaries( - self, - time_range: AF.Time.AFTimeRange, - interval: AF.Time.AFTimeSpan, - summary_types: AF.Data.AFSummaryTypes, - calculation_basis: AF.Data.AFCalculationBasis, - time_type: AF.Data.AFTimestampCalculation, - ) -> _AFtyping.Data.SummariesDict: - return self.attribute.Data.Summaries( - time_range, interval, summary_types, calculation_basis, time_type - ) - - def _summary( - self, - time_range: AF.Time.AFTimeRange, - summary_types: 
AF.Data.AFSummaryTypes, - calculation_basis: AF.Data.AFCalculationBasis, - time_type: AF.Data.AFTimestampCalculation, - ) -> _AFtyping.Data.SummaryDict: - return self.attribute.Data.Summary( - time_range, summary_types, calculation_basis, time_type - ) - - def _update_value( - self, - value: AF.Asset.AFValue, - update_mode: AF.Data.AFUpdateOption, - buffer_mode: AF.Data.AFBufferOption, - ) -> None: - return self.attribute.Data.UpdateValue( - value, - update_mode, - buffer_mode, - ) - - -class PIAFAttributeList(Sequence[PIAFAttribute]): - def __init__(self, attributes: Sequence[PIAFAttribute]) -> None: - self._attributes = attributes - - @overload - def __getitem__(self, index: int | str) -> PIAFAttribute: ... - @overload - def __getitem__(self, index: slice) -> "PIAFAttributeList": ... - def __getitem__(self, index: int | str | slice) -> "PIAFAttribute | PIAFAttributeList": - """Return the attribute at the given index or the attribute with the given name.""" - match index: - case int(): - return self._attributes[index] - case str(): - for attr in self._attributes: - if attr.name == index: - return attr - raise KeyError(f"Attribute {index} not found.") - case slice(): - return PIAFAttributeList(self._attributes[index]) - case _: - raise TypeError("Index must be an int or a string.") - - def __len__(self) -> int: - """Return the number of attributes in the list.""" - return len(self._attributes) - - def __iter__(self) -> Iterator[PIAFAttribute]: - """Return an iterator over the attributes in the list.""" - return iter(self._attributes) - - def __reversed__(self) -> Iterator[PIAFAttribute]: - return reversed(self._attributes) - - def __repr__(self) -> str: - """Return the string representation of the attribute list.""" - return f"{self.__class__.__qualname__}({len(self._attributes)} attributes)" diff --git a/PIconnect/PIData.py b/PIconnect/PIData.py deleted file mode 100644 index d79c0300..00000000 --- a/PIconnect/PIData.py +++ /dev/null @@ -1,602 +0,0 @@ 
-"""Auxipublish-to-pypiliary classes for PI Point and PIAFAttribute objects.""" - -import abc -import datetime -from typing import Any - -import pandas as pd # type: ignore - -import PIconnect._typing.AF as _AFtyping -import PIconnect.AFSDK as SDK -from PIconnect import PIConsts, Time - -__all__ = [ - "PISeries", - "PISeriesContainer", -] - -_DEFAULT_CALCULATION_BASIS = PIConsts.CalculationBasis.TIME_WEIGHTED -_DEFAULT_FILTER_EVALUATION = PIConsts.ExpressionSampleType.EXPRESSION_RECORDED_VALUES - - -class PISeries(pd.Series): # type: ignore - """Create a timeseries, derived from :class:`pandas.Series`. - - Parameters - ---------- - tag (str): Name of the new series - timestamp (list[datetime]): List of datetime objects to - create the new index - value (list): List of values for the timeseries, should be equally long - as the `timestamp` argument - uom (str, optional): Defaults to None. Unit of measurement for the - series - - .. todo:: - - Remove class, return to either plain :class:`pandas.Series` or a - composition where the Series is just an attribute - """ - - version = "0.1.0" - - def __init__( - self, - tag: str, - timestamp: list[datetime.datetime], - value: list[Any], - uom: str | None = None, - *args: Any, - **kwargs: Any, - ) -> None: - pd.Series.__init__(self, *args, data=value, index=timestamp, name=tag, **kwargs) # type: ignore - self.tag = tag - self.uom = uom - - -class PISeriesContainer(abc.ABC): - """Generic behaviour for PI Series returning objects. - - .. 
todo:: - - Move `__boundary_types` to PIConsts as a new enumeration - """ - - version = "0.1.0" - - __boundary_types = { - "inside": SDK.AF.Data.AFBoundaryType.Inside, - "outside": SDK.AF.Data.AFBoundaryType.Outside, - "interpolate": SDK.AF.Data.AFBoundaryType.Interpolated, - } - - @property - def current_value(self) -> Any: - """Return the current value of the attribute.""" - return self._current_value() - - @abc.abstractmethod - def _current_value(self) -> Any: - pass - - def filtered_summaries( - self, - start_time: Time.TimeLike, - end_time: Time.TimeLike, - interval: str, - filter_expression: str, - summary_types: PIConsts.SummaryType, - calculation_basis: PIConsts.CalculationBasis = _DEFAULT_CALCULATION_BASIS, - filter_evaluation: PIConsts.ExpressionSampleType = _DEFAULT_FILTER_EVALUATION, - filter_interval: str | None = None, - time_type: PIConsts.TimestampCalculation = PIConsts.TimestampCalculation.AUTO, - ) -> pd.DataFrame: - """Return one or more summary values for each interval within a time range. - - Parameters - ---------- - start_time (str or datetime): String containing the date, and possibly time, - from which to retrieve the values. This is parsed, together - with `end_time`, using - :afsdk:`AF.Time.AFTimeRange `. - end_time (str or datetime): String containing the date, and possibly time, - until which to retrieve values. This is parsed, together - with `start_time`, using - :afsdk:`AF.Time.AFTimeRange `. - interval (str): String containing the interval at which to extract - data. This is parsed using - :afsdk:`AF.Time.AFTimeSpan.Parse `. - filter_expression (str, optional): Defaults to ''. Query on which - data to include in the results. See :ref:`filtering_values` - for more information on filter queries. - summary_types (int or PIConsts.SummaryType): Type(s) of summaries - of the data within the requested time range. - calculation_basis (int or PIConsts.CalculationBasis, optional): - Event weighting within an interval. 
See :ref:`event_weighting` - and :any:`CalculationBasis` for more information. Defaults to - CalculationBasis.TIME_WEIGHTED. - filter_evaluation (int or PIConsts.ExpressionSampleType, optional): - Determines whether the filter is applied to the raw events in - the database, of if it is applied to an interpolated series - with a regular interval. Defaults to - ExpressionSampleType.EXPRESSION_RECORDED_VALUES. - filter_interval (str, optional): String containing the interval at - which to extract apply the filter. This is parsed using - :afsdk:`AF.Time.AFTimeSpan.Parse `. - time_type (int or PIConsts.TimestampCalculation, optional): - Timestamp to return for each of the requested summaries. See - :ref:`summary_timestamps` and :any:`TimestampCalculation` for - more information. Defaults to TimestampCalculation.AUTO. - - Returns - ------- - pandas.DataFrame: Dataframe with the unique timestamps as row index - and the summary name as column name. - """ - time_range = Time.to_af_time_range(start_time, end_time) - _interval = SDK.AF.Time.AFTimeSpan.Parse(interval) - _filter_expression = self._normalize_filter_expression(filter_expression) - _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) - _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) - _filter_evaluation = SDK.AF.Data.AFSampleType(int(filter_evaluation)) - _filter_interval = SDK.AF.Time.AFTimeSpan.Parse(filter_interval) - _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) - pivalues = self._filtered_summaries( - time_range, - _interval, - _filter_expression, - _summary_types, - _calculation_basis, - _filter_evaluation, - _filter_interval, - _time_type, - ) - df = pd.DataFrame() - for summary in pivalues: - key = PIConsts.SummaryType(int(summary.Key)).name - timestamps, values = zip( - *[ - (Time.timestamp_to_index(value.Timestamp.UtcTime), value.Value) - for value in summary.Value - ], - strict=True, - ) - df = df.join( # type: ignore - pd.DataFrame(data={key: 
values}, index=timestamps), how="outer" - ) - return df - - @abc.abstractmethod - def _filtered_summaries( - self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, - filter_expression: str, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - filter_evaluation: SDK.AF.Data.AFSampleType, - filter_interval: SDK.AF.Time.AFTimeSpan, - time_type: SDK.AF.Data.AFTimestampCalculation, - ) -> _AFtyping.Data.SummariesDict: - pass - - def interpolated_value(self, time: Time.TimeLike) -> PISeries: - """Return a PISeries with an interpolated value at the given time. - - Parameters - ---------- - ----------eters - ---------- - ----------eters - ---------- - time (str, datetime): String containing the date, and possibly time, - for which to retrieve the value. This is parsed, using - :afsdk:`AF.Time.AFTime `. - - Returns - ------- - PISeries: A PISeries with a single row, with the corresponding time as - the index - """ - from . import Time as time_module - - _time = time_module.to_af_time(time) - pivalue = self._interpolated_value(_time) - return PISeries( # type: ignore - tag=self.name, - value=pivalue.Value, - timestamp=[time_module.timestamp_to_index(pivalue.Timestamp.UtcTime)], - uom=self.units_of_measurement, - ) - - @abc.abstractmethod - def _interpolated_value(self, time: SDK.AF.Time.AFTime) -> SDK.AF.Asset.AFValue: - pass - - def interpolated_values( - self, - start_time: Time.TimeLike, - end_time: Time.TimeLike, - interval: str, - filter_expression: str = "", - ) -> PISeries: - """Return a PISeries of interpolated data. - - Data is returned between *start_time* and *end_time* at a fixed - *interval*. All three values are parsed by AF.Time and the first two - allow for time specification relative to "now" by use of the - asterisk. - - *filter_expression* is an optional string to filter the returned - values, see OSIsoft PI documentation for more information. 
- - The AF SDK allows for inclusion of filtered data, with filtered - values marked as such. At this point PIconnect does not support this - and filtered values are always left out entirely. - - Parameters - ---------- - start_time (str or datetime): Containing the date, and possibly time, - from which to retrieve the values. This is parsed, together - with `end_time`, using - :afsdk:`AF.Time.AFTimeRange `. - end_time (str or datetime): Containing the date, and possibly time, - until which to retrieve values. This is parsed, together - with `start_time`, using - :afsdk:`AF.Time.AFTimeRange `. - interval (str): String containing the interval at which to extract - data. This is parsed using - :afsdk:`AF.Time.AFTimeSpan.Parse `. - filter_expression (str, optional): Defaults to ''. Query on which - data to include in the results. See :ref:`filtering_values` - for more information on filter queries. - - Returns - ------- - PISeries: Timeseries of the values returned by the SDK - """ - time_range = Time.to_af_time_range(start_time, end_time) - _interval = SDK.AF.Time.AFTimeSpan.Parse(interval) - _filter_expression = self._normalize_filter_expression(filter_expression) - pivalues = self._interpolated_values(time_range, _interval, _filter_expression) - - timestamps: list[datetime.datetime] = [] - values: list[Any] = [] - for value in pivalues: - timestamps.append(Time.timestamp_to_index(value.Timestamp.UtcTime)) - values.append(value.Value) - return PISeries( # type: ignore - tag=self.name, - timestamp=timestamps, - value=values, - uom=self.units_of_measurement, - ) - - @abc.abstractmethod - def _interpolated_values( - self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, - filter_expression: str, - ) -> SDK.AF.Asset.AFValues: - pass - - @property - @abc.abstractmethod - def name(self) -> str: - """Return the name of the current object.""" - pass - - def _normalize_filter_expression(self, filter_expression: str) -> str: - return filter_expression 
- - def recorded_value( - self, - time: Time.TimeLike, - retrieval_mode: PIConsts.RetrievalMode = PIConsts.RetrievalMode.AUTO, - ) -> PISeries: - """Return a PISeries with the recorded value at or close to the given time. - - Parameters - ---------- - time (str): String containing the date, and possibly time, - for which to retrieve the value. This is parsed, using - :afsdk:`AF.Time.AFTime `. - retrieval_mode (int or :any:`PIConsts.RetrievalMode`): Flag determining - which value to return if no value available at the exact requested - time. - - Returns - ------- - PISeries: A PISeries with a single row, with the corresponding time as - the index - """ - from . import Time as time_module - - _time = time_module.to_af_time(time) - _retrieval_mode = SDK.AF.Data.AFRetrievalMode(int(retrieval_mode)) - pivalue = self._recorded_value(_time, _retrieval_mode) - return PISeries( # type: ignore - tag=self.name, - value=pivalue.Value, - timestamp=[time_module.timestamp_to_index(pivalue.Timestamp.UtcTime)], - uom=self.units_of_measurement, - ) - - @abc.abstractmethod - def _recorded_value( - self, time: SDK.AF.Time.AFTime, retrieval_mode: SDK.AF.Data.AFRetrievalMode - ) -> SDK.AF.Asset.AFValue: - pass - - def recorded_values( - self, - start_time: Time.TimeLike, - end_time: Time.TimeLike, - boundary_type: str = "inside", - filter_expression: str = "", - ): - """Return a PISeries of recorded data. - - Data is returned between the given *start_time* and *end_time*, - inclusion of the boundaries is determined by the *boundary_type* - attribute. Both *start_time* and *end_time* are parsed by AF.Time and - allow for time specification relative to "now" by use of the asterisk. - - By default the *boundary_type* is set to 'inside', which returns from - the first value after *start_time* to the last value before *end_time*. 
- The other options are 'outside', which returns from the last value - before *start_time* to the first value before *end_time*, and - 'interpolate', which interpolates the first value to the given - *start_time* and the last value to the given *end_time*. - - *filter_expression* is an optional string to filter the returned - values, see OSIsoft PI documentation for more information. - - The AF SDK allows for inclusion of filtered data, with filtered values - marked as such. At this point PIconnect does not support this and - filtered values are always left out entirely. - - Parameters - ---------- - start_time (str or datetime): Containing the date, and possibly time, - from which to retrieve the values. This is parsed, together - with `end_time`, using - :afsdk:`AF.Time.AFTimeRange `. - end_time (str or datetime): Containing the date, and possibly time, - until which to retrieve values. This is parsed, together - with `start_time`, using - :afsdk:`AF.Time.AFTimeRange `. - boundary_type (str, optional): Defaults to 'inside'. Key from the - `__boundary_types` dictionary to describe how to handle the - boundaries of the time range. - filter_expression (str, optional): Defaults to ''. Query on which - data to include in the results. See :ref:`filtering_values` - for more information on filter queries. - - Returns - ------- - PISeries: Timeseries of the values returned by the SDK - - Raises - ------ - ValueError: If the provided `boundary_type` is not a valid key a - `ValueError` is raised. 
- """ - time_range = Time.to_af_time_range(start_time, end_time) - _boundary_type = self.__boundary_types.get(boundary_type.lower()) - if _boundary_type is None: - raise ValueError( - "Argument boundary_type must be one of " - + ", ".join('"%s"' % x for x in sorted(self.__boundary_types.keys())) - ) - _filter_expression = self._normalize_filter_expression(filter_expression) - - pivalues = self._recorded_values(time_range, _boundary_type, _filter_expression) - - timestamps: list[datetime.datetime] = [] - values: list[Any] = [] - for value in pivalues: - timestamps.append(Time.timestamp_to_index(value.Timestamp.UtcTime)) - values.append(value.Value) - return PISeries( # type: ignore - tag=self.name, - timestamp=timestamps, - value=values, - uom=self.units_of_measurement, - ) - - @abc.abstractmethod - def _recorded_values( - self, - time_range: SDK.AF.Time.AFTimeRange, - boundary_type: SDK.AF.Data.AFBoundaryType, - filter_expression: str, - ) -> SDK.AF.Asset.AFValues: - """Abstract implementation for recorded values. - - The internals for retrieving recorded values from PI and PI-AF are - different and should therefore be implemented by the respective data - containers. - """ - pass - - def summary( - self, - start_time: Time.TimeLike, - end_time: Time.TimeLike, - summary_types: PIConsts.SummaryType, - calculation_basis: PIConsts.CalculationBasis = PIConsts.CalculationBasis.TIME_WEIGHTED, - time_type: PIConsts.TimestampCalculation = PIConsts.TimestampCalculation.AUTO, - ) -> pd.DataFrame: - """Return one or more summary values over a single time range. - - Parameters - ---------- - start_time (str or datetime): Containing the date, and possibly time, - from which to retrieve the values. This is parsed, together - with `end_time`, using - :afsdk:`AF.Time.AFTimeRange `. - end_time (str or datetime): Containing the date, and possibly time, - until which to retrieve values. This is parsed, together - with `start_time`, using - :afsdk:`AF.Time.AFTimeRange `. 
- summary_types (int or PIConsts.SummaryType): Type(s) of summaries - of the data within the requested time range. - calculation_basis (int or PIConsts.CalculationBasis, optional): - Event weighting within an interval. See :ref:`event_weighting` - and :any:`CalculationBasis` for more information. Defaults to - CalculationBasis.TIME_WEIGHTED. - time_type (int or PIConsts.TimestampCalculation, optional): - Timestamp to return for each of the requested summaries. See - :ref:`summary_timestamps` and :any:`TimestampCalculation` for - more information. Defaults to TimestampCalculation.AUTO. - - Returns - ------- - pandas.DataFrame: Dataframe with the unique timestamps as row index - and the summary name as column name. - """ - time_range = Time.to_af_time_range(start_time, end_time) - _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) - _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) - _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) - pivalues = self._summary(time_range, _summary_types, _calculation_basis, _time_type) - df = pd.DataFrame() - for summary in pivalues: - key = PIConsts.SummaryType(int(summary.Key)).name - value = summary.Value - timestamp = Time.timestamp_to_index(value.Timestamp.UtcTime) - value = value.Value - df = df.join( # type: ignore - pd.DataFrame(data={key: value}, index=[timestamp]), how="outer" - ) - return df - - @abc.abstractmethod - def _summary( - self, - time_range: SDK.AF.Time.AFTimeRange, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - time_type: SDK.AF.Data.AFTimestampCalculation, - ) -> _AFtyping.Data.SummaryDict: - pass - - def summaries( - self, - start_time: Time.TimeLike, - end_time: Time.TimeLike, - interval: str, - summary_types: PIConsts.SummaryType, - calculation_basis: PIConsts.CalculationBasis = PIConsts.CalculationBasis.TIME_WEIGHTED, - time_type: PIConsts.TimestampCalculation = PIConsts.TimestampCalculation.AUTO, - ) 
-> pd.DataFrame: - """Return one or more summary values for each interval within a time range. - - Parameters - ---------- - start_time (str or datetime): Containing the date, and possibly time, - from which to retrieve the values. This is parsed, together - with `end_time`, using - :afsdk:`AF.Time.AFTimeRange `. - end_time (str or datetime): Containing the date, and possibly time, - until which to retrieve values. This is parsed, together - with `start_time`, using - :afsdk:`AF.Time.AFTimeRange `. - interval (str): String containing the interval at which to extract - data. This is parsed using - :afsdk:`AF.Time.AFTimeSpan.Parse `. - summary_types (int or PIConsts.SummaryType): Type(s) of summaries - of the data within the requested time range. - calculation_basis (int or PIConsts.CalculationBasis, optional): - Event weighting within an interval. See :ref:`event_weighting` - and :any:`CalculationBasis` for more information. Defaults to - CalculationBasis.TIME_WEIGHTED. - time_type (int or PIConsts.TimestampCalculation, optional): - Timestamp to return for each of the requested summaries. See - :ref:`summary_timestamps` and :any:`TimestampCalculation` for - more information. Defaults to TimestampCalculation.AUTO. - - Returns - ------- - pandas.DataFrame: Dataframe with the unique timestamps as row index - and the summary name as column name. 
- """ - time_range = Time.to_af_time_range(start_time, end_time) - _interval = SDK.AF.Time.AFTimeSpan.Parse(interval) - _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) - _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) - _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) - pivalues = self._summaries( - time_range, _interval, _summary_types, _calculation_basis, _time_type - ) - df = pd.DataFrame() - for summary in pivalues: - key = PIConsts.SummaryType(int(summary.Key)).name - timestamps, values = zip( - *[ - (Time.timestamp_to_index(value.Timestamp.UtcTime), value.Value) - for value in summary.Value - ], - strict=True, - ) - df = df.join( # type: ignore - pd.DataFrame(data={key: values}, index=timestamps), how="outer" - ) - return df - - @abc.abstractmethod - def _summaries( - self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - time_type: SDK.AF.Data.AFTimestampCalculation, - ) -> _AFtyping.Data.SummariesDict: - pass - - @property - @abc.abstractmethod - def units_of_measurement(self) -> str | None: - """Return the units of measurment of the values in the current object.""" - pass - - def update_value( - self, - value: Any, - time: Time.TimeLike | None = None, - update_mode: PIConsts.UpdateMode = PIConsts.UpdateMode.NO_REPLACE, - buffer_mode: PIConsts.BufferMode = PIConsts.BufferMode.BUFFER_IF_POSSIBLE, - ) -> None: - """Update value for existing PI object. - - Parameters - ---------- - value: value type should be in cohesion with PI object or - it will raise PIException: [-10702] STATE Not Found - time (datetime, optional): it is not possible to set future value, - it raises PIException: [-11046] Target Date in Future. - - You can combine update_mode and time to change already stored value. - """ - from . 
import Time as time_module - - if time is not None: - _value = SDK.AF.Asset.AFValue(value, time_module.to_af_time(time)) - else: - _value = SDK.AF.Asset.AFValue(value) - - _update_mode = SDK.AF.Data.AFUpdateOption(int(update_mode)) - _buffer_mode = SDK.AF.Data.AFBufferOption(int(buffer_mode)) - self._update_value(_value, _update_mode, _buffer_mode) - - @abc.abstractmethod - def _update_value( - self, - value: SDK.AF.Asset.AFValue, - update_mode: SDK.AF.Data.AFUpdateOption, - buffer_mode: SDK.AF.Data.AFBufferOption, - ) -> None: - pass diff --git a/PIconnect/PIPoint.py b/PIconnect/PIPoint.py deleted file mode 100644 index f9cacf6f..00000000 --- a/PIconnect/PIPoint.py +++ /dev/null @@ -1,164 +0,0 @@ -"""PIPoint.""" - -from typing import Any - -import PIconnect._typing.AF as _AFtyping -from PIconnect import AF, PIData, Time - - -class PIPoint(PIData.PISeriesContainer): - """Reference to a PI Point to get data and corresponding metadata from the server. - - Parameters - ---------- - pi_point (AF.PI.PIPoint): Reference to a PIPoint as returned by the SDK - """ - - version = "0.3.0" - - def __init__(self, pi_point: AF.PI.PIPoint) -> None: - super().__init__() - self.pi_point = pi_point - self.tag = pi_point.Name - self.__attributes_loaded = False - self.__raw_attributes = {} - - def __repr__(self): - """Return the string representation of the PI Point.""" - return ( - f"{self.__class__.__qualname__}({self.tag}, {self.description}; " - f"Current Value: {self.current_value} {self.units_of_measurement})" - ) - - @property - def created(self): - """Return the creation datetime of a point.""" - return Time.timestamp_to_index(self.raw_attributes["creationdate"]) - - @property - def description(self): - """Return the description of the PI Point. - - .. 
todo:: - - Add setter to alter displayed description - """ - return self.raw_attributes["descriptor"] - - @property - def last_update(self): - """Return the time at which the last value for this PI Point was recorded.""" - return Time.timestamp_to_index(self.pi_point.CurrentValue().Timestamp.UtcTime) - - @property - def name(self) -> str: - """Return the name of the PI Point.""" - return self.tag - - @property - def raw_attributes(self) -> dict[str, Any]: - """Return a dictionary of the raw attributes of the PI Point.""" - self.__load_attributes() - return self.__raw_attributes - - @property - def units_of_measurement(self) -> str | None: - """Return the units of measument in which values for this PI Point are reported.""" - return self.raw_attributes["engunits"] - - def __load_attributes(self) -> None: - """Load the raw attributes of the PI Point from the server.""" - if not self.__attributes_loaded: - self.pi_point.LoadAttributes([]) - self.__attributes_loaded = True - self.__raw_attributes = {att.Key: att.Value for att in self.pi_point.GetAttributes([])} - - def _current_value(self) -> Any: - """Return the last recorded value for this PI Point (internal use only).""" - return self.pi_point.CurrentValue().Value - - def _filtered_summaries( - self, - time_range: AF.Time.AFTimeRange, - interval: AF.Time.AFTimeSpan, - filter_expression: str, - summary_types: AF.Data.AFSummaryTypes, - calculation_basis: AF.Data.AFCalculationBasis, - filter_evaluation: AF.Data.AFSampleType, - filter_interval: AF.Time.AFTimeSpan, - time_type: AF.Data.AFTimestampCalculation, - ) -> _AFtyping.Data.SummariesDict: - return self.pi_point.FilteredSummaries( - time_range, - interval, - filter_expression, - summary_types, - calculation_basis, - filter_evaluation, - filter_interval, - time_type, - ) - - def _interpolated_value(self, time: AF.Time.AFTime) -> AF.Asset.AFValue: - """Return a single value for this PI Point.""" - return self.pi_point.InterpolatedValue(time) - - def 
_interpolated_values( - self, - time_range: AF.Time.AFTimeRange, - interval: AF.Time.AFTimeSpan, - filter_expression: str, - ) -> AF.Asset.AFValues: - include_filtered_values = False - return self.pi_point.InterpolatedValues( - time_range, interval, filter_expression, include_filtered_values - ) - - def _normalize_filter_expression(self, filter_expression: str) -> str: - return filter_expression.replace("%tag%", self.tag) - - def _recorded_value( - self, time: AF.Time.AFTime, retrieval_mode: AF.Data.AFRetrievalMode - ) -> AF.Asset.AFValue: - """Return a single recorded value for this PI Point.""" - return self.pi_point.RecordedValue(time, AF.Data.AFRetrievalMode(int(retrieval_mode))) - - def _recorded_values( - self, - time_range: AF.Time.AFTimeRange, - boundary_type: AF.Data.AFBoundaryType, - filter_expression: str, - ) -> AF.Asset.AFValues: - include_filtered_values = False - return self.pi_point.RecordedValues( - time_range, boundary_type, filter_expression, include_filtered_values - ) - - def _summary( - self, - time_range: AF.Time.AFTimeRange, - summary_types: AF.Data.AFSummaryTypes, - calculation_basis: AF.Data.AFCalculationBasis, - time_type: AF.Data.AFTimestampCalculation, - ) -> _AFtyping.Data.SummaryDict: - return self.pi_point.Summary(time_range, summary_types, calculation_basis, time_type) - - def _summaries( - self, - time_range: AF.Time.AFTimeRange, - interval: AF.Time.AFTimeSpan, - summary_types: AF.Data.AFSummaryTypes, - calculation_basis: AF.Data.AFCalculationBasis, - time_type: AF.Data.AFTimestampCalculation, - ) -> _AFtyping.Data.SummariesDict: - return self.pi_point.Summaries( - time_range, interval, summary_types, calculation_basis, time_type - ) - - def _update_value( - self, - value: AF.Asset.AFValue, - update_mode: AF.Data.AFUpdateOption, - buffer_mode: AF.Data.AFBufferOption, - ) -> None: - return self.pi_point.UpdateValue(value, update_mode, buffer_mode) diff --git a/PIconnect/Time.py b/PIconnect/Time.py index 7159c506..15e53c73 100644 --- 
a/PIconnect/Time.py +++ b/PIconnect/Time.py @@ -5,8 +5,8 @@ import pandas as pd # type: ignore -from PIconnect import AF, PIConfig -from PIconnect.AFSDK import System +from PIconnect import PIConfig +from PIconnect.AFSDK import AF, System TimeLike = str | datetime.datetime IntervalLike = str | datetime.timedelta | pd.Timedelta @@ -58,7 +58,7 @@ def to_af_time(time: TimeLike) -> AF.Time.AFTime: return AF.Time.AFTime(time) -def to_af_time_span(interval: IntervalLike) -> AF.Time.AFTimeSpan: +def to_af_time_span(interval: IntervalLike | None) -> AF.Time.AFTimeSpan: """Convert a time interval to a AFTimeSpan value. Parameters diff --git a/PIconnect/__init__.py b/PIconnect/__init__.py index 77db809b..9dc5f443 100644 --- a/PIconnect/__init__.py +++ b/PIconnect/__init__.py @@ -1,9 +1,9 @@ """PIconnect - Connector to the OSISoft PI and PI-AF databases.""" +from PIconnect.config import PIConfig # noqa: I001 +from PIconnect.AF import AFDatabase, PIAFDatabase from PIconnect.AFSDK import AF, AF_SDK_VERSION -from PIconnect.config import PIConfig from PIconnect.PI import PIServer -from PIconnect.PIAF import PIAFDatabase from . 
import _version @@ -13,6 +13,7 @@ __all__ = [ "AF", "AF_SDK_VERSION", + "AFDatabase", "PIAFDatabase", "PIConfig", "PIServer", diff --git a/PIconnect/_typing/AF.py b/PIconnect/_typing/AF.py index 986ddabf..bd36f78c 100644 --- a/PIconnect/_typing/AF.py +++ b/PIconnect/_typing/AF.py @@ -43,11 +43,10 @@ def __iter__(self) -> Iterator[AFDatabase]: if self.DefaultDatabase is not None: yield from [self.DefaultDatabase] - def __getitem__(self, name: str) -> AFDatabase: + def __getitem__(self, name: str) -> AFDatabase | None: """Return the AFDatabase with the given name.""" if name == self.DefaultDatabase.Name: return self.DefaultDatabase - raise KeyError(f"AFDatabase {name} not found") def __init__(self, name: str) -> None: self.Name = name @@ -75,8 +74,7 @@ def __init__(self) -> None: def __iter__(self) -> Iterator[PISystem]: return (x for x in [self.DefaultPISystem]) - def __getitem__(self, name: str) -> PISystem: + def __getitem__(self, name: str) -> PISystem | None: """Return the PISystem with the given name.""" if name == self.DefaultPISystem.Name: return self.DefaultPISystem - raise KeyError(f"PISystem {name} not found") diff --git a/PIconnect/_typing/PI.py b/PIconnect/_typing/PI.py index 50bbeef8..42d4a6b6 100644 --- a/PIconnect/_typing/PI.py +++ b/PIconnect/_typing/PI.py @@ -53,11 +53,10 @@ def __init__(self) -> None: def __iter__(self) -> Iterator[PIServer]: return (x for x in [self.DefaultPIServer]) - def __getitem__(self, server: str) -> PIServer: + def __getitem__(self, server: str) -> PIServer | None: """Stub to mock getting a server by name.""" if server == self.DefaultPIServer.Name: return self.DefaultPIServer - raise KeyError(f"Server '{server}' not found.") class PIPoint: diff --git a/tests/test_PIAF.py b/tests/test_PIAF.py index bb4fca0f..2a8b3a3d 100644 --- a/tests/test_PIAF.py +++ b/tests/test_PIAF.py @@ -5,44 +5,39 @@ import pytest import PIconnect as PI -import PIconnect.AFSDK as AFSDK -import PIconnect.PIAF as PIAF from PIconnect import Asset from 
PIconnect._typing import AF -AFSDK.AF, AFSDK.System, AFSDK.AF_SDK_VERSION = AFSDK.__fallback() -PI.AF = PIAF.AF = AFSDK.AF - class TestAFDatabase: """Test connecting to the AF database.""" def test_connection(self): - """Test creating a PI.PIAFDatabase object without arguments raises no exception.""" - PI.PIAFDatabase() + """Test creating a PI.AFDatabase object without arguments raises no exception.""" + PI.AFDatabase() def test_server_name(self): """Test that the server reports the same name as which was connected to.""" AFserver = PI.AF.PISystems().DefaultPISystem.Name database = PI.AF.PISystems().DefaultPISystem.Databases.DefaultDatabase.Name - server = PI.PIAFDatabase(AFserver, database) + server = PI.AFDatabase(AFserver, database) assert server.server_name == AFserver assert server.database_name == database assert repr(server) == "PIAFDatabase(\\\\{s}\\{d})".format(s=AFserver, d=database) def test_unknown_server_name(self): """Test that the server reports a warning for an unknown server.""" - AFserver_name = "__".join(list(PI.PIAFDatabase.servers()) + ["UnkownServerName"]) + AFserver_name = "__".join(list(PI.AFDatabase.servers()) + ["UnkownServerName"]) with pytest.warns(UserWarning): - PI.PIAFDatabase(server=AFserver_name) + PI.AFDatabase(server=AFserver_name) def test_unknown_database_name(self): """Test that the server reports a warning for an unknown database.""" - server = cast(AF.PISystem, PI.PIAFDatabase.default_server()) # type: ignore + server = cast(AF.PISystem, PI.AFDatabase.default_server()) # type: ignore databases = [db.Name for db in server.Databases] AFdatabase_name = "__".join(databases + ["UnkownDatabaseName"]) with pytest.warns(UserWarning): - PI.PIAFDatabase(database=AFdatabase_name) + PI.AFDatabase(database=AFdatabase_name) class TestDatabaseDescendants: @@ -50,7 +45,7 @@ class TestDatabaseDescendants: def test_children(self): """Test that calling children on the database returns a dict of child elements.""" - with PI.PIAFDatabase() as db: 
+ with PI.AFDatabase() as db: children = db.children assert isinstance(children, dict) @@ -61,20 +56,21 @@ class TestDatabaseSearch: def test_search(self): """Test that calling attributes on the database returns a list of attributes.""" with pytest.warns(DeprecationWarning): - with PI.PIAFDatabase() as db: + with PI.AFDatabase() as db: attributes = db.search([r"", r""]) assert isinstance(attributes, Asset.AFAttributeList) def test_split_element_attribute(self): """Test that calling attributes on the database returns a list of attributes.""" with pytest.warns(DeprecationWarning): - with PI.PIAFDatabase() as db: + with PI.AFDatabase() as db: + print(db.children) attributes = db.search(r"BaseElement|Attribute1") assert isinstance(attributes[0].name, str) def test_split_element_nested_attribute(self): """Test that calling attributes on the database returns a list of attributes.""" with pytest.warns(DeprecationWarning): - with PI.PIAFDatabase() as db: + with PI.AFDatabase() as db: attributes = db.search(r"BaseElement|Attribute1|Attribute2") assert isinstance(attributes[0].name, str) From 31f8d9df35416d812f6c198081386aa0bbe198f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Tue, 25 Jun 2024 13:42:46 +0200 Subject: [PATCH 18/28] feat: implement explicit loader for SDK --- PIconnect/AFSDK.py | 86 ++++++++++++++++++++++++++++++++---------- PIconnect/__init__.py | 2 +- tests/test_load_SDK.py | 62 ++++++++++++++++++++++++++++++ 3 files changed, 130 insertions(+), 20 deletions(-) create mode 100644 tests/test_load_SDK.py diff --git a/PIconnect/AFSDK.py b/PIconnect/AFSDK.py index b279c252..d7dfef13 100644 --- a/PIconnect/AFSDK.py +++ b/PIconnect/AFSDK.py @@ -1,14 +1,55 @@ """AFSDK - Loads the .NET libraries from the OSIsoft AF SDK.""" +import dataclasses import logging import os +import pathlib import sys -import typing +from types import ModuleType +from typing import TYPE_CHECKING, Optional, Union, cast __all__ = ["AF", "System", "AF_SDK_VERSION"] logger 
= logging.getLogger(__name__) + +@dataclasses.dataclass(kw_only=True) +class PIConnector: + assembly_path: pathlib.Path + AF: ModuleType + System: ModuleType + + +StrPath = Union[str, pathlib.Path] + + +def get_PI_connector(assembly_path: Optional[StrPath] = None) -> PIConnector: + """Return a new instance of the PI connector.""" + full_path = _get_SDK_path(assembly_path) + if full_path is None: + if assembly_path: + raise ImportError(f"PIAF SDK not found at '{assembly_path}'") + raise ImportError( + "PIAF SDK not found, check installation " + "or pass valid path to directory containing SDK assembly." + ) + dotnetSDK = _get_dotnet_SDK(full_path) + return PIConnector(assembly_path=full_path, **dotnetSDK) + + +def _get_dotnet_SDK(full_path: pathlib.Path) -> dict[str, ModuleType]: + import clr # type: ignore + + sys.path.append(str(full_path)) + clr.AddReference("OSIsoft.AFSDK") # type: ignore ; pylint: disable=no-member + import System # type: ignore + from OSIsoft import AF # type: ignore + + _AF = cast(ModuleType, AF) + _System = cast(ModuleType, System) + return {"AF": _AF, "System": _System} + + # pragma pylint: disable=import-outside-toplevel @@ -28,6 +69,26 @@ def __fallback(): return _af, _System, _AF_SDK_version +def _get_SDK_path(full_path: Optional[StrPath] = None) -> Optional[pathlib.Path]: + if full_path: + assembly_directories = [pathlib.Path(full_path)] + else: + installation_directories = { + os.getenv("PIHOME"), + "C:\\Program Files\\PIPC", + "C:\\Program Files (x86)\\PIPC", + } + assembly_directories = ( + pathlib.Path(path) / "AF\\PublicAssemblies\\4.0\\" + for path in installation_directories + if path is not None + ) + for AF_dir in assembly_directories: + logging.debug("Full path to potential SDK location: '%s'", AF_dir) + if AF_dir.is_dir(): + return AF_dir + + if ( os.getenv("GITHUB_ACTIONS", "false").lower() == "true" or os.getenv("TF_BUILD", "false").lower() == "true" @@ -39,35 +100,22 @@ def __fallback(): # Get the installation directory 
from the environment variable or fall back # to the Windows default installation path - installation_directories = [ - os.getenv("PIHOME"), - "C:\\Program Files\\PIPC", - "C:\\Program Files (x86)\\PIPC", - ] - for directory in installation_directories: - logging.debug("Trying installation directory '%s'", directory) - if not directory: - continue - AF_dir = os.path.join(directory, "AF\\PublicAssemblies\\4.0\\") - logging.debug("Full path to potential SDK location: '%s'", AF_dir) - if os.path.isdir(AF_dir): - PIAF_SDK = AF_dir - break - else: + PIAF_SDK = _get_SDK_path() + if PIAF_SDK is None: raise ImportError("PIAF SDK not found, check installation") - sys.path.append(PIAF_SDK) + sys.path.append(str(PIAF_SDK)) clr.AddReference("OSIsoft.AFSDK") # type: ignore ; pylint: disable=no-member import System as _System # type: ignore from OSIsoft import AF as _af # type: ignore - _AF_SDK_version = typing.cast(str, _af.PISystems().Version) # type: ignore ; pylint: disable=no-member + _AF_SDK_version = cast(str, _af.PISystems().Version) # type: ignore ; pylint: disable=no-member print("OSIsoft(r) AF SDK Version: {}".format(_AF_SDK_version)) -if typing.TYPE_CHECKING: +if TYPE_CHECKING: # This branch is separate from previous one as otherwise no typechecking takes place # on the main logic. from ._typing import AF, AF_SDK_VERSION, System diff --git a/PIconnect/__init__.py b/PIconnect/__init__.py index 9dc5f443..065a0254 100644 --- a/PIconnect/__init__.py +++ b/PIconnect/__init__.py @@ -1,8 +1,8 @@ """PIconnect - Connector to the OSISoft PI and PI-AF databases.""" -from PIconnect.config import PIConfig # noqa: I001 from PIconnect.AF import AFDatabase, PIAFDatabase from PIconnect.AFSDK import AF, AF_SDK_VERSION +from PIconnect.config import PIConfig # noqa: I001 from PIconnect.PI import PIServer from . 
import _version diff --git a/tests/test_load_SDK.py b/tests/test_load_SDK.py new file mode 100644 index 00000000..949c6bfd --- /dev/null +++ b/tests/test_load_SDK.py @@ -0,0 +1,62 @@ +"""Test the loading of the SDK connector.""" + +import os +import pathlib + +import pytest + +import PIconnect as PI +from PIconnect import AFSDK + + +def on_CI() -> bool: + """Return True if the tests are running on a CI environment.""" + return ( + os.getenv("GITHUB_ACTIONS", "false").lower() == "true" + or os.getenv("TF_BUILD", "false").lower() == "true" + or os.getenv("READTHEDOCS", "false").lower() == "true" + ) + + +# Skip this test module on CI as it requires the real SDK to be installed +pytestmark = pytest.mark.skipif(on_CI(), reason="Real SDK not available on CI") + + +def test_load_SDK_without_arguments_raises_no_exception() -> None: + """Test that loading the SDK object without arguments raises no exception.""" + try: + PI.get_PI_connector() + except Exception as e: + pytest.fail(f"Exception raised: {e}") + + +def test_load_SDK_returns_PIconnect_object() -> None: + """Test that loading the SDK object returns a PIConnector.""" + assert isinstance(PI.get_PI_connector(), AFSDK.PIConnector) + + +def test_load_SDK_with_a_valid_path_returns_SDK_object() -> None: + """Test that loading the SDK object with a path returns a PIConnector.""" + assembly_path = "c:\\Program Files (x86)\\PIPC\\AF\\PublicAssemblies\\4.0\\" + assert isinstance(PI.get_PI_connector(assembly_path), AFSDK.PIConnector) + + +def test_load_SDK_with_a_valid_path_stores_path_in_connector() -> None: + """Test that loading the SDK object with a path stores the path in the connector.""" + assembly_path = "c:\\Program Files (x86)\\PIPC\\AF\\PublicAssemblies\\4.0\\" + connector = PI.get_PI_connector(assembly_path) + assert connector.assembly_path == pathlib.Path(assembly_path) + + +def test_load_SDK_with_an_invalid_path_raises_import_error() -> None: + """Test that loading the SDK object with an invalid path raises an 
ImportError.""" + assembly_path = "c:\\invalid\\path\\" + with pytest.raises(ImportError, match="PIAF SDK not found at .*"): + PI.get_PI_connector(assembly_path) + + +def test_load_SDK_with_valid_path_has_SDK_reference() -> None: + """Test that loading the SDK object with a valid path has a reference to the SDK.""" + assembly_path = "c:\\Program Files (x86)\\PIPC\\AF\\PublicAssemblies\\4.0\\" + connector = PI.get_PI_connector(assembly_path) + assert connector.AF is not None From f7f3cf90e3b4c037e436fa2fd7ccf72a9e6336a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Tue, 25 Jun 2024 14:50:10 +0200 Subject: [PATCH 19/28] chore: update SDK documentation root to Aveva site --- docs/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index bdbb1a77..b44562da 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -85,11 +85,11 @@ def __getattr__(cls, name) -> MagicMock: # type: ignore # built documents. # # The short X.Y version. -version = PIconnect.__version__ +version = '.'.join(PIconnect.__version__.split('.')[:2]) # The full version, including alpha/beta/rc tags. 
release = PIconnect.__version__ -extlinks = {"afsdk": ("https://docs.osisoft.com/bundle/af-sdk/page/html/%s", "")} +extlinks = {"afsdk": ("https://docs.aveva.com/bundle/af-sdk/page/html/%s", "")} intersphinx_mapping = { "python": ("https://docs.python.org/3.10", None), From 7629f003dbfed36cef333b2fd4926bf405c1c2b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Tue, 25 Jun 2024 15:12:37 +0200 Subject: [PATCH 20/28] feat: added PIconnector to __init__, added protocols for module spec --- PIconnect/AFSDK.py | 23 +++++++++- PIconnect/__init__.py | 3 +- PIconnect/_typing/__init__.py | 82 ++++++++++++++++++++++++++++++++++- tests/test_load_SDK.py | 5 +-- 4 files changed, 105 insertions(+), 8 deletions(-) diff --git a/PIconnect/AFSDK.py b/PIconnect/AFSDK.py index d7dfef13..fcf7012d 100644 --- a/PIconnect/AFSDK.py +++ b/PIconnect/AFSDK.py @@ -12,12 +12,31 @@ logger = logging.getLogger(__name__) +if TYPE_CHECKING: + from ._typing import AFType, SystemType +else: + AFType = ModuleType + SystemType = ModuleType + @dataclasses.dataclass(kw_only=True) class PIConnector: assembly_path: pathlib.Path - AF: ModuleType - System: ModuleType + AF: AFType + System: SystemType + + # def PIAFSystems(self) -> dict[str, "PIAFSystem"]: + # return {srv.Name: PIAFSystem(srv) for srv in self.AF.PISystems} + + # def PIServers(self) -> dict[str, "PIServer"]: + # return {srv.Name: PIServer(srv) for srv in self.AF.PI.PIServers} + + # @property + # def version(self) -> str: + # return self.AF.PISystems().Version + + # def __str__(self) -> str: + # return f"PIConnector({self.assembly_path}, AF SDK version: {self.version})" StrPath = Union[str, pathlib.Path] diff --git a/PIconnect/__init__.py b/PIconnect/__init__.py index 065a0254..30b982c3 100644 --- a/PIconnect/__init__.py +++ b/PIconnect/__init__.py @@ -1,8 +1,8 @@ """PIconnect - Connector to the OSISoft PI and PI-AF databases.""" +from PIconnect.config import PIConfig # noqa: I001 isort: skip from PIconnect.AF import 
AFDatabase, PIAFDatabase from PIconnect.AFSDK import AF, AF_SDK_VERSION -from PIconnect.config import PIConfig # noqa: I001 from PIconnect.PI import PIServer from . import _version @@ -16,6 +16,7 @@ "AFDatabase", "PIAFDatabase", "PIConfig", + "PIConnector", "PIServer", "__sdk_version", ] diff --git a/PIconnect/_typing/__init__.py b/PIconnect/_typing/__init__.py index 67372785..cce28d2f 100644 --- a/PIconnect/_typing/__init__.py +++ b/PIconnect/_typing/__init__.py @@ -1,8 +1,86 @@ """Type stubs for the AF SDK and dotnet libraries.""" -from . import dotnet as System # noqa: I001 +from typing import Protocol + from . import AF +from . import dotnet as System + + +class AFType(Protocol): + # Modules + # Analysis = AF.Analysis + Asset = AF.Asset + # Collective = AF.Collective + Data = AF.Data + # Diagnostics = AF.Diagnostics + EventFrame = AF.EventFrame + # Modeling = AF.Modeling + # Notification = AF.Notification + PI = AF.PI + # Search = AF.Search + # Support = AF.Support + Time = AF.Time + # UI = AF.UI + UnitsOfMeasure = AF.UnitsOfMeasure + + # Classes + # AFActiveDirectoryProperties = AF.AFActiveDirectoryProperties + AFCategory = AF.AFCategory + AFCategories = AF.AFCategories + # AFChangedEventArgs = AF.AFChangedEventArgs + # AFCheckoutInfo = AF.AFCheckoutInfo + # AFClientRegistration = AF.AFClientRegistration + # AFCollection = AF.AFCollection + # AFCollectionList = AF.AFCollectionList + # AFConnectionInfo = AF.AFConnectionInfo + # AFContact = AF.AFContact + # AFCsvColumn = AF.AFCsvColumn + # AFCsvColumns = AF.AFCsvColumns + AFDatabase = AF.AFDatabase + # AFDatabases = AF.AFDatabases + # AFErrors = AF.AFErrors + # AFEventArgs = AF.AFEventArgs + # AFGlobalRestorer = AF.AFGlobalRestorer + # AFGlobalSettings = AF.AFGlobalSettings + # AFKeyedResults = AF.AFKeyedResults + # AFLibraries = AF.AFLibraries + # AFLibrary = AF.AFLibrary + # AFListResults = AF.AFListResults + # AFNamedCollection = AF.AFNamedCollection + # AFNamedCollectionList = AF.AFNamedCollectionList + # 
AFNameSubstitution = AF.AFNameSubstitution + # AFObject = AF.AFObject + # AFOidcIdentity = AF.AFOidcIdentity + # AFPlugin = AF.AFPlugin + # AFPlugins = AF.AFPlugins + # AFProgressEventArgs = AF.AFProgressEventArgs + # AFProvider = AF.AFProvider + # AFRole = AF.AFRole + # AFSDKExtension = AF.AFSDKExtension + # AFSecurity = AF.AFSecurity + # AFSecurityIdentities = AF.AFSecurityIdentities + # AFSecurityIdentity = AF.AFSecurityIdentity + # AFSecurityMapping = AF.AFSecurityMapping + # AFSecurityMappings = AF.AFSecurityMappings + # AFSecurityRightsExtension = AF.AFSecurityRightsExtension + # NumericStringComparer = AF.NumericStringComparer + PISystem = AF.PISystem + PISystems = AF.PISystems + # UniversalComparer = AF.UniversalComparer + + +class SystemType(Protocol): + # Modules + Data = System.Data + Net = System.Net + Security = System.Security + + # Classes + DateTime = System.DateTime + Exception = System.Exception + TimeSpan = System.TimeSpan + AF_SDK_VERSION = "2.7_compatible" -__all__ = ["AF", "AF_SDK_VERSION", "System"] +__all__ = ["AF", "AF_SDK_VERSION", "AFType", "System"] diff --git a/tests/test_load_SDK.py b/tests/test_load_SDK.py index 949c6bfd..ace869b5 100644 --- a/tests/test_load_SDK.py +++ b/tests/test_load_SDK.py @@ -6,7 +6,6 @@ import pytest import PIconnect as PI -from PIconnect import AFSDK def on_CI() -> bool: @@ -32,13 +31,13 @@ def test_load_SDK_without_arguments_raises_no_exception() -> None: def test_load_SDK_returns_PIconnect_object() -> None: """Test that loading the SDK object returns a PIConnector.""" - assert isinstance(PI.get_PI_connector(), AFSDK.PIConnector) + assert isinstance(PI.get_PI_connector(), PI.PIConnector) def test_load_SDK_with_a_valid_path_returns_SDK_object() -> None: """Test that loading the SDK object with a path returns a PIConnector.""" assembly_path = "c:\\Program Files (x86)\\PIPC\\AF\\PublicAssemblies\\4.0\\" - assert isinstance(PI.get_PI_connector(assembly_path), AFSDK.PIConnector) + assert 
isinstance(PI.get_PI_connector(assembly_path), PI.PIConnector) def test_load_SDK_with_a_valid_path_stores_path_in_connector() -> None: From 8e205e4ad488a9cc95a144083b5eb629b4cfb3a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Tue, 25 Jun 2024 15:21:12 +0200 Subject: [PATCH 21/28] test: move skip on CI marker to common.py --- tests/common.py | 17 +++++++++++++++++ tests/test_load_SDK.py | 13 ++----------- 2 files changed, 19 insertions(+), 11 deletions(-) create mode 100644 tests/common.py diff --git a/tests/common.py b/tests/common.py new file mode 100644 index 00000000..2fa314ae --- /dev/null +++ b/tests/common.py @@ -0,0 +1,17 @@ +"""Common fixtures for testing PIconnect.""" + +import os + +import pytest + + +def on_CI() -> bool: + """Return True if the tests are running on a CI environment.""" + return ( + os.getenv("GITHUB_ACTIONS", "false").lower() == "true" + or os.getenv("TF_BUILD", "false").lower() == "true" + or os.getenv("READTHEDOCS", "false").lower() == "true" + ) + + +skip_if_on_CI = pytest.mark.skipif(on_CI(), reason="Real SDK not available on CI") diff --git a/tests/test_load_SDK.py b/tests/test_load_SDK.py index ace869b5..9cf3888a 100644 --- a/tests/test_load_SDK.py +++ b/tests/test_load_SDK.py @@ -1,24 +1,15 @@ """Test the loading of the SDK connector.""" -import os import pathlib import pytest import PIconnect as PI - -def on_CI() -> bool: - """Return True if the tests are running on a CI environment.""" - return ( - os.getenv("GITHUB_ACTIONS", "false").lower() == "true" - or os.getenv("TF_BUILD", "false").lower() == "true" - or os.getenv("READTHEDOCS", "false").lower() == "true" - ) - +from .common import skip_if_on_CI # Skip this test module on CI as it requires the real SDK to be installed -pytestmark = pytest.mark.skipif(on_CI(), reason="Real SDK not available on CI") +pytestmark = skip_if_on_CI def test_load_SDK_without_arguments_raises_no_exception() -> None: From f10c925993413d75b2d960e6a3570ef85d9e1f42 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Sat, 29 Mar 2025 12:22:59 +0100 Subject: [PATCH 22/28] chore: replace deprecated type hints --- PIconnect/AFSDK.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/PIconnect/AFSDK.py b/PIconnect/AFSDK.py index fcf7012d..c1211a23 100644 --- a/PIconnect/AFSDK.py +++ b/PIconnect/AFSDK.py @@ -6,7 +6,7 @@ import pathlib import sys from types import ModuleType -from typing import TYPE_CHECKING, Optional, Union, cast +from typing import TYPE_CHECKING, cast __all__ = ["AF", "System", "AF_SDK_VERSION"] @@ -39,10 +39,10 @@ class PIConnector: # return f"PIConnector({self.assembly_path}, AF SDK version: {self.version})" -StrPath = Union[str, pathlib.Path] +StrPath = str | pathlib.Path -def get_PI_connector(assembly_path: Optional[StrPath] = None) -> PIConnector: +def get_PI_connector(assembly_path: StrPath | None = None) -> PIConnector: """Return a new instance of the PI connector.""" full_path = _get_SDK_path(assembly_path) if full_path is None: @@ -88,7 +88,7 @@ def __fallback(): return _af, _System, _AF_SDK_version -def _get_SDK_path(full_path: Optional[StrPath] = None) -> Optional[pathlib.Path]: +def _get_SDK_path(full_path: StrPath | None = None) -> pathlib.Path | None: if full_path: assembly_directories = [pathlib.Path(full_path)] else: From 352fd228006aeacfb6ff1da50798b80c1fea5aa3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Tue, 8 Apr 2025 13:49:06 +0200 Subject: [PATCH 23/28] chore: cleanup dotnet mocks - move AF modules in AF package - rename dotnet to System --- PIconnect/_typing/{ => AF}/Asset.py | 7 ++++--- PIconnect/_typing/{ => AF}/Data.py | 0 PIconnect/_typing/{ => AF}/Database.py | 0 PIconnect/_typing/{ => AF}/EventFrame.py | 17 ++++++++++++++++- PIconnect/_typing/{ => AF}/Generic.py | 0 PIconnect/_typing/{ => AF}/PI.py | 3 ++- PIconnect/_typing/{ => AF}/Search.py | 5 +++++ PIconnect/_typing/{ => AF}/Time.py | 2 +- PIconnect/_typing/{ => AF}/UnitsOfMeasure.py 
| 0 PIconnect/_typing/{AF.py => AF/__init__.py} | 2 +- PIconnect/_typing/{ => AF}/_values.py | 0 PIconnect/_typing/{dotnet => System}/Data.py | 0 PIconnect/_typing/{dotnet => System}/Net.py | 0 .../_typing/{dotnet => System}/Security.py | 0 .../_typing/{dotnet => System}/__init__.py | 0 PIconnect/_typing/__init__.py | 5 ++--- 16 files changed, 31 insertions(+), 10 deletions(-) rename PIconnect/_typing/{ => AF}/Asset.py (97%) rename PIconnect/_typing/{ => AF}/Data.py (100%) rename PIconnect/_typing/{ => AF}/Database.py (100%) rename PIconnect/_typing/{ => AF}/EventFrame.py (67%) rename PIconnect/_typing/{ => AF}/Generic.py (100%) rename PIconnect/_typing/{ => AF}/PI.py (99%) rename PIconnect/_typing/{ => AF}/Search.py (89%) rename PIconnect/_typing/{ => AF}/Time.py (95%) rename PIconnect/_typing/{ => AF}/UnitsOfMeasure.py (100%) rename PIconnect/_typing/{AF.py => AF/__init__.py} (96%) rename PIconnect/_typing/{ => AF}/_values.py (100%) rename PIconnect/_typing/{dotnet => System}/Data.py (100%) rename PIconnect/_typing/{dotnet => System}/Net.py (100%) rename PIconnect/_typing/{dotnet => System}/Security.py (100%) rename PIconnect/_typing/{dotnet => System}/__init__.py (100%) diff --git a/PIconnect/_typing/Asset.py b/PIconnect/_typing/AF/Asset.py similarity index 97% rename from PIconnect/_typing/Asset.py rename to PIconnect/_typing/AF/Asset.py index acf430a5..5637ff8b 100644 --- a/PIconnect/_typing/Asset.py +++ b/PIconnect/_typing/AF/Asset.py @@ -1,11 +1,12 @@ -"""Mock classes for the AF module.""" +"""Mock classes for the AF.Asset module.""" from collections.abc import Iterator from typing import cast -from . import AF, Data, Generic +from PIconnect._typing import AF, System + +from . import Data, Generic from . import UnitsOfMeasure as UOM -from . 
import dotnet as System from ._values import AFValue, AFValues __all__ = [ diff --git a/PIconnect/_typing/Data.py b/PIconnect/_typing/AF/Data.py similarity index 100% rename from PIconnect/_typing/Data.py rename to PIconnect/_typing/AF/Data.py diff --git a/PIconnect/_typing/Database.py b/PIconnect/_typing/AF/Database.py similarity index 100% rename from PIconnect/_typing/Database.py rename to PIconnect/_typing/AF/Database.py diff --git a/PIconnect/_typing/EventFrame.py b/PIconnect/_typing/AF/EventFrame.py similarity index 67% rename from PIconnect/_typing/EventFrame.py rename to PIconnect/_typing/AF/EventFrame.py index 1fda7725..6519635f 100644 --- a/PIconnect/_typing/EventFrame.py +++ b/PIconnect/_typing/AF/EventFrame.py @@ -3,7 +3,8 @@ import enum from collections.abc import Iterable -from . import AF, Asset, Time +from PIconnect._typing import AF +from PIconnect._typing.AF import Asset, Time class AFEventFrameSearchMode(enum.IntEnum): @@ -39,6 +40,20 @@ def FindEventFrames( search_full_hierarchy: bool = False, /, ) -> Iterable["AFEventFrame"]: + """Mock method to find event frames.""" + _config = { + "database": database, + "search_root": search_root, + "start_time": start_time, + "start_index": start_index, + "max_count": max_count, + "search_mode": search_mode, + "name_filter": name_filter, + "referenced_element_name_filter": referenced_element_name_filter, + "element_category": element_category, + "element_template": element_template, + "search_full_hierarchy": search_full_hierarchy, + } return [] diff --git a/PIconnect/_typing/Generic.py b/PIconnect/_typing/AF/Generic.py similarity index 100% rename from PIconnect/_typing/Generic.py rename to PIconnect/_typing/AF/Generic.py diff --git a/PIconnect/_typing/PI.py b/PIconnect/_typing/AF/PI.py similarity index 99% rename from PIconnect/_typing/PI.py rename to PIconnect/_typing/AF/PI.py index 42d4a6b6..665fe5d2 100644 --- a/PIconnect/_typing/PI.py +++ b/PIconnect/_typing/AF/PI.py @@ -3,8 +3,9 @@ import enum from 
collections.abc import Iterable, Iterator +from PIconnect._typing import System + from . import Data, Generic, Time, _values -from . import dotnet as System __all__ = ["PIPoint", "PIServer", "PIServers"] diff --git a/PIconnect/_typing/Search.py b/PIconnect/_typing/AF/Search.py similarity index 89% rename from PIconnect/_typing/Search.py rename to PIconnect/_typing/AF/Search.py index c042e250..3c4ef7c9 100644 --- a/PIconnect/_typing/Search.py +++ b/PIconnect/_typing/AF/Search.py @@ -5,6 +5,7 @@ from .Asset import AFAttribute, AFElement from .Database import AFDatabase +from .EventFrame import AFEventFrame _AFSearchable = TypeVar( "_AFSearchable", @@ -36,3 +37,7 @@ class AFAttributeSearch(AFSearch[AFAttribute]): class AFElementSearch(AFSearch[AFElement]): """Search for AF elements.""" + + +class AFEventFrameSearch(AFSearch[AFEventFrame]): + """Search for AF elements.""" diff --git a/PIconnect/_typing/Time.py b/PIconnect/_typing/AF/Time.py similarity index 95% rename from PIconnect/_typing/Time.py rename to PIconnect/_typing/AF/Time.py index 81f6741d..6cd082be 100644 --- a/PIconnect/_typing/Time.py +++ b/PIconnect/_typing/AF/Time.py @@ -1,6 +1,6 @@ """Mock classes for the AF.Time module.""" -from . 
import dotnet as System +from PIconnect._typing import System class AFTime: diff --git a/PIconnect/_typing/UnitsOfMeasure.py b/PIconnect/_typing/AF/UnitsOfMeasure.py similarity index 100% rename from PIconnect/_typing/UnitsOfMeasure.py rename to PIconnect/_typing/AF/UnitsOfMeasure.py diff --git a/PIconnect/_typing/AF.py b/PIconnect/_typing/AF/__init__.py similarity index 96% rename from PIconnect/_typing/AF.py rename to PIconnect/_typing/AF/__init__.py index bd36f78c..6a44f5be 100644 --- a/PIconnect/_typing/AF.py +++ b/PIconnect/_typing/AF/__init__.py @@ -45,7 +45,7 @@ def __iter__(self) -> Iterator[AFDatabase]: def __getitem__(self, name: str) -> AFDatabase | None: """Return the AFDatabase with the given name.""" - if name == self.DefaultDatabase.Name: + if self.DefaultDatabase and name == self.DefaultDatabase.Name: return self.DefaultDatabase def __init__(self, name: str) -> None: diff --git a/PIconnect/_typing/_values.py b/PIconnect/_typing/AF/_values.py similarity index 100% rename from PIconnect/_typing/_values.py rename to PIconnect/_typing/AF/_values.py diff --git a/PIconnect/_typing/dotnet/Data.py b/PIconnect/_typing/System/Data.py similarity index 100% rename from PIconnect/_typing/dotnet/Data.py rename to PIconnect/_typing/System/Data.py diff --git a/PIconnect/_typing/dotnet/Net.py b/PIconnect/_typing/System/Net.py similarity index 100% rename from PIconnect/_typing/dotnet/Net.py rename to PIconnect/_typing/System/Net.py diff --git a/PIconnect/_typing/dotnet/Security.py b/PIconnect/_typing/System/Security.py similarity index 100% rename from PIconnect/_typing/dotnet/Security.py rename to PIconnect/_typing/System/Security.py diff --git a/PIconnect/_typing/dotnet/__init__.py b/PIconnect/_typing/System/__init__.py similarity index 100% rename from PIconnect/_typing/dotnet/__init__.py rename to PIconnect/_typing/System/__init__.py diff --git a/PIconnect/_typing/__init__.py b/PIconnect/_typing/__init__.py index cce28d2f..650d4d80 100644 --- 
a/PIconnect/_typing/__init__.py +++ b/PIconnect/_typing/__init__.py @@ -2,8 +2,7 @@ from typing import Protocol -from . import AF -from . import dotnet as System +from . import AF, System class AFType(Protocol): @@ -17,7 +16,7 @@ class AFType(Protocol): # Modeling = AF.Modeling # Notification = AF.Notification PI = AF.PI - # Search = AF.Search + Search = AF.Search # Support = AF.Support Time = AF.Time # UI = AF.UI From a8c56f146306561bd4c6e2f95bb11e6085b85690 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Tue, 8 Apr 2025 13:50:16 +0200 Subject: [PATCH 24/28] chore: cleanup _utils.py --- PIconnect/PI.py | 3 ++- PIconnect/_utils.py | 2 -- 2 files changed, 2 insertions(+), 3 deletions(-) delete mode 100644 PIconnect/_utils.py diff --git a/PIconnect/PI.py b/PIconnect/PI.py index 64e175d3..3ba94a47 100644 --- a/PIconnect/PI.py +++ b/PIconnect/PI.py @@ -6,13 +6,14 @@ import PIconnect._typing.AF as _AFtyping import PIconnect.AFSDK as SDK from PIconnect import Data, PIConsts, Time -from PIconnect._utils import InitialisationWarning from PIconnect.AFSDK import System __all__ = ["PIServer", "PIPoint"] _DEFAULT_AUTH_MODE = PIConsts.AuthenticationMode.PI_USER_AUTHENTICATION +class InitialisationWarning(UserWarning): + pass def _lookup_servers() -> dict[str, SDK.AF.PI.PIServer]: servers: dict[str, SDK.AF.PI.PIServer] = {} diff --git a/PIconnect/_utils.py b/PIconnect/_utils.py deleted file mode 100644 index 23f4e295..00000000 --- a/PIconnect/_utils.py +++ /dev/null @@ -1,2 +0,0 @@ -class InitialisationWarning(UserWarning): - pass From d0cc595d74dca6424118dab00b89f3e2f18b4979 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Tue, 8 Apr 2025 16:26:16 +0200 Subject: [PATCH 25/28] feat!: move AFEventFrame to separate module --- PIconnect/AF.py | 10 +++--- PIconnect/Asset.py | 29 ----------------- PIconnect/EventFrame.py | 70 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 75 insertions(+), 34 deletions(-) create mode 100644 
PIconnect/EventFrame.py diff --git a/PIconnect/AF.py b/PIconnect/AF.py index cddd79d0..bab62407 100644 --- a/PIconnect/AF.py +++ b/PIconnect/AF.py @@ -5,10 +5,10 @@ from typing import Any, Self import PIconnect.AFSDK as SDK -from PIconnect import Asset, PIConsts, Search, Time +from PIconnect import Asset, EventFrame, Search, Time _logger = logging.getLogger(__name__) -_DEFAULT_EVENTFRAME_SEARCH_MODE = PIConsts.EventFrameSearchMode.STARTING_AFTER +_DEFAULT_EVENTFRAME_SEARCH_MODE = EventFrame.EventFrameSearchMode.STARTING_AFTER class AFDatabase: @@ -126,14 +126,14 @@ def event_frames( start_time: Time.TimeLike = "", start_index: int = 0, max_count: int = 1000, - search_mode: PIConsts.EventFrameSearchMode = _DEFAULT_EVENTFRAME_SEARCH_MODE, + search_mode: EventFrame.EventFrameSearchMode = _DEFAULT_EVENTFRAME_SEARCH_MODE, search_full_hierarchy: bool = False, - ) -> dict[str, Asset.AFEventFrame]: + ) -> dict[str, EventFrame.AFEventFrame]: """Search for event frames in the database.""" _start_time = Time.to_af_time(start_time) _search_mode = SDK.AF.EventFrame.AFEventFrameSearchMode(int(search_mode)) return { - frame.Name: Asset.AFEventFrame(frame) + frame.Name: EventFrame.AFEventFrame(frame) for frame in SDK.AF.EventFrame.AFEventFrame.FindEventFrames( self.database, None, diff --git a/PIconnect/Asset.py b/PIconnect/Asset.py index d8136c8f..0635ae89 100644 --- a/PIconnect/Asset.py +++ b/PIconnect/Asset.py @@ -334,35 +334,6 @@ class AFElementList(_collections.NamedItemList[AFElement]): pass -class AFEventFrame(AFBaseElement[SDK.AF.EventFrame.AFEventFrame]): - """Container for PI AF Event Frames in the database.""" - - version = "0.1.0" - - @property - def event_frame(self) -> SDK.AF.EventFrame.AFEventFrame: - """Return the underlying AF Event Frame object.""" - return self.element - - @property - def parent(self) -> Self | None: - """Return the parent element of the current event frame, or None if it has none.""" - if not self.element.Parent: - return None - return 
self.__class__(self.element.Parent) - - @property - def children(self) -> dict[str, Self]: - """Return a dictionary of the direct child event frames of the current event frame.""" - return {c.Name: self.__class__(c) for c in self.element.EventFrames} - - -class AFEventFrameList(_collections.NamedItemList[AFEventFrame]): - """Container for a list of PIAFEventFrame objects.""" - - pass - - class AFTable: """Container for PI AF Tables in the database.""" diff --git a/PIconnect/EventFrame.py b/PIconnect/EventFrame.py new file mode 100644 index 00000000..f1c23fd9 --- /dev/null +++ b/PIconnect/EventFrame.py @@ -0,0 +1,70 @@ +"""Module for handling Event Frames.""" + +import enum +from typing import Self + +from PIconnect import AFSDK as dotnet +from PIconnect import Asset, _collections + + +class EventFrameSearchMode(enum.IntEnum): + """EventFrameSearchMode. + + EventFrameSearchMode defines the interpretation and direction from the start time + when searching for event frames. + + Detailed information is available at + :afsdk:`AF.EventFrame.AFEventFrameSearchMode `. + including a graphical display of event frames that are returned for a given search + mode. 
+ """ # noqa: E501 + + #: Uninitialized + NONE = 0 + #: Backward from start time, also known as starting before + BACKWARD_FROM_START_TIME = 1 + STARTING_BEFORE = 1 + #: Forward from start time, also known as starting after + FORWARD_FROM_START_TIME = 2 + STARTING_AFTER = 2 + #: Backward from end time, also known as ending before + BACKWARD_FROM_END_TIME = 3 + ENDING_BEFORE = 3 + #: Forward from end time, also known as ending after + FORWARD_FROM_END_TIME = 4 + ENDING_AFTER = 4 + #: Backward in progress, also known as starting before and in progress + BACKWARD_IN_PROGRESS = 5 + STARTING_BEFORE_IN_PROGRESS = 5 + #: Forward in progress, also known as starting after and in progress + FORWARD_IN_PROGRESS = 6 + STARTING_AFTER_IN_PROGRESS = 6 + + +class AFEventFrame(Asset.AFBaseElement[dotnet.AF.EventFrame.AFEventFrame]): + """Container for PI AF Event Frames in the database.""" + + version = "0.1.0" + + @property + def event_frame(self) -> dotnet.AF.EventFrame.AFEventFrame: + """Return the underlying AF Event Frame object.""" + return self.element + + @property + def parent(self) -> Self | None: + """Return the parent element of the current event frame, or None if it has none.""" + if not self.element.Parent: + return None + return self.__class__(self.element.Parent) + + @property + def children(self) -> dict[str, Self]: + """Return a dictionary of the direct child event frames of the current event frame.""" + return {c.Name: self.__class__(c) for c in self.element.EventFrames} + + +class AFEventFrameList(_collections.NamedItemList[AFEventFrame]): + """Container for a list of PIAFEventFrame objects.""" + + pass From 2b1b286cb67f0a4d05aefde0169bcf9001557070 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 14 Apr 2025 08:37:35 +0200 Subject: [PATCH 26/28] feat: Move AuthenticationMode to PI.py --- PIconnect/PI.py | 20 ++++++++++++++++++-- PIconnect/PIConsts.py | 13 ------------- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git 
a/PIconnect/PI.py b/PIconnect/PI.py index 3ba94a47..6908bfe2 100644 --- a/PIconnect/PI.py +++ b/PIconnect/PI.py @@ -1,5 +1,6 @@ """PI - Core containers for connections to PI databases.""" +import enum import warnings from typing import Any, cast @@ -10,11 +11,26 @@ __all__ = ["PIServer", "PIPoint"] -_DEFAULT_AUTH_MODE = PIConsts.AuthenticationMode.PI_USER_AUTHENTICATION class InitialisationWarning(UserWarning): pass +class AuthenticationMode(enum.IntEnum): + """AuthenticationMode indicates how a user authenticates to a PI Server. + + Detailed information is available at + :afsdk:`AF.PI.PIAuthenticationMode `. + """ + + #: Use Windows authentication when making a connection + WINDOWS_AUTHENTICATION = 0 + #: Use the PI User authentication mode when making a connection + PI_USER_AUTHENTICATION = 1 + + +_DEFAULT_AUTH_MODE = AuthenticationMode.PI_USER_AUTHENTICATION + + def _lookup_servers() -> dict[str, SDK.AF.PI.PIServer]: servers: dict[str, SDK.AF.PI.PIServer] = {} @@ -248,7 +264,7 @@ def __init__( username: str | None = None, password: str | None = None, domain: str | None = None, - authentication_mode: PIConsts.AuthenticationMode = _DEFAULT_AUTH_MODE, + authentication_mode: AuthenticationMode = _DEFAULT_AUTH_MODE, timeout: int | None = None, ) -> None: default_server = self.default_server() diff --git a/PIconnect/PIConsts.py b/PIconnect/PIConsts.py index 1ebfd88a..7fd64570 100644 --- a/PIconnect/PIConsts.py +++ b/PIconnect/PIConsts.py @@ -54,19 +54,6 @@ class BufferMode(enum.IntEnum): BUFFER = 2 -class AuthenticationMode(enum.IntEnum): - """AuthenticationMode indicates how a user authenticates to a PI Server. - - Detailed information is available at - :afsdk:`AF.PI.PIAuthenticationMode `. 
- """ - - #: Use Windows authentication when making a connection - WINDOWS_AUTHENTICATION = 0 - #: Use the PI User authentication mode when making a connection - PI_USER_AUTHENTICATION = 1 - - class CalculationBasis(enum.IntEnum): """CalculationBasis indicates how values should be weighted over a time range. From 2366d70b2128522965a39cb49f45924fba800e46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 14 Apr 2025 08:40:46 +0200 Subject: [PATCH 27/28] fix: create empty valuelist for empty collection --- PIconnect/Data.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/PIconnect/Data.py b/PIconnect/Data.py index 9abf3347..bc31df13 100644 --- a/PIconnect/Data.py +++ b/PIconnect/Data.py @@ -799,9 +799,13 @@ def align(df: pd.DataFrame) -> pd.DataFrame: @property def current_value(self) -> pd.Series: """Return the current values of all attributes in the collection.""" - idx, value = zip( - *[(element.name, element.current_value) for element in self._elements], strict=True - ) + if self._elements: + idx, value = zip( + *[(element.name, element.current_value) for element in self._elements], + strict=True, + ) + else: + idx, value = [], [] return pd.Series(value, index=idx) def filtered_summaries( From a0167dabf2906de36331c5863aad80f9ed583ac8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20Lapr=C3=A9?= Date: Mon, 14 Apr 2025 08:52:33 +0200 Subject: [PATCH 28/28] feat!: separate dotnet type hints and actual libraries As automatic detection of the libraries often failed and made testing very difficult the code was reorganised to require an explicit import of the .NET libraries. This way importing the PIconnect library can always succeed, regardless of the environment. The major downside is that it requires an explicit `PI.load_sdk()` call in every script. 
--- PIconnect/AF.py | 30 ++++----- PIconnect/AFSDK.py | 144 --------------------------------------- PIconnect/Asset.py | 83 +++++++++++------------ PIconnect/Data.py | 98 ++++++++++++++------------- PIconnect/EventFrame.py | 3 +- PIconnect/PI.py | 99 +++++++++++++-------------- PIconnect/PIAFBase.py | 146 ---------------------------------------- PIconnect/Search.py | 46 +++++++++---- PIconnect/Time.py | 18 ++--- PIconnect/__init__.py | 18 +++-- PIconnect/dotnet.py | 114 +++++++++++++++++++++++++++++++ tests/test_PI.py | 3 + tests/test_PIAF.py | 10 +-- tests/test_load_SDK.py | 52 -------------- tests/test_piconnect.py | 37 ---------- 15 files changed, 336 insertions(+), 565 deletions(-) delete mode 100644 PIconnect/AFSDK.py delete mode 100644 PIconnect/PIAFBase.py create mode 100644 PIconnect/dotnet.py delete mode 100644 tests/test_load_SDK.py delete mode 100644 tests/test_piconnect.py diff --git a/PIconnect/AF.py b/PIconnect/AF.py index bab62407..a4f35447 100644 --- a/PIconnect/AF.py +++ b/PIconnect/AF.py @@ -4,8 +4,7 @@ import warnings from typing import Any, Self -import PIconnect.AFSDK as SDK -from PIconnect import Asset, EventFrame, Search, Time +from PIconnect import Asset, EventFrame, Search, Time, dotnet _logger = logging.getLogger(__name__) _DEFAULT_EVENTFRAME_SEARCH_MODE = EventFrame.EventFrameSearchMode.STARTING_AFTER @@ -17,27 +16,27 @@ class AFDatabase: version = "0.3.0" @classmethod - def servers(cls) -> dict[str, SDK.AF.PISystem]: + def servers(cls) -> dict[str, dotnet.AF.PISystem]: """Return a dictionary of the known servers.""" - return {server.Name: server for server in SDK.AF.PISystems()} + return {server.Name: server for server in dotnet.lib.AF.PISystems()} @classmethod - def default_server(cls) -> SDK.AF.PISystem | None: + def default_server(cls) -> dotnet.AF.PISystem | None: """Return the default server.""" - if SDK.AF.PISystems().DefaultPISystem: - return SDK.AF.PISystems().DefaultPISystem - servers = SDK.AF.PISystems() + if 
dotnet.lib.AF.PISystems().DefaultPISystem: + return dotnet.lib.AF.PISystems().DefaultPISystem + servers = dotnet.lib.AF.PISystems() if servers.Count > 0: return next(iter(servers)) else: return None def __init__(self, server: str | None = None, database: str | None = None) -> None: - self.server: SDK.AF.PISystem = self._initialise_server(server) - self.database: SDK.AF.AFDatabase = self._initialise_database(database) + self.server = self._initialise_server(server) + self.database = self._initialise_database(database) self.search = Search.Search(self.database) - def _initialise_server(self, server: str | None) -> SDK.AF.PISystem: + def _initialise_server(self, server: str | None) -> dotnet.AF.PISystem: """Initialise the server connection.""" _logger.debug(f"Initialising server connection from {server!r}") default_server = self.default_server() @@ -47,7 +46,7 @@ def _initialise_server(self, server: str | None) -> SDK.AF.PISystem: _logger.debug(f"Using default server: {default_server.Name}") return default_server - if (_server := SDK.AF.PISystems()[server]) is not None: + if (_server := dotnet.lib.AF.PISystems()[server]) is not None: _logger.debug(_server) return _server else: @@ -60,7 +59,7 @@ def _initialise_server(self, server: str | None) -> SDK.AF.PISystem: warnings.warn(message=message, category=UserWarning, stacklevel=2) return default_server - def _initialise_database(self, database: str | None) -> SDK.AF.AFDatabase: + def _initialise_database(self, database: str | None) -> dotnet.AF.AFDatabase: def default_db(): default = self.server.Databases.DefaultDatabase if default is None: @@ -88,6 +87,7 @@ def __exit__( *args: Any, # type: ignore ) -> bool: """Close the PI AF server connection context.""" + _logger.log(0, f"Closing connection to {self} ({args=})") return False # Disabled disconnecting because garbage collection sometimes impedes # connecting to another server later @@ -131,10 +131,10 @@ def event_frames( ) -> dict[str, EventFrame.AFEventFrame]: 
"""Search for event frames in the database.""" _start_time = Time.to_af_time(start_time) - _search_mode = SDK.AF.EventFrame.AFEventFrameSearchMode(int(search_mode)) + _search_mode = dotnet.lib.AF.EventFrame.AFEventFrameSearchMode(int(search_mode)) return { frame.Name: EventFrame.AFEventFrame(frame) - for frame in SDK.AF.EventFrame.AFEventFrame.FindEventFrames( + for frame in dotnet.lib.AF.EventFrame.AFEventFrame.FindEventFrames( self.database, None, _start_time, diff --git a/PIconnect/AFSDK.py b/PIconnect/AFSDK.py deleted file mode 100644 index c1211a23..00000000 --- a/PIconnect/AFSDK.py +++ /dev/null @@ -1,144 +0,0 @@ -"""AFSDK - Loads the .NET libraries from the OSIsoft AF SDK.""" - -import dataclasses -import logging -import os -import pathlib -import sys -from types import ModuleType -from typing import TYPE_CHECKING, cast - -__all__ = ["AF", "System", "AF_SDK_VERSION"] - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from ._typing import AFType, SystemType -else: - AFType = ModuleType - SystemType = ModuleType - - -@dataclasses.dataclass(kw_only=True) -class PIConnector: - assembly_path: pathlib.Path - AF: AFType - System: SystemType - - # def PIAFSystems(self) -> dict[str, "PIAFSystem"]: - # return {srv.Name: PIAFSystem(srv) for srv in self.AF.PISystems} - - # def PIServers(self) -> dict[str, "PIServer"]: - # return {srv.Name: PIServer(srv) for srv in self.AF.PI.PIServers} - - # @property - # def version(self) -> str: - # return self.AF.PISystems().Version - - # def __str__(self) -> str: - # return f"PIConnector({self.assembly_path}, AF SDK version: {self.version})" - - -StrPath = str | pathlib.Path - - -def get_PI_connector(assembly_path: StrPath | None = None) -> PIConnector: - """Return a new instance of the PI connector.""" - full_path = _get_SDK_path(assembly_path) - if full_path is None: - if assembly_path: - raise ImportError(f"PIAF SDK not found at '{assembly_path}'") - raise ImportError( - "PIAF SDK not found, check installation " - "or 
pass valid path to directory containing SDK assembly." - ) - dotnetSDK = _get_dotnet_SDK(full_path) - return PIConnector(assembly_path=full_path, **dotnetSDK) - - -def _get_dotnet_SDK(full_path: pathlib.Path) -> dict[str, ModuleType]: - import clr # type: ignore - - sys.path.append(str(full_path)) - clr.AddReference("OSIsoft.AFSDK") # type: ignore ; pylint: disable=no-member - import System # type: ignore - from OSIsoft import AF # type: ignore - - _AF = cast(ModuleType, AF) - _System = cast(ModuleType, System) - return {"AF": _AF, "System": _System} - - -# pragma pylint: disable=import-outside-toplevel - - -def __fallback(): - import warnings - - warnings.warn( - "Can't import the PI AF SDK, running in test mode", - ImportWarning, - stacklevel=2, - ) - - from ._typing import AF as _af - from ._typing import AF_SDK_VERSION as _AF_SDK_version - from ._typing import System as _System - - return _af, _System, _AF_SDK_version - - -def _get_SDK_path(full_path: StrPath | None = None) -> pathlib.Path | None: - if full_path: - assembly_directories = [pathlib.Path(full_path)] - else: - installation_directories = { - os.getenv("PIHOME"), - "C:\\Program Files\\PIPC", - "C:\\Program Files (x86)\\PIPC", - } - assembly_directories = ( - pathlib.Path(path) / "AF\\PublicAssemblies\\4.0\\" - for path in installation_directories - if path is not None - ) - for AF_dir in assembly_directories: - logging.debug("Full path to potential SDK location: '%s'", AF_dir) - if AF_dir.is_dir(): - return AF_dir - - -if ( - os.getenv("GITHUB_ACTIONS", "false").lower() == "true" - or os.getenv("TF_BUILD", "false").lower() == "true" - or os.getenv("READTHEDOCS", "false").lower() == "true" -): - _af, _System, _AF_SDK_version = __fallback() -else: - import clr # type: ignore - - # Get the installation directory from the environment variable or fall back - # to the Windows default installation path - PIAF_SDK = _get_SDK_path() - if PIAF_SDK is None: - raise ImportError("PIAF SDK not found, check 
installation") - - sys.path.append(str(PIAF_SDK)) - - clr.AddReference("OSIsoft.AFSDK") # type: ignore ; pylint: disable=no-member - - import System as _System # type: ignore - from OSIsoft import AF as _af # type: ignore - - _AF_SDK_version = cast(str, _af.PISystems().Version) # type: ignore ; pylint: disable=no-member - print("OSIsoft(r) AF SDK Version: {}".format(_AF_SDK_version)) - - -if TYPE_CHECKING: - # This branch is separate from previous one as otherwise no typechecking takes place - # on the main logic. - from ._typing import AF, AF_SDK_VERSION, System -else: - AF = _af - System = _System - AF_SDK_VERSION = _AF_SDK_version diff --git a/PIconnect/Asset.py b/PIconnect/Asset.py index 0635ae89..557aa4af 100644 --- a/PIconnect/Asset.py +++ b/PIconnect/Asset.py @@ -6,8 +6,7 @@ import pandas as pd # type: ignore import PIconnect._typing.AF as _AFtyping -import PIconnect.AFSDK as SDK -from PIconnect import PI, Data, _collections +from PIconnect import PI, Data, _collections, dotnet __all__ = [ "AFDataReference", @@ -16,14 +15,14 @@ ] T = TypeVar("T") -ElementType = TypeVar("ElementType", bound=SDK.AF.Asset.AFBaseElement) +ElementType = TypeVar("ElementType", bound=dotnet.AF.Asset.AFBaseElement) @dataclasses.dataclass class AFDataReference: """Reference to the data source of an AF attribute.""" - data_reference: SDK.AF.Asset.AFDataReference + data_reference: dotnet.AF.Asset.AFDataReference @property def attribute(self) -> "AFAttribute": @@ -45,7 +44,7 @@ def pi_point(self) -> PI.PIPoint | None: class AFEnumerationValue: """Representation of an AF enumeration value.""" - def __init__(self, value: SDK.AF.Asset.AFEnumerationValue) -> None: + def __init__(self, value: dotnet.AF.Asset.AFEnumerationValue) -> None: self._value = value def __str__(self) -> str: @@ -73,7 +72,7 @@ def value(self) -> int: @overload @staticmethod def wrap_enumeration_value( - value: SDK.AF.Asset.AFEnumerationValue, + value: dotnet.AF.Asset.AFEnumerationValue, ) -> "AFEnumerationValue": ... 
@overload @staticmethod @@ -82,10 +81,10 @@ def wrap_enumeration_value( ) -> T: ... @staticmethod def wrap_enumeration_value( - value: T | SDK.AF.Asset.AFEnumerationValue, + value: T | dotnet.AF.Asset.AFEnumerationValue, ) -> "T | AFEnumerationValue": """Wrap the value in an AFEnumerationValue if it is an enumeration value.""" - if isinstance(value, SDK.AF.Asset.AFEnumerationValue): + if isinstance(value, dotnet.lib.AF.Asset.AFEnumerationValue): return AFEnumerationValue(value) return value @@ -93,7 +92,7 @@ def wrap_enumeration_value( class AFAttribute(Data.DataContainer): """Representation of an AF attribute.""" - def __init__(self, attribute: SDK.AF.Asset.AFAttribute) -> None: + def __init__(self, attribute: dotnet.AF.Asset.AFAttribute) -> None: super().__init__() self.attribute = attribute @@ -111,7 +110,7 @@ def stepped_data(self) -> bool: return self.attribute.Step @property - def element(self) -> SDK.AF.Asset.AFBaseElement: + def element(self) -> dotnet.AF.Asset.AFBaseElement: """Return the element to which the attribute belongs.""" return self.attribute.Element @@ -163,14 +162,14 @@ def _current_value(self) -> object: def _filtered_summaries( self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, + time_range: dotnet.AF.Time.AFTimeRange, + interval: dotnet.AF.Time.AFTimeSpan, filter_expression: str, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - filter_evaluation: SDK.AF.Data.AFSampleType, - filter_interval: SDK.AF.Time.AFTimeSpan, - time_type: SDK.AF.Data.AFTimestampCalculation, + summary_types: dotnet.AF.Data.AFSummaryTypes, + calculation_basis: dotnet.AF.Data.AFCalculationBasis, + filter_evaluation: dotnet.AF.Data.AFSampleType, + filter_interval: dotnet.AF.Time.AFTimeSpan, + time_type: dotnet.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummariesDict: return self.attribute.Data.FilteredSummaries( time_range, @@ -183,13 +182,13 @@ def _filtered_summaries( time_type, ) - 
def _interpolated_value(self, time: SDK.AF.Time.AFTime): + def _interpolated_value(self, time: dotnet.AF.Time.AFTime): """Return a single value for this PI Point.""" return self.attribute.Data.InterpolatedValue(time, self.attribute.DefaultUOM) def _recorded_value( - self, time: SDK.AF.Time.AFTime, retrieval_mode: SDK.AF.Data.AFRetrievalMode - ) -> SDK.AF.Asset.AFValue: + self, time: dotnet.AF.Time.AFTime, retrieval_mode: dotnet.AF.Data.AFRetrievalMode + ) -> dotnet.AF.Asset.AFValue: """Return a single value for this PI Point.""" return self.attribute.Data.RecordedValue( time, retrieval_mode, self.attribute.DefaultUOM @@ -197,10 +196,10 @@ def _recorded_value( def _recorded_values( self, - time_range: SDK.AF.Time.AFTimeRange, - boundary_type: SDK.AF.Data.AFBoundaryType, + time_range: dotnet.AF.Time.AFTimeRange, + boundary_type: dotnet.AF.Data.AFBoundaryType, filter_expression: str, - ) -> SDK.AF.Asset.AFValues: + ) -> dotnet.AF.Asset.AFValues: include_filtered_values = False return self.attribute.Data.RecordedValues( time_range, @@ -212,10 +211,10 @@ def _recorded_values( def _interpolated_values( self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, + time_range: dotnet.AF.Time.AFTimeRange, + interval: dotnet.AF.Time.AFTimeSpan, filter_expression: str, - ) -> SDK.AF.Asset.AFValues: + ) -> dotnet.AF.Asset.AFValues: """Query the pi af attribute, internal implementation.""" include_filtered_values = False return self.attribute.Data.InterpolatedValues( @@ -228,11 +227,11 @@ def _interpolated_values( def _summaries( self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - time_type: SDK.AF.Data.AFTimestampCalculation, + time_range: dotnet.AF.Time.AFTimeRange, + interval: dotnet.AF.Time.AFTimeSpan, + summary_types: dotnet.AF.Data.AFSummaryTypes, + calculation_basis: dotnet.AF.Data.AFCalculationBasis, + time_type: 
dotnet.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummariesDict: return self.attribute.Data.Summaries( time_range, interval, summary_types, calculation_basis, time_type @@ -240,10 +239,10 @@ def _summaries( def _summary( self, - time_range: SDK.AF.Time.AFTimeRange, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - time_type: SDK.AF.Data.AFTimestampCalculation, + time_range: dotnet.AF.Time.AFTimeRange, + summary_types: dotnet.AF.Data.AFSummaryTypes, + calculation_basis: dotnet.AF.Data.AFCalculationBasis, + time_type: dotnet.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummaryDict: return self.attribute.Data.Summary( time_range, summary_types, calculation_basis, time_type @@ -251,9 +250,9 @@ def _summary( def _update_value( self, - value: SDK.AF.Asset.AFValue, - update_mode: SDK.AF.Data.AFUpdateOption, - buffer_mode: SDK.AF.Data.AFBufferOption, + value: dotnet.AF.Asset.AFValue, + update_mode: dotnet.AF.Data.AFUpdateOption, + buffer_mode: dotnet.AF.Data.AFBufferOption, ) -> None: return self.attribute.Data.UpdateValue( value, @@ -291,7 +290,7 @@ def attributes(self) -> dict[str, AFAttribute]: return {a.Name: AFAttribute(a) for a in self.element.Attributes} @property - def categories(self) -> SDK.AF.AFCategories: + def categories(self) -> dotnet.AF.AFCategories: """Return the categories of the current element.""" return self.element.Categories @@ -306,7 +305,7 @@ def path(self) -> str: return self.element.GetPath() -class AFElement(AFBaseElement[SDK.AF.Asset.AFElement]): +class AFElement(AFBaseElement[dotnet.AF.Asset.AFElement]): """Container for PI AF elements in the database.""" version = "0.1.0" @@ -337,7 +336,7 @@ class AFElementList(_collections.NamedItemList[AFElement]): class AFTable: """Container for PI AF Tables in the database.""" - def __init__(self, table: SDK.AF.Asset.AFTable) -> None: + def __init__(self, table: dotnet.AF.Asset.AFTable) -> None: self._table = table @property @@ -346,7 
+345,7 @@ def columns(self) -> list[str]: return [col.ColumnName for col in self._table.Table.Columns] @property - def _rows(self) -> list[SDK.System.Data.DataRow]: + def _rows(self) -> list[dotnet.System.Data.DataRow]: return self._table.Table.Rows @property diff --git a/PIconnect/Data.py b/PIconnect/Data.py index bc31df13..8aef3eba 100644 --- a/PIconnect/Data.py +++ b/PIconnect/Data.py @@ -9,8 +9,7 @@ import pandas as pd # type: ignore import PIconnect._typing.AF as _AFtyping -import PIconnect.AFSDK as SDK -from PIconnect import Time, _collections +from PIconnect import Time, _collections, dotnet class BoundaryType(enum.IntEnum): @@ -291,11 +290,11 @@ def filtered_summaries( time_range = Time.to_af_time_range(start_time, end_time) _interval = Time.to_af_time_span(interval) _filter_expression = self._normalize_filter_expression(filter_expression) - _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) - _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) - _filter_evaluation = SDK.AF.Data.AFSampleType(int(filter_evaluation)) + _summary_types = dotnet.lib.AF.Data.AFSummaryTypes(int(summary_types)) + _calculation_basis = dotnet.lib.AF.Data.AFCalculationBasis(int(calculation_basis)) + _filter_evaluation = dotnet.lib.AF.Data.AFSampleType(int(filter_evaluation)) _filter_interval = Time.to_af_time_span(filter_interval) - _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) + _time_type = dotnet.lib.AF.Data.AFTimestampCalculation(int(time_type)) pivalues = self._filtered_summaries( time_range, _interval, @@ -325,14 +324,14 @@ def filtered_summaries( @abc.abstractmethod def _filtered_summaries( self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, + time_range: dotnet.AF.Time.AFTimeRange, + interval: dotnet.AF.Time.AFTimeSpan, filter_expression: str, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - filter_evaluation: SDK.AF.Data.AFSampleType, - 
filter_interval: SDK.AF.Time.AFTimeSpan, - time_type: SDK.AF.Data.AFTimestampCalculation, + summary_types: dotnet.AF.Data.AFSummaryTypes, + calculation_basis: dotnet.AF.Data.AFCalculationBasis, + filter_evaluation: dotnet.AF.Data.AFSampleType, + filter_interval: dotnet.AF.Time.AFTimeSpan, + time_type: dotnet.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummariesDict: pass @@ -361,7 +360,7 @@ def interpolated_value(self, time: Time.TimeLike) -> pd.Series: return result @abc.abstractmethod - def _interpolated_value(self, time: SDK.AF.Time.AFTime) -> SDK.AF.Asset.AFValue: + def _interpolated_value(self, time: dotnet.AF.Time.AFTime) -> dotnet.AF.Asset.AFValue: pass def interpolated_values( @@ -424,10 +423,10 @@ def interpolated_values( @abc.abstractmethod def _interpolated_values( self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, + time_range: dotnet.AF.Time.AFTimeRange, + interval: dotnet.AF.Time.AFTimeSpan, filter_expression: str, - ) -> SDK.AF.Asset.AFValues: + ) -> dotnet.AF.Asset.AFValues: pass def _normalize_filter_expression(self, filter_expression: str) -> str: @@ -455,7 +454,7 @@ def recorded_value( the index """ _time = Time.to_af_time(time) - _retrieval_mode = SDK.AF.Data.AFRetrievalMode(int(retrieval_mode)) + _retrieval_mode = dotnet.lib.AF.Data.AFRetrievalMode(int(retrieval_mode)) pivalue = self._recorded_value(_time, _retrieval_mode) result = pd.Series( data=[pivalue.Value], @@ -467,8 +466,8 @@ def recorded_value( @abc.abstractmethod def _recorded_value( - self, time: SDK.AF.Time.AFTime, retrieval_mode: SDK.AF.Data.AFRetrievalMode - ) -> SDK.AF.Asset.AFValue: + self, time: dotnet.AF.Time.AFTime, retrieval_mode: dotnet.AF.Data.AFRetrievalMode + ) -> dotnet.AF.Asset.AFValue: pass def recorded_values( @@ -518,7 +517,7 @@ def recorded_values( pd.Series: Timeseries of the values returned by the SDK """ time_range = Time.to_af_time_range(start_time, end_time) - _boundary_type = SDK.AF.Data.AFBoundaryType(int(boundary_type)) 
+ _boundary_type = dotnet.lib.AF.Data.AFBoundaryType(int(boundary_type)) _filter_expression = self._normalize_filter_expression(filter_expression) pivalues = self._recorded_values(time_range, _boundary_type, _filter_expression) @@ -539,10 +538,10 @@ def recorded_values( @abc.abstractmethod def _recorded_values( self, - time_range: SDK.AF.Time.AFTimeRange, - boundary_type: SDK.AF.Data.AFBoundaryType, + time_range: dotnet.AF.Time.AFTimeRange, + boundary_type: dotnet.AF.Data.AFBoundaryType, filter_expression: str, - ) -> SDK.AF.Asset.AFValues: + ) -> dotnet.AF.Asset.AFValues: """Abstract implementation for recorded values. The internals for retrieving recorded values from PI and PI-AF are @@ -586,9 +585,9 @@ def summary( and the summary name as column name. """ time_range = Time.to_af_time_range(start_time, end_time) - _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) - _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) - _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) + _summary_types = dotnet.lib.AF.Data.AFSummaryTypes(int(summary_types)) + _calculation_basis = dotnet.lib.AF.Data.AFCalculationBasis(int(calculation_basis)) + _time_type = dotnet.lib.AF.Data.AFTimestampCalculation(int(time_type)) pivalues = self._summary(time_range, _summary_types, _calculation_basis, _time_type) df = pd.DataFrame() for summary in pivalues: @@ -605,10 +604,10 @@ def summary( @abc.abstractmethod def _summary( self, - time_range: SDK.AF.Time.AFTimeRange, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - time_type: SDK.AF.Data.AFTimestampCalculation, + time_range: dotnet.AF.Time.AFTimeRange, + summary_types: dotnet.AF.Data.AFSummaryTypes, + calculation_basis: dotnet.AF.Data.AFCalculationBasis, + time_type: dotnet.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummaryDict: pass @@ -651,9 +650,9 @@ def summaries( """ time_range = Time.to_af_time_range(start_time, end_time) _interval 
= Time.to_af_time_span(interval) - _summary_types = SDK.AF.Data.AFSummaryTypes(int(summary_types)) - _calculation_basis = SDK.AF.Data.AFCalculationBasis(int(calculation_basis)) - _time_type = SDK.AF.Data.AFTimestampCalculation(int(time_type)) + _summary_types = dotnet.lib.AF.Data.AFSummaryTypes(int(summary_types)) + _calculation_basis = dotnet.lib.AF.Data.AFCalculationBasis(int(calculation_basis)) + _time_type = dotnet.lib.AF.Data.AFTimestampCalculation(int(time_type)) pivalues = self._summaries( time_range, _interval, _summary_types, _calculation_basis, _time_type ) @@ -676,11 +675,11 @@ def summaries( @abc.abstractmethod def _summaries( self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - time_type: SDK.AF.Data.AFTimestampCalculation, + time_range: dotnet.AF.Time.AFTimeRange, + interval: dotnet.AF.Time.AFTimeSpan, + summary_types: dotnet.AF.Data.AFSummaryTypes, + calculation_basis: dotnet.AF.Data.AFCalculationBasis, + time_type: dotnet.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummariesDict: pass @@ -711,20 +710,20 @@ def update_value( from . 
import Time as time_module if time is not None: - _value = SDK.AF.Asset.AFValue(value, time_module.to_af_time(time)) + _value = dotnet.lib.AF.Asset.AFValue(value, time_module.to_af_time(time)) else: - _value = SDK.AF.Asset.AFValue(value) + _value = dotnet.lib.AF.Asset.AFValue(value) - _update_mode = SDK.AF.Data.AFUpdateOption(int(update_mode)) - _buffer_mode = SDK.AF.Data.AFBufferOption(int(buffer_mode)) + _update_mode = dotnet.lib.AF.Data.AFUpdateOption(int(update_mode)) + _buffer_mode = dotnet.lib.AF.Data.AFBufferOption(int(buffer_mode)) self._update_value(_value, _update_mode, _buffer_mode) @abc.abstractmethod def _update_value( self, - value: SDK.AF.Asset.AFValue, - update_mode: SDK.AF.Data.AFUpdateOption, - buffer_mode: SDK.AF.Data.AFBufferOption, + value: dotnet.AF.Asset.AFValue, + update_mode: dotnet.AF.Data.AFUpdateOption, + buffer_mode: dotnet.AF.Data.AFBufferOption, ) -> None: pass @@ -794,7 +793,12 @@ def align(df: pd.DataFrame) -> pd.DataFrame: case "time": return df.interpolate(method="time", axis=0) # type: ignore - return align(pd.concat(map(apply_func, self._elements), axis=1)) + return align( + pd.concat( + [pd.DataFrame()] + [apply_func(e) for e in self._elements], + axis=1, + ) + ) @property def current_value(self) -> pd.Series: diff --git a/PIconnect/EventFrame.py b/PIconnect/EventFrame.py index f1c23fd9..b748835f 100644 --- a/PIconnect/EventFrame.py +++ b/PIconnect/EventFrame.py @@ -3,8 +3,7 @@ import enum from typing import Self -from PIconnect import AFSDK as dotnet -from PIconnect import Asset, _collections +from PIconnect import Asset, _collections, dotnet class EventFrameSearchMode(enum.IntEnum): diff --git a/PIconnect/PI.py b/PIconnect/PI.py index 6908bfe2..5826cd2c 100644 --- a/PIconnect/PI.py +++ b/PIconnect/PI.py @@ -5,9 +5,7 @@ from typing import Any, cast import PIconnect._typing.AF as _AFtyping -import PIconnect.AFSDK as SDK -from PIconnect import Data, PIConsts, Time -from PIconnect.AFSDK import System +from PIconnect import Data, 
Time, dotnet __all__ = ["PIServer", "PIPoint"] @@ -15,6 +13,7 @@ class InitialisationWarning(UserWarning): pass + class AuthenticationMode(enum.IntEnum): """AuthenticationMode indicates how a user authenticates to a PI Server. @@ -31,13 +30,13 @@ class AuthenticationMode(enum.IntEnum): _DEFAULT_AUTH_MODE = AuthenticationMode.PI_USER_AUTHENTICATION -def _lookup_servers() -> dict[str, SDK.AF.PI.PIServer]: - servers: dict[str, SDK.AF.PI.PIServer] = {} +def _lookup_servers() -> dict[str, dotnet.AF.PI.PIServer]: + servers: dict[str, dotnet.AF.PI.PIServer] = {} - for server in SDK.AF.PI.PIServers(): + for server in dotnet.lib.AF.PI.PIServers(): try: servers[server.Name] = server - except (Exception, System.Exception) as e: # type: ignore + except (Exception, dotnet.lib.System.Exception) as e: # type: ignore warnings.warn( f"Failed loading server data for {server.Name} " f"with error {type(cast(Exception, e)).__qualname__}", @@ -47,10 +46,10 @@ def _lookup_servers() -> dict[str, SDK.AF.PI.PIServer]: return servers -def _lookup_default_server() -> SDK.AF.PI.PIServer | None: +def _lookup_default_server() -> dotnet.AF.PI.PIServer | None: default_server = None try: - default_server = SDK.AF.PI.PIServers().DefaultPIServer + default_server = dotnet.lib.AF.PI.PIServers().DefaultPIServer except Exception: warnings.warn("Could not load the default PI Server", ResourceWarning, stacklevel=2) return default_server @@ -66,7 +65,7 @@ class PIPoint(Data.DataContainer): version = "0.3.0" - def __init__(self, pi_point: SDK.AF.PI.PIPoint) -> None: + def __init__(self, pi_point: dotnet.AF.PI.PIPoint) -> None: super().__init__() self.pi_point = pi_point self.tag = pi_point.Name @@ -134,14 +133,14 @@ def _current_value(self) -> Any: def _filtered_summaries( self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, + time_range: dotnet.AF.Time.AFTimeRange, + interval: dotnet.AF.Time.AFTimeSpan, filter_expression: str, - summary_types: SDK.AF.Data.AFSummaryTypes, - 
calculation_basis: SDK.AF.Data.AFCalculationBasis, - filter_evaluation: SDK.AF.Data.AFSampleType, - filter_interval: SDK.AF.Time.AFTimeSpan, - time_type: SDK.AF.Data.AFTimestampCalculation, + summary_types: dotnet.AF.Data.AFSummaryTypes, + calculation_basis: dotnet.AF.Data.AFCalculationBasis, + filter_evaluation: dotnet.AF.Data.AFSampleType, + filter_interval: dotnet.AF.Time.AFTimeSpan, + time_type: dotnet.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummariesDict: return self.pi_point.FilteredSummaries( time_range, @@ -154,16 +153,16 @@ def _filtered_summaries( time_type, ) - def _interpolated_value(self, time: SDK.AF.Time.AFTime) -> SDK.AF.Asset.AFValue: + def _interpolated_value(self, time: dotnet.AF.Time.AFTime) -> dotnet.AF.Asset.AFValue: """Return a single value for this PI Point.""" return self.pi_point.InterpolatedValue(time) def _interpolated_values( self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, + time_range: dotnet.AF.Time.AFTimeRange, + interval: dotnet.AF.Time.AFTimeSpan, filter_expression: str, - ) -> SDK.AF.Asset.AFValues: + ) -> dotnet.AF.Asset.AFValues: include_filtered_values = False return self.pi_point.InterpolatedValues( time_range, interval, filter_expression, include_filtered_values @@ -173,19 +172,19 @@ def _normalize_filter_expression(self, filter_expression: str) -> str: return filter_expression.replace("%tag%", self.tag) def _recorded_value( - self, time: SDK.AF.Time.AFTime, retrieval_mode: SDK.AF.Data.AFRetrievalMode - ) -> SDK.AF.Asset.AFValue: + self, time: dotnet.AF.Time.AFTime, retrieval_mode: dotnet.AF.Data.AFRetrievalMode + ) -> dotnet.AF.Asset.AFValue: """Return a single recorded value for this PI Point.""" return self.pi_point.RecordedValue( - time, SDK.AF.Data.AFRetrievalMode(int(retrieval_mode)) + time, dotnet.lib.AF.Data.AFRetrievalMode(int(retrieval_mode)) ) def _recorded_values( self, - time_range: SDK.AF.Time.AFTimeRange, - boundary_type: SDK.AF.Data.AFBoundaryType, + time_range: 
dotnet.AF.Time.AFTimeRange, + boundary_type: dotnet.AF.Data.AFBoundaryType, filter_expression: str, - ) -> SDK.AF.Asset.AFValues: + ) -> dotnet.AF.Asset.AFValues: include_filtered_values = False return self.pi_point.RecordedValues( time_range, boundary_type, filter_expression, include_filtered_values @@ -193,20 +192,20 @@ def _recorded_values( def _summary( self, - time_range: SDK.AF.Time.AFTimeRange, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - time_type: SDK.AF.Data.AFTimestampCalculation, + time_range: dotnet.AF.Time.AFTimeRange, + summary_types: dotnet.AF.Data.AFSummaryTypes, + calculation_basis: dotnet.AF.Data.AFCalculationBasis, + time_type: dotnet.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummaryDict: return self.pi_point.Summary(time_range, summary_types, calculation_basis, time_type) def _summaries( self, - time_range: SDK.AF.Time.AFTimeRange, - interval: SDK.AF.Time.AFTimeSpan, - summary_types: SDK.AF.Data.AFSummaryTypes, - calculation_basis: SDK.AF.Data.AFCalculationBasis, - time_type: SDK.AF.Data.AFTimestampCalculation, + time_range: dotnet.AF.Time.AFTimeRange, + interval: dotnet.AF.Time.AFTimeSpan, + summary_types: dotnet.AF.Data.AFSummaryTypes, + calculation_basis: dotnet.AF.Data.AFCalculationBasis, + time_type: dotnet.AF.Data.AFTimestampCalculation, ) -> _AFtyping.Data.SummariesDict: return self.pi_point.Summaries( time_range, interval, summary_types, calculation_basis, time_type @@ -214,9 +213,9 @@ def _summaries( def _update_value( self, - value: SDK.AF.Asset.AFValue, - update_mode: SDK.AF.Data.AFUpdateOption, - buffer_mode: SDK.AF.Data.AFBufferOption, + value: dotnet.AF.Asset.AFValue, + update_mode: dotnet.AF.Data.AFUpdateOption, + buffer_mode: dotnet.AF.Data.AFBufferOption, ) -> None: return self.pi_point.UpdateValue(value, update_mode, buffer_mode) @@ -241,18 +240,18 @@ class PIServer(object): # pylint: disable=useless-object-inheritance version = "0.2.2" #: Dictionary of known 
servers, as reported by the SDK - _servers: dict[str, SDK.AF.PI.PIServer] | None = None - _default_server: SDK.AF.PI.PIServer | None = None + _servers: dict[str, dotnet.AF.PI.PIServer] | None = None + _default_server: dotnet.AF.PI.PIServer | None = None @classmethod - def servers(cls) -> dict[str, SDK.AF.PI.PIServer]: + def servers(cls) -> dict[str, dotnet.AF.PI.PIServer]: """Return a dictionary of the known servers.""" if cls._servers is None: cls._servers = _lookup_servers() return cls._servers @classmethod - def default_server(cls) -> SDK.AF.PI.PIServer | None: + def default_server(cls) -> dotnet.AF.PI.PIServer | None: """Return the default server.""" if cls._default_server is None: cls._default_server = _lookup_default_server() @@ -273,7 +272,7 @@ def __init__( raise ValueError("No server was specified and no default server was found.") self.connection = default_server else: - if (_server := SDK.AF.PI.PIServers()[server]) is not None: + if (_server := dotnet.lib.AF.PI.PIServers()[server]) is not None: self.connection = _server else: if default_server is None: @@ -295,21 +294,23 @@ def __init__( "A domain can only specified together with a username and password." 
) if username: - secure_pass = System.Security.SecureString() + secure_pass = dotnet.lib.System.Security.SecureString() if password is not None: for c in password: secure_pass.AppendChar(c) cred = (username, secure_pass) + ((domain,) if domain else ()) self._credentials = ( - System.Net.NetworkCredential(cred[0], cred[1], *cred[2:]), - SDK.AF.PI.PIAuthenticationMode(int(authentication_mode)), + dotnet.lib.System.Net.NetworkCredential(cred[0], cred[1], *cred[2:]), + dotnet.lib.AF.PI.PIAuthenticationMode(int(authentication_mode)), ) else: self._credentials = None if timeout: # System.TimeSpan(hours, minutes, seconds) - self.connection.ConnectionInfo.OperationTimeOut = System.TimeSpan(0, 0, timeout) + self.connection.ConnectionInfo.OperationTimeOut = dotnet.lib.System.TimeSpan( + 0, 0, timeout + ) def __enter__(self): """Open connection context with the PI Server.""" @@ -357,7 +358,7 @@ def search(self, query: str | list[str], source: str | None = None) -> list[PIPo # 'got type ' + str(type(query))) return [ PIPoint(pi_point) - for pi_point in SDK.AF.PI.PIPoint.FindPIPoints( + for pi_point in dotnet.lib.AF.PI.PIPoint.FindPIPoints( self.connection, str(query), source, None ) ] diff --git a/PIconnect/PIAFBase.py b/PIconnect/PIAFBase.py deleted file mode 100644 index ffe66084..00000000 --- a/PIconnect/PIAFBase.py +++ /dev/null @@ -1,146 +0,0 @@ -"""Base element class for PI AF elements.""" - -from collections.abc import Iterator, Sequence -from typing import Generic, Self, TypeVar, overload - -import pandas as pd # type: ignore - -import PIconnect.AFSDK as SDK -import PIconnect.PIAFAttribute as PIattr -from PIconnect.AFSDK import System - -ElementType = TypeVar("ElementType", bound=SDK.AF.Asset.AFBaseElement) - - -class PIAFBaseElement(Generic[ElementType]): - """Container for PI AF elements in the database.""" - - version = "0.1.0" - - def __init__(self, element: ElementType) -> None: - self.element = element - - def __repr__(self) -> str: - """Return the string 
representation of the element.""" - return f"{self.__class__.__qualname__}({self.name})" - - @property - def name(self) -> str: - """Return the name of the current element.""" - return self.element.Name - - @property - def attributes(self) -> dict[str, PIattr.PIAFAttribute]: - """Return a dictionary of the attributes of the current element.""" - return {a.Name: PIattr.PIAFAttribute(a) for a in self.element.Attributes} - - @property - def categories(self) -> SDK.AF.AFCategories: - """Return the categories of the current element.""" - return self.element.Categories - - @property - def description(self) -> str: - """Return the description of the current element.""" - return self.element.Description - - -class PIAFElement(PIAFBaseElement[SDK.AF.Asset.AFElement]): - """Container for PI AF elements in the database.""" - - version = "0.1.0" - - @property - def parent(self) -> "PIAFElement | None": - """Return the parent element of the current element, or None if it has none.""" - if not self.element.Parent: - return None - return self.__class__(self.element.Parent) - - @property - def children(self) -> dict[str, "PIAFElement"]: - """Return a dictionary of the direct child elements of the current element.""" - return {c.Name: self.__class__(c) for c in self.element.Elements} - - def descendant(self, path: str) -> "PIAFElement": - """Return a descendant of the current element from an exact path.""" - return self.__class__(self.element.Elements.get_Item(path)) - - -class PIAFElementList(Sequence[PIAFElement]): - """Container for a list of PIAFElement objects.""" - - def __init__(self, elements: list[PIAFElement]) -> None: - self._elements = elements - - @overload - def __getitem__(self, index: int) -> PIAFElement: ... - @overload - def __getitem__(self, index: slice) -> Self: ... 
- def __getitem__(self, index: int | slice) -> PIAFElement | Self: - """Return the element at the specified index.""" - if isinstance(index, slice): - return self.__class__(self._elements[index]) - return self._elements[index] - - def __len__(self) -> int: - """Return the number of elements in the list.""" - return len(self._elements) - - def __iter__(self) -> Iterator[PIAFElement]: - """Return an iterator over the elements in the list.""" - yield from self._elements - - -class PIAFEventFrame(PIAFBaseElement[SDK.AF.EventFrame.AFEventFrame]): - """Container for PI AF Event Frames in the database.""" - - version = "0.1.0" - - @property - def event_frame(self) -> SDK.AF.EventFrame.AFEventFrame: - """Return the underlying AF Event Frame object.""" - return self.element - - @property - def parent(self) -> "PIAFEventFrame | None": - """Return the parent element of the current event frame, or None if it has none.""" - if not self.element.Parent: - return None - return self.__class__(self.element.Parent) - - @property - def children(self) -> dict[str, "PIAFEventFrame"]: - """Return a dictionary of the direct child event frames of the current event frame.""" - return {c.Name: self.__class__(c) for c in self.element.EventFrames} - - -class PIAFTable: - """Container for PI AF Tables in the database.""" - - def __init__(self, table: SDK.AF.Asset.AFTable) -> None: - self._table = table - - @property - def columns(self) -> list[str]: - """Return the names of the columns in the table.""" - return [col.ColumnName for col in self._table.Table.Columns] - - @property - def _rows(self) -> list[System.Data.DataRow]: - return self._table.Table.Rows - - @property - def name(self) -> str: - """Return the name of the table.""" - return self._table.Name - - @property - def shape(self) -> tuple[int, int]: - """Return the shape of the table.""" - return (len(self._rows), len(self.columns)) - - @property - def data(self) -> pd.DataFrame: - """Return the data in the table as a pandas 
DataFrame.""" - return pd.DataFrame([{col: row[col] for col in self.columns} for row in self._rows]) diff --git a/PIconnect/Search.py b/PIconnect/Search.py index b384a305..fd781cb7 100644 --- a/PIconnect/Search.py +++ b/PIconnect/Search.py @@ -5,9 +5,7 @@ from collections.abc import Iterator, Sequence from typing import Generic, TypeVar -import PIconnect.AFSDK as SDK - -from . import Asset +from . import Asset, EventFrame, dotnet SearchResultType = TypeVar( "SearchResultType", @@ -23,7 +21,7 @@ class SearchResult(Generic[AFSearchResultType, SearchResultType], abc.ABC): def __init__( self, - search: "SDK.AF.Search.AFSearch[AFSearchResultType]", + search: "dotnet.AF.Search.AFSearch[AFSearchResultType]", ) -> None: self.search = search self.result_type: type[SearchResultType] @@ -62,12 +60,12 @@ def to_list(self) -> Sequence[SearchResultType]: pass -class AttributeSearchResult(SearchResult[SDK.AF.Asset.AFAttribute, Asset.AFAttribute]): +class AttributeSearchResult(SearchResult[dotnet.AF.Asset.AFAttribute, Asset.AFAttribute]): """Container for attribute search results.""" def __init__( self, - search: SDK.AF.Search.AFAttributeSearch, + search: dotnet.AF.Search.AFAttributeSearch, ) -> None: super().__init__(search) self.result_type = Asset.AFAttribute @@ -77,12 +75,12 @@ def to_list(self) -> Asset.AFAttributeList: return Asset.AFAttributeList(list(self)) -class ElementSearchResult(SearchResult[SDK.AF.Asset.AFElement, Asset.AFElement]): +class ElementSearchResult(SearchResult[dotnet.AF.Asset.AFElement, Asset.AFElement]): """Container for attribute search results.""" def __init__( self, - search: SDK.AF.Search.AFElementSearch, + search: dotnet.AF.Search.AFElementSearch, ) -> None: super().__init__(search) self.result_type = Asset.AFElement @@ -92,24 +90,48 @@ def to_list(self) -> Asset.AFElementList: return Asset.AFElementList(list(self)) +class EventFrameSearchResult( + SearchResult[dotnet.AF.EventFrame.AFEventFrame, EventFrame.AFEventFrame] +): + """Container for 
attribute search results.""" + + def __init__( + self, + search: dotnet.AF.Search.AFEventFrameSearch, + ) -> None: + super().__init__(search) + self.result_type = EventFrame.AFEventFrame + + def to_list(self) -> EventFrame.AFEventFrameList: + """Return all items in the search result.""" + return EventFrame.AFEventFrameList(list(self)) + + class Search: """Search the AF database for different objects.""" - def __init__(self, database: SDK.AF.AFDatabase) -> None: + def __init__(self, database: dotnet.AF.AFDatabase) -> None: self.database = database def attributes( - self, query: str, query_name: str = "element_search" + self, query: str, query_name: str = "attribute_search" ) -> AttributeSearchResult: """Search for elements in the AF database.""" - search = SDK.AF.Search.AFAttributeSearch(self.database, query_name, query) + search = dotnet.lib.AF.Search.AFAttributeSearch(self.database, query_name, query) return AttributeSearchResult(search) def elements(self, query: str, query_name: str = "element_search") -> ElementSearchResult: """Search for elements in the AF database.""" - search = SDK.AF.Search.AFElementSearch(self.database, query_name, query) + search = dotnet.lib.AF.Search.AFElementSearch(self.database, query_name, query) return ElementSearchResult(search) + def event_frames( + self, query: str, query_name: str = "event_frame_search" + ) -> EventFrameSearchResult: + """Search for event frames in the AF database.""" + search = dotnet.lib.AF.Search.AFEventFrameSearch(self.database, query_name, query) + return EventFrameSearchResult(search) + def _descendant(self, path: str) -> Asset.AFElement: return Asset.AFElement(self.database.Elements.get_Item(path)) diff --git a/PIconnect/Time.py b/PIconnect/Time.py index 15e53c73..7a2f8202 100644 --- a/PIconnect/Time.py +++ b/PIconnect/Time.py @@ -5,14 +5,14 @@ import pandas as pd # type: ignore -from PIconnect import PIConfig -from PIconnect.AFSDK import AF, System +from PIconnect import dotnet +from PIconnect.config 
import PIConfig TimeLike = str | datetime.datetime IntervalLike = str | datetime.timedelta | pd.Timedelta -def to_af_time_range(start_time: TimeLike, end_time: TimeLike) -> AF.Time.AFTimeRange: +def to_af_time_range(start_time: TimeLike, end_time: TimeLike) -> dotnet.AF.Time.AFTimeRange: """Convert a combination of start and end time to a time range. Both `start_time` and `end_time` can be either a :any:`datetime.datetime` object or @@ -37,10 +37,10 @@ def to_af_time_range(start_time: TimeLike, end_time: TimeLike) -> AF.Time.AFTime if isinstance(end_time, datetime.datetime): end_time = end_time.isoformat() - return AF.Time.AFTimeRange.Parse(start_time, end_time) + return dotnet.lib.AF.Time.AFTimeRange.Parse(start_time, end_time) -def to_af_time(time: TimeLike) -> AF.Time.AFTime: +def to_af_time(time: TimeLike) -> dotnet.AF.Time.AFTime: """Convert a time to a AFTime value. Parameters @@ -55,10 +55,10 @@ def to_af_time(time: TimeLike) -> AF.Time.AFTime: if isinstance(time, datetime.datetime): time = time.isoformat() - return AF.Time.AFTime(time) + return dotnet.lib.AF.Time.AFTime(time) -def to_af_time_span(interval: IntervalLike | None) -> AF.Time.AFTimeSpan: +def to_af_time_span(interval: IntervalLike | None) -> dotnet.AF.Time.AFTimeSpan: """Convert a time interval to a AFTimeSpan value. Parameters @@ -73,10 +73,10 @@ def to_af_time_span(interval: IntervalLike | None) -> AF.Time.AFTimeSpan: if isinstance(interval, (datetime.timedelta, pd.Timedelta)): interval = f"{interval.total_seconds()}s" - return AF.Time.AFTimeSpan.Parse(interval) + return dotnet.lib.AF.Time.AFTimeSpan.Parse(interval) -def timestamp_to_index(timestamp: System.DateTime) -> datetime.datetime: +def timestamp_to_index(timestamp: dotnet.System.DateTime) -> datetime.datetime: """Convert AFTime object to datetime in local timezone. 
Parameters diff --git a/PIconnect/__init__.py b/PIconnect/__init__.py index 30b982c3..cbada959 100644 --- a/PIconnect/__init__.py +++ b/PIconnect/__init__.py @@ -2,21 +2,27 @@ from PIconnect.config import PIConfig # noqa: I001 isort: skip from PIconnect.AF import AFDatabase, PIAFDatabase -from PIconnect.AFSDK import AF, AF_SDK_VERSION +from PIconnect.dotnet import lib, load_SDK from PIconnect.PI import PIServer from . import _version + +def __getattr__(name: str): + """Lazy load the AF SDK.""" + match name: + case "__sdk_version": + return tuple(int(x) for x in lib.AF_SDK_VERSION.split("_")[0].split(".")) + case _: + raise AttributeError(f"module {__name__} has no attribute {name}") + + __version__ = _version.get_versions()["version"] -__sdk_version = tuple(int(x) for x in AF_SDK_VERSION.split(".")) __all__ = [ - "AF", - "AF_SDK_VERSION", "AFDatabase", "PIAFDatabase", "PIConfig", - "PIConnector", "PIServer", - "__sdk_version", + "load_SDK", ] diff --git a/PIconnect/dotnet.py b/PIconnect/dotnet.py new file mode 100644 index 00000000..38abd48a --- /dev/null +++ b/PIconnect/dotnet.py @@ -0,0 +1,114 @@ +"""AFSDK - Loads the .NET libraries from the OSIsoft AF SDK.""" + +import logging +import os +import pathlib +import sys +from typing import cast + +from ._typing import AF, AF_SDK_VERSION, AFType, System, SystemType + +__all__ = ["AF", "System", "AF_SDK_VERSION", "lib", "load_SDK"] + +logger = logging.getLogger(__name__) + +StrPath = str | pathlib.Path + + +class dotNET: + """Class to load the .NET libraries from the OSIsoft AF SDK.""" + + def __init__(self) -> None: + self._af: AFType | None = None + self._system: SystemType | None = None + self._af_sdk_version: str | None = None + + @property + def AF(self) -> AFType: + """Return the AF SDK.""" + if self._af is None: + raise ImportError(".NET libraries not loaded, call PIconnect.load_SDK() first.") + return self._af + + @property + def System(self) -> SystemType: + """Return the System SDK.""" + if self._system is 
None: + raise ImportError(".NET libraries not loaded, call PIconnect.load_SDK() first.") + return self._system + + @property + def AF_SDK_VERSION(self) -> str: + """Return the AF SDK version.""" + return self.AF.PISystems().Version + + def load(self, assembly_path: StrPath | None = None) -> None: + """Return a new instance of the PI connector.""" + full_path = _get_SDK_path(assembly_path) + if full_path is None: + if assembly_path: + raise ImportError(f"AF SDK not found at '{assembly_path}'") + raise ImportError( + "AF SDK not found, check installation " + "or pass valid path to directory containing SDK assembly." + ) + self._af, self._system = _get_dotnet_libraries(full_path) + self._af_sdk_version = self.AF.PISystems().Version + logger.info("Loaded AF SDK version %s", self._af_sdk_version) + + def load_test_SDK(self) -> None: + self._af = AF + self._system = System + self._af_sdk_version = AF_SDK_VERSION + + +def _get_dotnet_libraries(full_path: StrPath) -> tuple[AFType, SystemType]: + import clr # type: ignore + + sys.path.append(str(full_path)) + clr.AddReference("OSIsoft.AFSDK") # type: ignore ; pylint: disable=no-member + import System # type: ignore + from OSIsoft import AF # type: ignore + + _AF = cast(AFType, AF) + _System = cast(SystemType, System) + return _AF, _System + + +def _get_SDK_path(full_path: StrPath | None = None) -> pathlib.Path | None: + if full_path: + assembly_directories = [pathlib.Path(full_path)] + else: + installation_directories = { + os.getenv("PIHOME"), + "C:\\Program Files\\PIPC", + "C:\\Program Files (x86)\\PIPC", + } + assembly_directories = ( + pathlib.Path(path) / "AF\\PublicAssemblies\\4.0\\" + for path in installation_directories + if path is not None + ) + for AF_dir in assembly_directories: + logging.debug("Full path to potential SDK location: '%s'", AF_dir) + if AF_dir.is_dir(): + return AF_dir + + +lib = dotNET() + + +def load_SDK(assembly_path: StrPath | None = None) -> None: + """Load the AF SDK from the specified path. 
+ + Parameters + ---------- + assembly_path (str | Path, optional): Path to the AF SDK assembly. If None, the default + installation path will be used. + + Raises + ------ + ImportError: If the AF SDK cannot be found or loaded. + """ + global lib + lib.load(assembly_path) diff --git a/tests/test_PI.py b/tests/test_PI.py index edd8f0cf..9d2b93fe 100644 --- a/tests/test_PI.py +++ b/tests/test_PI.py @@ -7,9 +7,12 @@ import PIconnect as PI import PIconnect.PI as PI_ +from PIconnect import dotnet from .fakes import VirtualTestCase, pi_point +dotnet.lib.load_test_SDK() + __all__ = ["TestServer", "TestSearchPIPoints", "TestPIPoint", "pi_point"] diff --git a/tests/test_PIAF.py b/tests/test_PIAF.py index 2a8b3a3d..dd7db2fa 100644 --- a/tests/test_PIAF.py +++ b/tests/test_PIAF.py @@ -5,9 +5,11 @@ import pytest import PIconnect as PI -from PIconnect import Asset +from PIconnect import Asset, dotnet from PIconnect._typing import AF +dotnet.lib.load_test_SDK() + class TestAFDatabase: """Test connecting to the AF database.""" @@ -18,12 +20,12 @@ def test_connection(self): def test_server_name(self): """Test that the server reports the same name as which was connected to.""" - AFserver = PI.AF.PISystems().DefaultPISystem.Name - database = PI.AF.PISystems().DefaultPISystem.Databases.DefaultDatabase.Name + AFserver = dotnet.lib.AF.PISystems().DefaultPISystem.Name + database = dotnet.lib.AF.PISystems().DefaultPISystem.Databases.DefaultDatabase.Name server = PI.AFDatabase(AFserver, database) assert server.server_name == AFserver assert server.database_name == database - assert repr(server) == "PIAFDatabase(\\\\{s}\\{d})".format(s=AFserver, d=database) + assert repr(server) == "AFDatabase(\\\\{s}\\{d})".format(s=AFserver, d=database) def test_unknown_server_name(self): """Test that the server reports a warning for an unknown server.""" diff --git a/tests/test_load_SDK.py b/tests/test_load_SDK.py deleted file mode 100644 index 9cf3888a..00000000 --- a/tests/test_load_SDK.py +++ 
/dev/null @@ -1,52 +0,0 @@ -"""Test the loading of the SDK connector.""" - -import pathlib - -import pytest - -import PIconnect as PI - -from .common import skip_if_on_CI - -# Skip this test module on CI as it requires the real SDK to be installed -pytestmark = skip_if_on_CI - - -def test_load_SDK_without_arguments_raises_no_exception() -> None: - """Test that loading the SDK object without arguments raises no exception.""" - try: - PI.get_PI_connector() - except Exception as e: - pytest.fail(f"Exception raised: {e}") - - -def test_load_SDK_returns_PIconnect_object() -> None: - """Test that loading the SDK object returns a PIConnector.""" - assert isinstance(PI.get_PI_connector(), PI.PIConnector) - - -def test_load_SDK_with_a_valid_path_returns_SDK_object() -> None: - """Test that loading the SDK object with a path returns a PIConnector.""" - assembly_path = "c:\\Program Files (x86)\\PIPC\\AF\\PublicAssemblies\\4.0\\" - assert isinstance(PI.get_PI_connector(assembly_path), PI.PIConnector) - - -def test_load_SDK_with_a_valid_path_stores_path_in_connector() -> None: - """Test that loading the SDK object with a path stores the path in the connector.""" - assembly_path = "c:\\Program Files (x86)\\PIPC\\AF\\PublicAssemblies\\4.0\\" - connector = PI.get_PI_connector(assembly_path) - assert connector.assembly_path == pathlib.Path(assembly_path) - - -def test_load_SDK_with_an_invalid_path_raises_import_error() -> None: - """Test that loading the SDK object with an invalid path raises an ImportError.""" - assembly_path = "c:\\invalid\\path\\" - with pytest.raises(ImportError, match="PIAF SDK not found at .*"): - PI.get_PI_connector(assembly_path) - - -def test_load_SDK_with_valid_path_has_SDK_reference() -> None: - """Test that loading the SDK object with a valid path has a reference to the SDK.""" - assembly_path = "c:\\Program Files (x86)\\PIPC\\AF\\PublicAssemblies\\4.0\\" - connector = PI.get_PI_connector(assembly_path) - assert connector.AF is not None diff --git 
a/tests/test_piconnect.py b/tests/test_piconnect.py deleted file mode 100644 index 2f784e9b..00000000 --- a/tests/test_piconnect.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -"""Tests for `PIconnect` package.""" - - -# from click.testing import CliRunner - - -# from PIconnect import cli - - -# @pytest.fixture -# def response(): -# """Sample pytest fixture. - -# See more at: http://doc.pytest.org/en/latest/fixture.html -# """ -# # import requests -# # return requests.get('https://github.com/audreyr/cookiecutter-pypackage') - - -# def test_content(response): -# """Sample pytest test function with the pytest fixture as an argument.""" -# # from bs4 import BeautifulSoup -# # assert 'GitHub' in BeautifulSoup(response.content).title.string - - -# def test_command_line_interface(): -# """Test the CLI.""" -# runner = CliRunner() -# result = runner.invoke(cli.main) -# assert result.exit_code == 0 -# assert 'PIconnect.cli.main' in result.output -# help_result = runner.invoke(cli.main, ['--help']) -# assert help_result.exit_code == 0 -# assert '--help Show this message and exit.' in help_result.output