From 956434b77f23ba1e38670f38d3f0596a1f694a93 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Fri, 27 Mar 2026 18:28:21 +0530 Subject: [PATCH 01/25] Update pyproject.toml --- pyproject.toml | 106 ++--- uv.lock | 1178 ++++++++---------------------------------------- 2 files changed, 230 insertions(+), 1054 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 48a1f45..0def7e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,13 +1,15 @@ +[build-system] +requires = ["setuptools>=48"] +build-backend = "setuptools.build_meta" + [project] -name = "taskiq-postgresql" -version = "0.4.0" -description = "PostgreSQL integration for taskiq" +name = "taskiq-sqlalchemy" +version = "0.0.1" +description = "Fast and flexible SqlAlchemy integration for TaskIQ" readme = "README.md" authors = [ - { name = "jeffersonsilva-mb", email = "jefferson.silva@mb.com.br" }, - { name = "Jefferson Venceslau", email = "jeff.venceslau@gmail.com" } + { name = "Corridor Platforms", email = "postmaster@corridorplatforms.com" }, ] -dependencies = ["taskiq>=0.11.7"] license = "MIT" requires-python = ">= 3.9" classifiers = [ @@ -15,7 +17,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Framework :: AsyncIO", - "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", @@ -32,8 +33,6 @@ classifiers = [ "Typing :: Typed", "Operating System :: OS Independent", ] -homepage = "https://github.com/z22092/taskiq-postgresql" -repository = "https://github.com/z22092/taskiq-postgresql" keywords = [ "taskiq", "tasks", @@ -43,62 +42,54 @@ keywords = [ "asyncpg", "psqlpy", "psycopg3", + "sqlalchemy", ] -packages = [{ include = "taskiq_postgresql" }] + +dependencies = ["taskiq>=0.11.7", 'sqlalchemy>=2'] [project.optional-dependencies] -asyncpg = [ - "asyncpg>=0.30.0", -] +all = ["taskiq_sqlalchemy[postgresql]"] -psqlpy = [ +postgresql = [ + 
"asyncpg>=0.30.0", "psqlpy>=0.11.3", -] -psycopg = [ "psycopg[binary,pool]>=3.2.9", ] +[project.urls] +Homepage = "https://github.com/corridor/taskiq-sqlalchemy" +Documentation = "https://github.com/corridor/taskiq-sqlalchemy" -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" +[dependency-groups] +dev = [ + "ruff", + "pytest", + "pytest-cov", + "pytest-env", + "asyncpg-stubs", + "pre-commit", + "pytest-xdist", + "anyio", + "pytest-timeout", +] + +[tool.setuptools.packages.find] +include = ["taskiq_sqlalchemy", "taskiq_sqlalchemy.*"] [tool.uv] -managed = true -dev-dependencies = [ - "ruff>=0.6.9", - "pytest>=8.3.3", - "pytest-cov>=5.0.0", - "pytest-env>=1.1.5", - "asyncpg-stubs>=0.29.1", - "pre-commit>=4.0.1", - "pytest-xdist>=3.6.1", - "flake8>=7.1.1", - "autoflake>=2.3.1", - "yesqa>=1.5.0", - "anyio>=4.6.2.post1", - "wemake-python-styleguide>=0.19.2", - "black>=24.10.0", - "twine>=5.1.1", - "pytest-timeout>=2.4.0", +required-environments = [ + # This tells uv to treat 3.9/3.10 as separate envs not to be mixed with 3.11+ + # So, when upgrading - uv can take latest pkg for 3.11+ and older pkg for older python versions + # Ref: https://github.com/astral-sh/uv/issues/9425 + "python_version == '3.9'", + "python_version == '3.10'", ] -no-sources = false [tool.uv.pip] -generate-hashes = true - -[tool.hatch.metadata] -allow-direct-references = true - -[tool.hatch.build.targets.wheel] -packages = ["taskiq_postgresql"] - - -[tool.ruff.format] -exclude = [".venv/"] +universal = true [tool.ruff.lint] -select = [ +select = [ "E", # Error "F", # Pyflakes "W", # Pycodestyle @@ -163,20 +154,3 @@ extend-immutable-calls = ["taskiq_dependencies.Depends", "taskiq.TaskiqDepends"] "T201", # Use of assert detected "S603", # Use of subprocess.run detected ] - -[tool.bumpversion] -current_version = "0.4.0" -parse = "(?P\\d+)\\.(?P\\d+)\\.(?P\\d+)" -serialize = ["{major}.{minor}.{patch}"] -search = "{current_version}" -replace = "{new_version}" -regex = false 
-ignore_missing_version = false -tag = false -sign_tags = false -tag_name = "{new_version}" -tag_message = "Bump version: {current_version} → {new_version}" -allow_dirty = false -commit = false -message = "Bump version: {current_version} → {new_version}" -commit_args = "" diff --git a/uv.lock b/uv.lock index 45db20a..f046470 100644 --- a/uv.lock +++ b/uv.lock @@ -5,6 +5,10 @@ resolution-markers = [ "python_full_version >= '3.10'", "python_full_version < '3.10'", ] +required-markers = [ + "python_full_version < '3.10'", + "python_full_version == '3.10.*'", +] [[package]] name = "annotated-types" @@ -30,15 +34,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, ] -[[package]] -name = "astor" -version = "0.8.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/21/75b771132fee241dfe601d39ade629548a9626d1d39f333fde31bc46febe/astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e", size = 35090, upload-time = "2019-12-10T01:50:35.51Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/88/97eef84f48fa04fbd6750e62dcceafba6c63c81b7ac1420856c8dcc0a3f9/astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5", size = 27488, upload-time = "2019-12-10T01:50:33.628Z" }, -] - [[package]] name = "async-timeout" version = "5.0.1" @@ -112,149 +107,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/22/77a4a08cc9ef4f8bbb5e7ffbf4be008e596b535a3533a28c3465e9400d75/asyncpg_stubs-0.30.2-py3-none-any.whl", hash = "sha256:e57818bbaf10945a60ff3219da3c5ce97e1b424503b6a6f0a18db99797397cbb", size = 26929, upload-time = "2025-06-27T20:03:14.847Z" }, ] 
-[[package]] -name = "attrs" -version = "25.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, -] - -[[package]] -name = "autoflake" -version = "2.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyflakes" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2a/cb/486f912d6171bc5748c311a2984a301f4e2d054833a1da78485866c71522/autoflake-2.3.1.tar.gz", hash = "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e", size = 27642, upload-time = "2024-03-13T03:41:28.977Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/ee/3fd29bf416eb4f1c5579cf12bf393ae954099258abd7bde03c4f9716ef6b/autoflake-2.3.1-py3-none-any.whl", hash = "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840", size = 32483, upload-time = "2024-03-13T03:41:26.969Z" }, -] - -[[package]] -name = "backports-tarfile" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/86/72/cd9b395f25e290e633655a100af28cb253e4393396264a98bd5f5951d50f/backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991", size = 86406, upload-time = "2024-05-28T17:01:54.731Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181, upload-time = "2024-05-28T17:01:53.112Z" }, -] - -[[package]] -name = "bandit" -version = "1.8.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, - { name = "pyyaml", marker = "python_full_version < '3.10'" }, - { name = "rich", marker = "python_full_version < '3.10'" }, - { name = "stevedore", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fb/b5/7eb834e213d6f73aace21938e5e90425c92e5f42abafaf8a6d5d21beed51/bandit-1.8.6.tar.gz", hash = "sha256:dbfe9c25fc6961c2078593de55fd19f2559f9e45b99f1272341f5b95dea4e56b", size = 4240271, upload-time = "2025-07-06T03:10:50.9Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/48/ca/ba5f909b40ea12ec542d5d7bdd13ee31c4d65f3beed20211ef81c18fa1f3/bandit-1.8.6-py3-none-any.whl", hash = "sha256:3348e934d736fcdb68b6aa4030487097e23a501adf3e7827b63658df464dddd0", size = 133808, upload-time = "2025-07-06T03:10:49.134Z" }, -] - -[[package]] -name = "black" -version = "25.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "mypy-extensions" }, - { name = "packaging" }, - { name = "pathspec" }, - { name = "platformdirs" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419, upload-time = "2025-01-29T05:37:06.642Z" }, - { url = "https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080, upload-time = "2025-01-29T05:37:09.321Z" }, - { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886, upload-time = "2025-01-29T04:18:24.432Z" }, - { url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404, upload-time = "2025-01-29T04:19:04.296Z" }, - { url = "https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372, upload-time = "2025-01-29T05:37:11.71Z" }, - { url = 
"https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865, upload-time = "2025-01-29T05:37:14.309Z" }, - { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699, upload-time = "2025-01-29T04:18:17.688Z" }, - { url = "https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028, upload-time = "2025-01-29T04:18:51.711Z" }, - { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988, upload-time = "2025-01-29T05:37:16.707Z" }, - { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985, upload-time = "2025-01-29T05:37:18.273Z" }, - { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" }, - { url = 
"https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860, upload-time = "2025-01-29T04:19:12.944Z" }, - { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673, upload-time = "2025-01-29T05:37:20.574Z" }, - { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190, upload-time = "2025-01-29T05:37:22.106Z" }, - { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" }, - { url = "https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613, upload-time = "2025-01-29T04:19:27.63Z" }, - { url = "https://files.pythonhosted.org/packages/d3/b6/ae7507470a4830dbbfe875c701e84a4a5fb9183d1497834871a715716a92/black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0", size = 1628593, upload-time = "2025-01-29T05:37:23.672Z" }, - { url = 
"https://files.pythonhosted.org/packages/24/c1/ae36fa59a59f9363017ed397750a0cd79a470490860bc7713967d89cdd31/black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f", size = 1460000, upload-time = "2025-01-29T05:37:25.829Z" }, - { url = "https://files.pythonhosted.org/packages/ac/b6/98f832e7a6c49aa3a464760c67c7856363aa644f2f3c74cf7d624168607e/black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e", size = 1765963, upload-time = "2025-01-29T04:18:38.116Z" }, - { url = "https://files.pythonhosted.org/packages/ce/e9/2cb0a017eb7024f70e0d2e9bdb8c5a5b078c5740c7f8816065d06f04c557/black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355", size = 1419419, upload-time = "2025-01-29T04:18:30.191Z" }, - { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" }, -] - -[[package]] -name = "certifi" -version = "2025.8.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, -] - -[[package]] -name = "cffi" -version = "1.17.1" 
-source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pycparser" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024, upload-time = "2024-09-04T20:43:34.186Z" }, - { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188, upload-time = "2024-09-04T20:43:36.286Z" }, - { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571, upload-time = "2024-09-04T20:43:38.586Z" }, - { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687, upload-time = "2024-09-04T20:43:40.084Z" }, - { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211, upload-time = "2024-09-04T20:43:41.526Z" }, - { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325, upload-time = "2024-09-04T20:43:43.117Z" }, - { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784, upload-time = "2024-09-04T20:43:45.256Z" }, - { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564, upload-time = "2024-09-04T20:43:46.779Z" }, - { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" }, - { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" }, - { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" }, - { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" }, - { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" }, - { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" }, - { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" }, - { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" }, - { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, - { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, - { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, - { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, - { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, - { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, - { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, - { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", 
hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, - { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, - { url = "https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910, upload-time = "2024-09-04T20:45:05.315Z" }, - { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200, upload-time = "2024-09-04T20:45:06.903Z" }, - { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565, upload-time = "2024-09-04T20:45:08.975Z" }, - { url = "https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635, upload-time = "2024-09-04T20:45:10.64Z" }, - { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218, upload-time = "2024-09-04T20:45:12.366Z" }, - { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486, upload-time = "2024-09-04T20:45:13.935Z" }, - { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911, upload-time = "2024-09-04T20:45:15.696Z" }, - { url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632, upload-time = "2024-09-04T20:45:17.284Z" }, -] - [[package]] name = "cfgv" version = "3.4.0" @@ -264,111 +116,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, ] -[[package]] -name = "charset-normalizer" -version = "3.4.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, - { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, - { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, - { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, - { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, - { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, - { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, - { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, - { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, - { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, - { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, - { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 
204483, upload-time = "2025-08-09T07:55:53.12Z" }, - { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, - { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, - { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, - { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, - { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, - { url = 
"https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, - { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, - { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, - { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, - { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, - { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, - { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, - { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, - { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, - { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, - { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, - { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, - { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, - { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, - { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, - { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, - { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, - { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, - { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, - { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, - { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, - { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, - { url = 
"https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, - { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, - { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, - { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, - { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, - { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, - { url = 
"https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, - { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, - { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, - { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, - { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, - { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, - { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, - { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, - { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ca/9a0983dd5c8e9733565cf3db4df2b0a2e9a82659fd8aa2a868ac6e4a991f/charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05", size = 207520, upload-time = "2025-08-09T07:57:11.026Z" }, - { url = "https://files.pythonhosted.org/packages/39/c6/99271dc37243a4f925b09090493fb96c9333d7992c6187f5cfe5312008d2/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e", size = 147307, upload-time = "2025-08-09T07:57:12.4Z" }, - { url = "https://files.pythonhosted.org/packages/e4/69/132eab043356bba06eb333cc2cc60c6340857d0a2e4ca6dc2b51312886b3/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99", size = 160448, upload-time = "2025-08-09T07:57:13.712Z" }, - { url = "https://files.pythonhosted.org/packages/04/9a/914d294daa4809c57667b77470533e65def9c0be1ef8b4c1183a99170e9d/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7", size = 157758, upload-time = "2025-08-09T07:57:14.979Z" }, - { url = "https://files.pythonhosted.org/packages/b0/a8/6f5bcf1bcf63cb45625f7c5cadca026121ff8a6c8a3256d8d8cd59302663/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7", size = 152487, upload-time = "2025-08-09T07:57:16.332Z" }, - { url = "https://files.pythonhosted.org/packages/c4/72/d3d0e9592f4e504f9dea08b8db270821c909558c353dc3b457ed2509f2fb/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19", size = 150054, upload-time = "2025-08-09T07:57:17.576Z" }, - { url = "https://files.pythonhosted.org/packages/20/30/5f64fe3981677fe63fa987b80e6c01042eb5ff653ff7cec1b7bd9268e54e/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312", size = 161703, upload-time = "2025-08-09T07:57:20.012Z" }, - { url = "https://files.pythonhosted.org/packages/e1/ef/dd08b2cac9284fd59e70f7d97382c33a3d0a926e45b15fc21b3308324ffd/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc", size = 159096, upload-time = "2025-08-09T07:57:21.329Z" }, - { url = 
"https://files.pythonhosted.org/packages/45/8c/dcef87cfc2b3f002a6478f38906f9040302c68aebe21468090e39cde1445/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34", size = 153852, upload-time = "2025-08-09T07:57:22.608Z" }, - { url = "https://files.pythonhosted.org/packages/63/86/9cbd533bd37883d467fcd1bd491b3547a3532d0fbb46de2b99feeebf185e/charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = "sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432", size = 99840, upload-time = "2025-08-09T07:57:23.883Z" }, - { url = "https://files.pythonhosted.org/packages/ce/d6/7e805c8e5c46ff9729c49950acc4ee0aeb55efb8b3a56687658ad10c3216/charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca", size = 107438, upload-time = "2025-08-09T07:57:25.287Z" }, - { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, -] - -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = 
"sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, -] - -[[package]] -name = "click" -version = "8.2.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, -] - [[package]] name = "colorama" version = "0.4.6" @@ -478,52 +225,6 @@ toml = [ { name = "tomli", marker = "python_full_version <= '3.11'" }, ] -[[package]] -name = "cryptography" -version = "45.0.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d6/0d/d13399c94234ee8f3df384819dc67e0c5ce215fb751d567a55a1f4b028c7/cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719", size = 744949, upload-time = "2025-08-05T23:59:27.93Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/b6/cabd07410f222f32c8d55486c464f432808abaa1f12af9afcbe8f2f19030/cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f", size = 4206483, upload-time = "2025-08-05T23:58:27.132Z" }, 
- { url = "https://files.pythonhosted.org/packages/8b/9e/f9c7d36a38b1cfeb1cc74849aabe9bf817990f7603ff6eb485e0d70e0b27/cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf", size = 4429679, upload-time = "2025-08-05T23:58:29.152Z" }, - { url = "https://files.pythonhosted.org/packages/9c/2a/4434c17eb32ef30b254b9e8b9830cee4e516f08b47fdd291c5b1255b8101/cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5", size = 4210553, upload-time = "2025-08-05T23:58:30.596Z" }, - { url = "https://files.pythonhosted.org/packages/ef/1d/09a5df8e0c4b7970f5d1f3aff1b640df6d4be28a64cae970d56c6cf1c772/cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2", size = 3894499, upload-time = "2025-08-05T23:58:32.03Z" }, - { url = "https://files.pythonhosted.org/packages/79/62/120842ab20d9150a9d3a6bdc07fe2870384e82f5266d41c53b08a3a96b34/cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08", size = 4458484, upload-time = "2025-08-05T23:58:33.526Z" }, - { url = "https://files.pythonhosted.org/packages/fd/80/1bc3634d45ddfed0871bfba52cf8f1ad724761662a0c792b97a951fb1b30/cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402", size = 4210281, upload-time = "2025-08-05T23:58:35.445Z" }, - { url = "https://files.pythonhosted.org/packages/7d/fe/ffb12c2d83d0ee625f124880a1f023b5878f79da92e64c37962bbbe35f3f/cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42", size = 4456890, upload-time = "2025-08-05T23:58:36.923Z" }, - { url = 
"https://files.pythonhosted.org/packages/8c/8e/b3f3fe0dc82c77a0deb5f493b23311e09193f2268b77196ec0f7a36e3f3e/cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05", size = 4333247, upload-time = "2025-08-05T23:58:38.781Z" }, - { url = "https://files.pythonhosted.org/packages/b3/a6/c3ef2ab9e334da27a1d7b56af4a2417d77e7806b2e0f90d6267ce120d2e4/cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453", size = 4565045, upload-time = "2025-08-05T23:58:40.415Z" }, - { url = "https://files.pythonhosted.org/packages/98/c6/ea5173689e014f1a8470899cd5beeb358e22bb3cf5a876060f9d1ca78af4/cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394", size = 4198169, upload-time = "2025-08-05T23:58:47.121Z" }, - { url = "https://files.pythonhosted.org/packages/ba/73/b12995edc0c7e2311ffb57ebd3b351f6b268fed37d93bfc6f9856e01c473/cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9", size = 4421273, upload-time = "2025-08-05T23:58:48.557Z" }, - { url = "https://files.pythonhosted.org/packages/f7/6e/286894f6f71926bc0da67408c853dd9ba953f662dcb70993a59fd499f111/cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3", size = 4199211, upload-time = "2025-08-05T23:58:50.139Z" }, - { url = "https://files.pythonhosted.org/packages/de/34/a7f55e39b9623c5cb571d77a6a90387fe557908ffc44f6872f26ca8ae270/cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3", size = 3883732, upload-time = "2025-08-05T23:58:52.253Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/b9/c6d32edbcba0cd9f5df90f29ed46a65c4631c4fbe11187feb9169c6ff506/cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301", size = 4450655, upload-time = "2025-08-05T23:58:53.848Z" }, - { url = "https://files.pythonhosted.org/packages/77/2d/09b097adfdee0227cfd4c699b3375a842080f065bab9014248933497c3f9/cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5", size = 4198956, upload-time = "2025-08-05T23:58:55.209Z" }, - { url = "https://files.pythonhosted.org/packages/55/66/061ec6689207d54effdff535bbdf85cc380d32dd5377173085812565cf38/cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016", size = 4449859, upload-time = "2025-08-05T23:58:56.639Z" }, - { url = "https://files.pythonhosted.org/packages/41/ff/e7d5a2ad2d035e5a2af116e1a3adb4d8fcd0be92a18032917a089c6e5028/cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3", size = 4320254, upload-time = "2025-08-05T23:58:58.833Z" }, - { url = "https://files.pythonhosted.org/packages/82/27/092d311af22095d288f4db89fcaebadfb2f28944f3d790a4cf51fe5ddaeb/cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9", size = 4554815, upload-time = "2025-08-05T23:59:00.283Z" }, - { url = "https://files.pythonhosted.org/packages/ec/24/55fc238fcaa122855442604b8badb2d442367dfbd5a7ca4bb0bd346e263a/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:826b46dae41a1155a0c0e66fafba43d0ede1dc16570b95e40c4d83bfcf0a451d", size = 4141694, upload-time = "2025-08-05T23:59:06.66Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/7e/3ea4fa6fbe51baf3903806a0241c666b04c73d2358a3ecce09ebee8b9622/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cc4d66f5dc4dc37b89cfef1bd5044387f7a1f6f0abb490815628501909332d5d", size = 4375010, upload-time = "2025-08-05T23:59:08.14Z" }, - { url = "https://files.pythonhosted.org/packages/50/42/ec5a892d82d2a2c29f80fc19ced4ba669bca29f032faf6989609cff1f8dc/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f68f833a9d445cc49f01097d95c83a850795921b3f7cc6488731e69bde3288da", size = 4141377, upload-time = "2025-08-05T23:59:09.584Z" }, - { url = "https://files.pythonhosted.org/packages/e7/d7/246c4c973a22b9c2931999da953a2c19cae7c66b9154c2d62ffed811225e/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3b5bf5267e98661b9b888a9250d05b063220dfa917a8203744454573c7eb79db", size = 4374609, upload-time = "2025-08-05T23:59:11.923Z" }, - { url = "https://files.pythonhosted.org/packages/e3/fe/deea71e9f310a31fe0a6bfee670955152128d309ea2d1c79e2a5ae0f0401/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3de77e4df42ac8d4e4d6cdb342d989803ad37707cf8f3fbf7b088c9cbdd46427", size = 4153022, upload-time = "2025-08-05T23:59:16.954Z" }, - { url = "https://files.pythonhosted.org/packages/60/45/a77452f5e49cb580feedba6606d66ae7b82c128947aa754533b3d1bd44b0/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:599c8d7df950aa68baa7e98f7b73f4f414c9f02d0e8104a30c0182a07732638b", size = 4386802, upload-time = "2025-08-05T23:59:18.55Z" }, - { url = "https://files.pythonhosted.org/packages/a3/b9/a2f747d2acd5e3075fdf5c145c7c3568895daaa38b3b0c960ef830db6cdc/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:31a2b9a10530a1cb04ffd6aa1cd4d3be9ed49f7d77a4dafe198f3b382f41545c", size = 4152706, upload-time = "2025-08-05T23:59:20.044Z" }, - { url = 
"https://files.pythonhosted.org/packages/81/ec/381b3e8d0685a3f3f304a382aa3dfce36af2d76467da0fd4bb21ddccc7b2/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e5b3dda1b00fb41da3af4c5ef3f922a200e33ee5ba0f0bc9ecf0b0c173958385", size = 4386740, upload-time = "2025-08-05T23:59:21.525Z" }, -] - -[[package]] -name = "darglint" -version = "1.8.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/2c/86e8549e349388c18ca8a4ff8661bb5347da550f598656d32a98eaaf91cc/darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da", size = 74435, upload-time = "2021-10-18T03:40:37.283Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/28/85d1e0396d64422c5218d68e5cdcc53153aa8a2c83c7dbc3ee1502adf3a1/darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d", size = 120767, upload-time = "2021-10-18T03:40:35.034Z" }, -] - [[package]] name = "distlib" version = "0.4.0" @@ -533,24 +234,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] -[[package]] -name = "docutils" -version = "0.22" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e9/86/5b41c32ecedcfdb4c77b28b6cb14234f252075f8cdb254531727a35547dd/docutils-0.22.tar.gz", hash = "sha256:ba9d57750e92331ebe7c08a1bbf7a7f8143b86c476acd51528b042216a6aad0f", size = 2277984, upload-time = "2025-07-29T15:20:31.06Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/44/57/8db39bc5f98f042e0153b1de9fb88e1a409a33cda4dd7f723c2ed71e01f6/docutils-0.22-py3-none-any.whl", hash = 
"sha256:4ed966a0e96a0477d852f7af31bdcb3adc049fbb35ccba358c2ea8a03287615e", size = 630709, upload-time = "2025-07-29T15:20:28.335Z" }, -] - -[[package]] -name = "eradicate" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7a/e1/665186aedea2d6ebf0415cf97c0629c8123a721e7afc417deeade5598215/eradicate-2.3.0.tar.gz", hash = "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37", size = 8536, upload-time = "2023-06-09T06:31:41.814Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/c2/533e1338429aeba1f089566a2314d69d3e78ab57a73006f16a923bf2b24c/eradicate-2.3.0-py3-none-any.whl", hash = "sha256:2b29b3dd27171f209e4ddd8204b70c02f0682ae95eecb353f10e8d72b149c63e", size = 6113, upload-time = "2023-06-09T06:31:40.209Z" }, -] - [[package]] name = "exceptiongroup" version = "1.3.0" @@ -582,180 +265,111 @@ wheels = [ ] [[package]] -name = "flake8" -version = "7.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mccabe" }, - { name = "pycodestyle" }, - { name = "pyflakes" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9b/af/fbfe3c4b5a657d79e5c47a2827a362f9e1b763336a52f926126aa6dc7123/flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872", size = 48326, upload-time = "2025-06-20T19:31:35.838Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/56/13ab06b4f93ca7cac71078fbe37fcea175d3216f31f85c3168a6bbd0bb9a/flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e", size = 57922, upload-time = "2025-06-20T19:31:34.425Z" }, -] - -[[package]] -name = "flake8-bandit" -version = "4.1.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "bandit", marker = "python_full_version < '3.10'" }, - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/77/1c/4f66a7a52a246d6c64312b5c40da3af3630cd60b27af81b137796af3c0bc/flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e", size = 5403, upload-time = "2022-08-29T13:48:41.225Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/5f/55bab0ac89f9ad9f4c6e38087faa80c252daec4ccb7776b4dac216ca9e3f/flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d", size = 4828, upload-time = "2022-08-29T13:48:39.737Z" }, -] - -[[package]] -name = "flake8-broken-line" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/30/5e/eca08446205afb79e74b6af8e227f06f0b1a26ae892708adbc4e65ccaa86/flake8_broken_line-1.0.0.tar.gz", hash = "sha256:e2c6a17f8d9a129e99c1320fce89b33843e2963871025c4c2bb7b8b8d8732a85", size = 3458, upload-time = "2023-05-31T10:09:11.716Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/ff/57d0101933527b5202cc9f80bc15aa85b207916c722a00e7adde0e33f413/flake8_broken_line-1.0.0-py3-none-any.whl", hash = "sha256:96c964336024a5030dc536a9f6fb02aa679e2d2a6b35b80a558b5136c35832a9", size = 4202, upload-time = "2023-05-31T10:09:10.027Z" }, -] - -[[package]] -name = "flake8-bugbear" -version = "24.12.12" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs", marker = "python_full_version < '3.10'" }, - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c7/25/48ba712ff589b0149f21135234f9bb45c14d6689acc6151b5e2ff8ac2ae9/flake8_bugbear-24.12.12.tar.gz", hash = "sha256:46273cef0a6b6ff48ca2d69e472f41420a42a46e24b2a8972e4f0d6733d12a64", size = 82907, upload-time = "2024-12-12T16:49:26.307Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b9/21/0a875f75fbe4008bd171e2fefa413536258fe6b4cfaaa087986de74588f4/flake8_bugbear-24.12.12-py3-none-any.whl", hash = "sha256:1b6967436f65ca22a42e5373aaa6f2d87966ade9aa38d4baf2a1be550767545e", size = 36664, upload-time = "2024-12-12T16:49:23.584Z" }, -] - -[[package]] -name = "flake8-commas" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0e/83/814bc8eb02b8883bc004384a1fb8b1f45b4a0b892e579fec7c80a9368526/flake8-commas-2.1.0.tar.gz", hash = "sha256:940441ab8ee544df564ae3b3f49f20462d75d5c7cac2463e0b27436e2050f263", size = 8484, upload-time = "2021-10-13T19:25:41.6Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/0d/41895badcdbbe84893b95c114d5bd4345d69c9d5645a42857f1ccb84d556/flake8_commas-2.1.0-py2.py3-none-any.whl", hash = "sha256:ebb96c31e01d0ef1d0685a21f3f0e2f8153a0381430e748bf0bbbb5d5b453d54", size = 7591, upload-time = "2021-10-13T19:25:39.472Z" }, -] - -[[package]] -name = "flake8-comprehensions" -version = "3.16.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6d/7d/7ffaa876ca5b330fc244287208dce1d12515b88a69488ea90ab58c94501d/flake8_comprehensions-3.16.0.tar.gz", hash = "sha256:9cbf789905a8f03f9d350fb82b17b264d9a16c7ce3542b2a7b871ef568cafabe", size = 12991, upload-time = "2024-10-27T21:51:18.029Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/bf/0cf8d3c9a233620840f209490c4907d7d416d066557396ebda678c58de09/flake8_comprehensions-3.16.0-py3-none-any.whl", hash = "sha256:7c1eadc9d22e765f39857798febe7766b4d9c519793c6c149e3e13bf99693f70", size = 8169, upload-time = "2024-10-27T21:51:16.464Z" }, -] - -[[package]] -name = "flake8-debugger" -version = "4.1.2" -source = { registry 
= "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "pycodestyle", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1f/1e/f9bdb98f3df5dceaa2287a8fb5801a22681dbd677a8759704083357e27c4/flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840", size = 7801, upload-time = "2022-04-30T16:50:55.71Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/6b/8e5d248949798644b3d8e5f598ed5d1da82d8f157d4bafd78f45247f1690/flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf", size = 7909, upload-time = "2022-04-30T16:50:57.294Z" }, -] - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "pydocstyle", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/93/24/f839e3a06e18f4643ccb81370909a497297909f15106e6af2fecdef46894/flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af", size = 5995, upload-time = "2023-01-25T14:27:13.903Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/7d/76a278fa43250441ed9300c344f889c7fb1817080c8fb8996b840bf421c2/flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75", size = 4994, upload-time = "2023-01-25T14:27:12.32Z" }, -] - -[[package]] -name = "flake8-eradicate" -version = "1.5.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs", marker = "python_full_version < '3.10'" }, - { name = "eradicate", marker = "python_full_version < '3.10'" }, - { name = "flake8", marker = "python_full_version < '3.10'" }, -] 
-sdist = { url = "https://files.pythonhosted.org/packages/9e/72/a3975dfa4287396e9fb8fc2b4ee94a80d0809babbf92abed5af9c8e29c95/flake8_eradicate-1.5.0.tar.gz", hash = "sha256:aee636cb9ecb5594a7cd92d67ad73eb69909e5cc7bd81710cf9d00970f3983a6", size = 4508, upload-time = "2023-05-31T09:57:15.484Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/a9/1319b9e5eeb7d948f6db0b0ed4209bae0ec12d30ab3ee43a0ac1d8ce455f/flake8_eradicate-1.5.0-py3-none-any.whl", hash = "sha256:18acc922ad7de623f5247c7d5595da068525ec5437dd53b22ec2259b96ce9d22", size = 5144, upload-time = "2023-05-31T09:57:13.589Z" }, -] - -[[package]] -name = "flake8-isort" -version = "6.1.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "isort", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7c/ea/2f2662d4fefa6ab335c7119cb28e5bc57c935a86a69a7f72df3ea5fe7b2c/flake8_isort-6.1.2.tar.gz", hash = "sha256:9d0452acdf0e1cd6f2d6848e3605e66b54d920e73471fb4744eef0f93df62d5d", size = 17756, upload-time = "2025-01-29T12:29:25.753Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/10/295e982874f2a94f309baf7c45f852a191c87d59bd846b1701332303783f/flake8_isort-6.1.2-py3-none-any.whl", hash = "sha256:549197dedf0273502fb74f04c080beed9e62a7eb70244610413d27052e78bd3b", size = 18385, upload-time = "2025-01-29T12:29:23.46Z" }, -] - -[[package]] -name = "flake8-quotes" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "setuptools", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/dd/57/a173e3eb86072b7ee77650aca496b15d6886367d257f58ea9de5276e330a/flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c", size = 14107, upload-time = 
"2024-02-10T21:58:22.357Z" } - -[[package]] -name = "flake8-rst-docstrings" -version = "0.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "restructuredtext-lint", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/18/d6/a3e5f86f984d6d8caa1705deffdae84c710e594ab5c1985e26c5e1bb05db/flake8_rst_docstrings-0.3.1.tar.gz", hash = "sha256:26dcc1338caf985990677696a8a6a274f73a0c6845b85f567befd3b648db78e2", size = 12867, upload-time = "2025-04-29T11:34:56.437Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/a7/ef9a2b35987d4d4b2b6213891915d0d7242ccc034861dec63540b81f3a13/flake8_rst_docstrings-0.3.1-py3-none-any.whl", hash = "sha256:ed831afca7ee47851e2162d5fa726b823b446fd46085c2164d7979ae5d9a96d7", size = 11049, upload-time = "2025-04-29T11:34:54.861Z" }, -] - -[[package]] -name = "flake8-string-format" -version = "0.3.0" +name = "greenlet" +version = "3.2.5" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/68/db/500e114a9ee115b03a21a2581c227fd932a0f50c4ae8fee514ef9a373cf4/flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2", size = 6495, upload-time = "2020-02-16T15:27:51.045Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/22/e5f4ccc41dda8db61cf3bb7a93549f9ae8e1dd10547b3d71cc8483a0b437/flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af", size = 7266, upload-time = "2020-02-16T15:27:49.327Z" }, +resolution-markers = [ + "python_full_version < '3.10'", ] - -[[package]] -name = "id" -version = "1.5.0" +sdist = { url = 
"https://files.pythonhosted.org/packages/b0/f5/3e9eafb4030588337b2a2ae4df46212956854e9069c07b53aa3caabafd47/greenlet-3.2.5.tar.gz", hash = "sha256:c816554eb33e7ecf9ba4defcb1fd8c994e59be6b4110da15480b3e7447ea4286", size = 191501, upload-time = "2026-02-20T20:08:51.539Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/d6/b3db928fc329b1b19ba32ffe143d2305f3aaafc583f5e1074c74ec445189/greenlet-3.2.5-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:34cc7cf8ab6f4b85298b01e13e881265ee7b3c1daf6bc10a2944abc15d4f87c3", size = 275803, upload-time = "2026-02-20T20:06:42.541Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ff/ab0ad4ff3d9e1faa266de4f6c79763b33fccd9265995f2940192494cc0ec/greenlet-3.2.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c11fe0cfb0ce33132f0b5d27eeadd1954976a82e5e9b60909ec2c4b884a55382", size = 633556, upload-time = "2026-02-20T20:30:41.594Z" }, + { url = "https://files.pythonhosted.org/packages/da/dd/7b3ac77099a1671af8077ecedb12c9a1be1310e4c35bb69fd34c18ab6093/greenlet-3.2.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a145f4b1c4ed7a2c94561b7f18b4beec3d3fb6f0580db22f7ed1d544e0620b34", size = 644943, upload-time = "2026-02-20T20:37:23.084Z" }, + { url = "https://files.pythonhosted.org/packages/0f/36/84630e9ff1dfc8b7690957c0f77834a84eabdbd9c4977c3a2d0cbd5325c2/greenlet-3.2.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc1d01bdd67db3e5711e6246e451d7a0f75fae7bbf40adde129296a7f9aa7cc9", size = 639841, upload-time = "2026-02-20T20:07:17.473Z" }, + { url = "https://files.pythonhosted.org/packages/12/c4/6a2ee6c676dea7a05a3c3c1291fbc8ea44f26456b0accc891471293825af/greenlet-3.2.5-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd593db7ee1fa8a513a48a404f8cc4126998a48025e3f5cbbc68d51be0a6bf66", size = 588813, upload-time = "2026-02-20T20:07:56.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/c0/75e75c2c993aa850292561ec80f5c263e3924e5843aa95a38716df69304c/greenlet-3.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ac8db07bced2c39b987bba13a3195f8157b0cfbce54488f86919321444a1cc3c", size = 1117377, upload-time = "2026-02-20T20:32:48.452Z" }, + { url = "https://files.pythonhosted.org/packages/ee/03/e38ebf9024a0873fe8f60f5b7bc36bfb3be5e13efe4d798240f2d1f0fb73/greenlet-3.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4544ab2cfd5912e42458b13516429e029f87d8bbcdc8d5506db772941ae12493", size = 1141246, upload-time = "2026-02-20T20:06:23.576Z" }, + { url = "https://files.pythonhosted.org/packages/d8/7b/c6e1192c795c0c12871e199237909a6bd35757d92c8472c7c019959b8637/greenlet-3.2.5-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:acabf468466d18017e2ae5fbf1a5a88b86b48983e550e1ae1437b69a83d9f4ac", size = 276916, upload-time = "2026-02-20T20:06:18.166Z" }, + { url = "https://files.pythonhosted.org/packages/3e/b6/9887b559f3e1952d23052ec352e9977e808a2246c7cb8282a38337221e88/greenlet-3.2.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:472841de62d60f2cafd60edd4fd4dd7253eb70e6eaf14b8990dcaf177f4af957", size = 636107, upload-time = "2026-02-20T20:30:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/8a/be/e3e48b63bbc27d660fa1d98aecb64906b90a12e686a436169c1330ef34b2/greenlet-3.2.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7d951e7d628a6e8b68af469f0fe4f100ef64c4054abeb9cdafbfaa30a920c950", size = 648240, upload-time = "2026-02-20T20:37:24.608Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ac/e731ed62576e91e533b36d0d97325adc2786674ab9e48ed8a6a24f4ef4e9/greenlet-3.2.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8317d732e2ae0935d9ed2af2ea876fa714cf6f3b887a31ca150b54329b0a6e9", size = 643313, upload-time = "2026-02-20T20:07:19.012Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/64/99e5cdceb494bd4c1341c45b93f322601d2c8a5e1e4d1c7a2d24c5ed0570/greenlet-3.2.5-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce8aed6fdd5e07d3cbb988cbdc188266a4eb9e1a52db9ef5c6526e59962d3933", size = 591295, upload-time = "2026-02-20T20:07:57.286Z" }, + { url = "https://files.pythonhosted.org/packages/ee/e9/968e11f388c2b8792d3b8b40a57984c894a3b4745dae3662dce722653bc5/greenlet-3.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:60c06b502d56d5451f60ca665691da29f79ed95e247bcf8ce5024d7bbe64acb9", size = 1120277, upload-time = "2026-02-20T20:32:50.103Z" }, + { url = "https://files.pythonhosted.org/packages/cb/2c/b5f2c4c68d753dce08218dc5a6b21d82238fdfdc44309032f6fe24d285e6/greenlet-3.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d2a78e6f1bf3f1672df91e212a2f8314e1e7c922f065d14cbad4bc815059467", size = 1145746, upload-time = "2026-02-20T20:06:26.296Z" }, + { url = "https://files.pythonhosted.org/packages/ad/32/022b21523eee713e7550162d5ca6aed23f913cc2c6232b154b9fd9badc07/greenlet-3.2.5-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2acb30e77042f747ca81f0a10cc153296567e92e666c5e1b117f4595afd43352", size = 278412, upload-time = "2026-02-20T20:03:15.02Z" }, + { url = "https://files.pythonhosted.org/packages/90/c5/8a3b0ed3cc34d8b988a44349437dfa0941f9c23ac108175f7b4ccea97111/greenlet-3.2.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:393c03c26c865f17f31d8db2f09603fadbe0581ad85a5d5908b131549fc38217", size = 644616, upload-time = "2026-02-20T20:30:44.823Z" }, + { url = "https://files.pythonhosted.org/packages/b1/2c/2627bea183554695016af6cae93d7474fa90f61e5a6601a84ae7841cb720/greenlet-3.2.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:04e6a202cde56043fd355fefd1552c4caa5c087528121871d950eb4f1b51fa99", size = 658813, upload-time = "2026-02-20T20:37:26.255Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/1b/75a5aeff487a26ba427a3837da6372f1fe6f2a9c6b2898e28ac99d491c11/greenlet-3.2.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:45fcea7b697b91290b36eafc12fff479aca6ba6500d98ef6f34d5634c7119cbe", size = 655426, upload-time = "2026-02-20T20:07:20.124Z" }, + { url = "https://files.pythonhosted.org/packages/53/91/9b5dfb4f3c88f8247c7a8f4c3759f0740bfa6bb0c59a9f6bf938e913df56/greenlet-3.2.5-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f96e2bb8a56b7e1aed1dbfbbe0050cb2ecca99c7c91892fd1771e3afab63b3e3", size = 611138, upload-time = "2026-02-20T20:07:58.966Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8d/d0b086410512d9859c84e9242a9b341de9f5566011ddf3a3f6886b842b61/greenlet-3.2.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d7456e67b0be653dfe643bb37d9566cd30939c80f858e2ce6d2d54951f75b14a", size = 1126896, upload-time = "2026-02-20T20:32:52.198Z" }, + { url = "https://files.pythonhosted.org/packages/ef/37/59fe12fe456e84ced6ba71781e28cde52a3124d1dd2077bc1727021f49fd/greenlet-3.2.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5ceb29d1f74c7280befbbfa27b9bf91ba4a07a1a00b2179a5d953fc219b16c42", size = 1154779, upload-time = "2026-02-20T20:06:27.583Z" }, + { url = "https://files.pythonhosted.org/packages/dd/95/d5d332fb73affaf7a1fbe80e49c2c7eae4f17c645af24a3b3fa25736d6f0/greenlet-3.2.5-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:f2cc88b50b9006b324c1b9f5f3552f9d4564c78af57cdfb4c7baf4f0aa089146", size = 277166, upload-time = "2026-02-20T20:03:57.077Z" }, + { url = "https://files.pythonhosted.org/packages/6c/77/89458e20db5a4f1c64f9a0191561227e76d809941ca2d7529006d17d3450/greenlet-3.2.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e66872daffa360b2537170b73ad530f14fa31785b1bc78080125d92edf0a6def", size = 644674, upload-time = "2026-02-20T20:30:46.118Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/f8/9962175d2f2eaa629a7fd7545abacc8c4deda3baa4e52c1526d2eb5f5546/greenlet-3.2.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c5445ddb7b586d870dad32ca9fc47c287d6022a528d194efdb8912093c5303ad", size = 658834, upload-time = "2026-02-20T20:37:27.466Z" }, + { url = "https://files.pythonhosted.org/packages/f5/d7/826d0e080f0a7ad5ec47c8d143bbd3ca0887657bb806595fe2434d12938a/greenlet-3.2.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:752c896a8c976548faafe8a306d446c6a4c68d4fd24699b84d4393bd9ac69a8e", size = 655760, upload-time = "2026-02-20T20:07:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/41/cc/33bd4c2f816be8c8e16f71740c4130adf3a66a3dd2ba29de72b9d8dd1096/greenlet-3.2.5-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499b809e7738c8af0ff9ac9d5dd821cb93f4293065a9237543217f0b252f950a", size = 614132, upload-time = "2026-02-20T20:08:00.351Z" }, + { url = "https://files.pythonhosted.org/packages/48/79/f3891dcfc59097474a53cc3c624f2f2465e431ab493bda043b8c873fb20a/greenlet-3.2.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2c7429f6e9cea7cbf2637d86d3db12806ba970f7f972fcab39d6b54b4457cbaf", size = 1125286, upload-time = "2026-02-20T20:32:54.032Z" }, + { url = "https://files.pythonhosted.org/packages/ca/47/212b47e6d2d7a04c4083db1af2fdd291bc8fe99b7e3571bfa560b65fc361/greenlet-3.2.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a5e4b25e855800fba17713020c5c33e0a4b7a1829027719344f0c7c8870092a2", size = 1152825, upload-time = "2026-02-20T20:06:29Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9d/4e9b941be05f8da7ba804c6413761d2c11cca05994cbf0a015bd729419f0/greenlet-3.2.5-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:7123b29e6bad2f3f89681be4ef316480fca798ebe8d22fbaced9cc3775007a4f", size = 277627, upload-time = "2026-02-20T20:06:04.798Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/cb/a73625c9a35138330014ecf3740c0d62e0c2b5e7279bb7f2586b1b199fac/greenlet-3.2.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6e8fe0c72603201a86b2e038daf9b6c8570715f8779566419cff543b6ace88de", size = 690001, upload-time = "2026-02-20T20:30:47.754Z" }, + { url = "https://files.pythonhosted.org/packages/83/49/6d1531109507bce7dfb23acf57a87013627ed3ac058851176e443a6a9134/greenlet-3.2.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:050703a60603db0e817364d69e048c70af299040c13a7e67792b9e62d4571196", size = 702953, upload-time = "2026-02-20T20:37:29.125Z" }, + { url = "https://files.pythonhosted.org/packages/f7/38/f958ee90fab93529b30cc1e4a59b27c1112b640570043a84af84da3b3b98/greenlet-3.2.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6712bfd520530eb67331813f7112d3ee18e206f48b3d026d8a96cd2d2ad20251", size = 698995, upload-time = "2026-02-20T20:07:22.663Z" }, + { url = "https://files.pythonhosted.org/packages/51/c1/a603906e79716d61f08afedaf8aed62017661457aef233d62d6e57ecd511/greenlet-3.2.5-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bc06a78fa3ffbe2a75f1ebc7e040eacf6fa1050a9432953ab111fbbbf0d03c1", size = 661175, upload-time = "2026-02-20T20:08:01.477Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8f/f880ff4587d236b4d06893fb34da6b299aa0d00f6c8259673f80e1b6d63c/greenlet-3.2.5-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:dbe0e81e24982bb45907ca20152b31c2e3300ca352fdc4acbd4956e4a2cbc195", size = 274946, upload-time = "2026-02-20T20:05:21.979Z" }, + { url = "https://files.pythonhosted.org/packages/3c/50/f6c78b8420187fdfe97fcf2e6d1dd243a7742d272c32fd4d4b1095474b37/greenlet-3.2.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:15871afc0d78ec87d15d8412b337f287fc69f8f669346e391585824970931c48", size = 631781, upload-time = "2026-02-20T20:30:48.845Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/d6/3277f92e1961e6e9f41d9f173ea74b5c1f7065072637669f761626f26cc0/greenlet-3.2.5-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5bf0d7d62e356ef2e87e55e46a4e930ac165f9372760fb983b5631bb479e9d3a", size = 643740, upload-time = "2026-02-20T20:37:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/2a/6a/4f79d2e7b5ef3723fc5ffea0d6cb22627e5f95e0f19c973fa12bf1cf7891/greenlet-3.2.5-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6dff6433742073e5b6ad40953a78a0e8cddcb3f6869e5ea635d29a810ca5e7d0", size = 638382, upload-time = "2026-02-20T20:07:23.883Z" }, + { url = "https://files.pythonhosted.org/packages/4d/59/7aadf33f23c65dbf4db27e7f5b60c414797a61e954352ae4a86c5c8b0553/greenlet-3.2.5-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bdd67619cefe1cc9fcab57c8853d2bb36eca9f166c0058cc0d428d471f7c785c", size = 587516, upload-time = "2026-02-20T20:08:02.841Z" }, + { url = "https://files.pythonhosted.org/packages/1d/46/b3422959f830de28a4eea447414e6bd7b980d755892f66ab52ad805da1c4/greenlet-3.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3828b309dfb1f117fe54867512a8265d8d4f00f8de6908eef9b885f4d8789062", size = 1115818, upload-time = "2026-02-20T20:32:55.786Z" }, + { url = "https://files.pythonhosted.org/packages/54/4a/3d1c9728f093415637cf3696909fa10852632e33e68238fb8ca60eb90de1/greenlet-3.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:67725ae9fea62c95cf1aa230f1b8d4dc38f7cd14f6103d1df8a5a95657eb8e54", size = 1140219, upload-time = "2026-02-20T20:06:30.334Z" }, +] + +[[package]] +name = "greenlet" +version = "3.3.2" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "requests" }, +resolution-markers = [ + "python_full_version >= '3.10'", ] -sdist = { url = "https://files.pythonhosted.org/packages/22/11/102da08f88412d875fa2f1a9a469ff7ad4c874b0ca6fed0048fe385bdb3d/id-1.5.0.tar.gz", hash = 
"sha256:292cb8a49eacbbdbce97244f47a97b4c62540169c976552e497fd57df0734c1d", size = 15237, upload-time = "2024-12-04T19:53:05.575Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/cb/18326d2d89ad3b0dd143da971e77afd1e6ca6674f1b1c3df4b6bec6279fc/id-1.5.0-py3-none-any.whl", hash = "sha256:f1434e1cef91f2cbb8a4ec64663d5a23b9ed43ef44c4c957d02583d61714c658", size = 13611, upload-time = "2024-12-04T19:53:03.02Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/a3/51/1664f6b78fc6ebbd98019a1fd730e83fa78f2db7058f72b1463d3612b8db/greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2", size = 188267, upload-time = "2026-02-20T20:54:15.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9bc885b89709d901859cf95179ec9f6bb67a3d2bb1f0e88456461bd4b7f8fd0d", size = 277747, upload-time = "2026-02-20T20:16:21.325Z" }, + { url = "https://files.pythonhosted.org/packages/fb/07/cb284a8b5c6498dbd7cba35d31380bb123d7dceaa7907f606c8ff5993cbf/greenlet-3.3.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b568183cf65b94919be4438dc28416b234b678c608cafac8874dfeeb2a9bbe13", size = 579202, upload-time = "2026-02-20T20:47:28.955Z" }, + { url = "https://files.pythonhosted.org/packages/ed/45/67922992b3a152f726163b19f890a85129a992f39607a2a53155de3448b8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:527fec58dc9f90efd594b9b700662ed3fb2493c2122067ac9c740d98080a620e", size = 590620, upload-time = "2026-02-20T20:55:55.581Z" }, + { url = "https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad0c8917dd42a819fe77e6bdfcb84e3379c0de956469301d9fd36427a1ca501f", 
size = 591729, upload-time = "2026-02-20T20:20:58.395Z" }, + { url = "https://files.pythonhosted.org/packages/24/b4/21f5455773d37f94b866eb3cf5caed88d6cea6dd2c6e1f9c34f463cba3ec/greenlet-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:97245cc10e5515dbc8c3104b2928f7f02b6813002770cfaffaf9a6e0fc2b94ef", size = 1551946, upload-time = "2026-02-20T20:49:31.102Z" }, + { url = "https://files.pythonhosted.org/packages/00/68/91f061a926abead128fe1a87f0b453ccf07368666bd59ffa46016627a930/greenlet-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8c1fdd7d1b309ff0da81d60a9688a8bd044ac4e18b250320a96fc68d31c209ca", size = 1618494, upload-time = "2026-02-20T20:21:06.541Z" }, + { url = "https://files.pythonhosted.org/packages/ac/78/f93e840cbaef8becaf6adafbaf1319682a6c2d8c1c20224267a5c6c8c891/greenlet-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:5d0e35379f93a6d0222de929a25ab47b5eb35b5ef4721c2b9cbcc4036129ff1f", size = 230092, upload-time = "2026-02-20T20:17:09.379Z" }, + { url = "https://files.pythonhosted.org/packages/f3/47/16400cb42d18d7a6bb46f0626852c1718612e35dcb0dffa16bbaffdf5dd2/greenlet-3.3.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c56692189a7d1c7606cb794be0a8381470d95c57ce5be03fb3d0ef57c7853b86", size = 278890, upload-time = "2026-02-20T20:19:39.263Z" }, + { url = "https://files.pythonhosted.org/packages/a3/90/42762b77a5b6aa96cd8c0e80612663d39211e8ae8a6cd47c7f1249a66262/greenlet-3.3.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ebd458fa8285960f382841da585e02201b53a5ec2bac6b156fc623b5ce4499f", size = 581120, upload-time = "2026-02-20T20:47:30.161Z" }, + { url = "https://files.pythonhosted.org/packages/bf/6f/f3d64f4fa0a9c7b5c5b3c810ff1df614540d5aa7d519261b53fba55d4df9/greenlet-3.3.2-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a443358b33c4ec7b05b79a7c8b466f5d275025e750298be7340f8fc63dff2a55", size = 594363, upload-time = "2026-02-20T20:55:56.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/83/3e06a52aca8128bdd4dcd67e932b809e76a96ab8c232a8b025b2850264c5/greenlet-3.3.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e2cd90d413acbf5e77ae41e5d3c9b3ac1d011a756d7284d7f3f2b806bbd6358", size = 594156, upload-time = "2026-02-20T20:20:59.955Z" }, + { url = "https://files.pythonhosted.org/packages/70/79/0de5e62b873e08fe3cef7dbe84e5c4bc0e8ed0c7ff131bccb8405cd107c8/greenlet-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:442b6057453c8cb29b4fb36a2ac689382fc71112273726e2423f7f17dc73bf99", size = 1554649, upload-time = "2026-02-20T20:49:32.293Z" }, + { url = "https://files.pythonhosted.org/packages/5a/00/32d30dee8389dc36d42170a9c66217757289e2afb0de59a3565260f38373/greenlet-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45abe8eb6339518180d5a7fa47fa01945414d7cca5ecb745346fc6a87d2750be", size = 1619472, upload-time = "2026-02-20T20:21:07.966Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3a/efb2cf697fbccdf75b24e2c18025e7dfa54c4f31fab75c51d0fe79942cef/greenlet-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e692b2dae4cc7077cbb11b47d258533b48c8fde69a33d0d8a82e2fe8d8531d5", size = 230389, upload-time = "2026-02-20T20:17:18.772Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a1/65bbc059a43a7e2143ec4fc1f9e3f673e04f9c7b371a494a101422ac4fd5/greenlet-3.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:02b0a8682aecd4d3c6c18edf52bc8e51eacdd75c8eac52a790a210b06aa295fd", size = 229645, upload-time = "2026-02-20T20:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" }, + { url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" }, + { url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" }, + { url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" }, + { url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" }, + { url = "https://files.pythonhosted.org/packages/9b/40/cc802e067d02af8b60b6771cea7d57e21ef5e6659912814babb42b864713/greenlet-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:34308836d8370bddadb41f5a7ce96879b72e2fdfb4e87729330c6ab52376409f", size = 231081, upload-time = "2026-02-20T20:17:28.121Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/2e/fe7f36ff1982d6b10a60d5e0740c759259a7d6d2e1dc41da6d96de32fff6/greenlet-3.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:d3a62fa76a32b462a97198e4c9e99afb9ab375115e74e9a83ce180e7a496f643", size = 230331, upload-time = "2026-02-20T20:17:23.34Z" }, + { url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" }, + { url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" }, + { url = "https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" }, + { url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, + { url = "https://files.pythonhosted.org/packages/91/39/5ef5aa23bc545aa0d31e1b9b55822b32c8da93ba657295840b6b34124009/greenlet-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:a7945dd0eab63ded0a48e4dcade82939783c172290a7903ebde9e184333ca124", size = 230961, upload-time = "2026-02-20T20:16:58.461Z" }, + { url = "https://files.pythonhosted.org/packages/62/6b/a89f8456dcb06becff288f563618e9f20deed8dd29beea14f9a168aef64b/greenlet-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:394ead29063ee3515b4e775216cb756b2e3b4a7e55ae8fd884f17fa579e6b327", size = 230221, upload-time = "2026-02-20T20:17:37.152Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, + { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, + { url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, + { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ca/2101ca3d9223a1dc125140dbc063644dca76df6ff356531eb27bc267b446/greenlet-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:8c4dd0f3997cf2512f7601563cc90dfb8957c0cff1e3a1b23991d4ea1776c492", size = 232034, upload-time = "2026-02-20T20:20:08.186Z" }, + { url = "https://files.pythonhosted.org/packages/f6/4a/ecf894e962a59dea60f04877eea0fd5724618da89f1867b28ee8b91e811f/greenlet-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:cd6f9e2bbd46321ba3bbb4c8a15794d32960e3b0ae2cc4d49a1a53d314805d71", size = 231437, upload-time = "2026-02-20T20:18:59.722Z" }, + { url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, + { url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, + { url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, + { url = "https://files.pythonhosted.org/packages/29/4b/45d90626aef8e65336bed690106d1382f7a43665e2249017e9527df8823b/greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a", size = 237086, upload-time = "2026-02-20T20:20:45.786Z" }, ] [[package]] @@ -797,15 +411,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] -[[package]] -name = "isort" -version = "6.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955, upload-time = "2025-02-26T21:13:16.955Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186, upload-time = "2025-02-26T21:13:14.911Z" }, -] - [[package]] name = "izulu" version = "0.50.0" @@ -815,168 +420,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4a/9f/bf9d33546bbb6e5e80ebafe46f90b7d8b4a77410b7b05160b0ca8978c15a/izulu-0.50.0-py3-none-any.whl", hash = "sha256:4e9ae2508844e7c5f62c468a8b9e2deba2f60325ef63f01e65b39fd9a6b3fab4", size = 18095, upload-time = "2025-03-24T15:52:19.667Z" }, ] -[[package]] -name = "jaraco-classes" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "more-itertools" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = 
"sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" }, -] - -[[package]] -name = "jaraco-context" -version = "6.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/ad/f3777b81bf0b6e7bc7514a1656d3e637b2e8e15fab2ce3235730b3e7a4e6/jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3", size = 13912, upload-time = "2024-08-20T03:39:27.358Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/db/0c52c4cf5e4bd9f5d7135ec7669a3a767af21b3a308e1ed3674881e52b62/jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4", size = 6825, upload-time = "2024-08-20T03:39:25.966Z" }, -] - -[[package]] -name = "jaraco-functools" -version = "4.2.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "more-itertools" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/49/1c/831faaaa0f090b711c355c6d8b2abf277c72133aab472b6932b03322294c/jaraco_functools-4.2.1.tar.gz", hash = "sha256:be634abfccabce56fa3053f8c7ebe37b682683a4ee7793670ced17bab0087353", size = 19661, upload-time = "2025-06-21T19:22:03.201Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/fd/179a20f832824514df39a90bb0e5372b314fea99f217f5ab942b10a8a4e8/jaraco_functools-4.2.1-py3-none-any.whl", hash = "sha256:590486285803805f4b1f99c60ca9e94ed348d4added84b74c7a12885561e524e", size = 10349, upload-time = "2025-06-21T19:22:02.039Z" }, -] - -[[package]] -name = "jeepney" -version = "0.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = 
"sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, -] - -[[package]] -name = "keyring" -version = "25.6.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, - { name = "jaraco-classes" }, - { name = "jaraco-context" }, - { name = "jaraco-functools" }, - { name = "jeepney", marker = "sys_platform == 'linux'" }, - { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, - { name = "secretstorage", marker = "sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/70/09/d904a6e96f76ff214be59e7aa6ef7190008f52a0ab6689760a98de0bf37d/keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66", size = 62750, upload-time = "2024-12-25T15:26:45.782Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" }, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "mdurl", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = 
"sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, -] - -[[package]] -name = "markdown-it-py" -version = "4.0.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", -] -dependencies = [ - { name = "mdurl", marker = "python_full_version >= '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, -] - -[[package]] -name = "mccabe" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, 
-] - -[[package]] -name = "mdurl" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, -] - -[[package]] -name = "more-itertools" -version = "10.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ce/a0/834b0cebabbfc7e311f30b46c8188790a37f89fc8d756660346fe5abfd09/more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3", size = 127671, upload-time = "2025-04-22T14:17:41.838Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" }, -] - -[[package]] -name = "mypy-extensions" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = 
"sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, -] - -[[package]] -name = "nh3" -version = "0.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/96cff0977357f60f06ec4368c4c7a7a26cccfe7c9fcd54f5378bf0428fd3/nh3-0.3.0.tar.gz", hash = "sha256:d8ba24cb31525492ea71b6aac11a4adac91d828aadeff7c4586541bf5dc34d2f", size = 19655, upload-time = "2025-07-17T14:43:37.05Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/11/340b7a551916a4b2b68c54799d710f86cf3838a4abaad8e74d35360343bb/nh3-0.3.0-cp313-cp313t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:a537ece1bf513e5a88d8cff8a872e12fe8d0f42ef71dd15a5e7520fecd191bbb", size = 1427992, upload-time = "2025-07-17T14:43:06.848Z" }, - { url = "https://files.pythonhosted.org/packages/ad/7f/7c6b8358cf1222921747844ab0eef81129e9970b952fcb814df417159fb9/nh3-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c915060a2c8131bef6a29f78debc29ba40859b6dbe2362ef9e5fd44f11487c2", size = 798194, upload-time = "2025-07-17T14:43:08.263Z" }, - { url = "https://files.pythonhosted.org/packages/63/da/c5fd472b700ba37d2df630a9e0d8cc156033551ceb8b4c49cc8a5f606b68/nh3-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba0caa8aa184196daa6e574d997a33867d6d10234018012d35f86d46024a2a95", size = 837884, upload-time = "2025-07-17T14:43:09.233Z" }, - { url = "https://files.pythonhosted.org/packages/4c/3c/cba7b26ccc0ef150c81646478aa32f9c9535234f54845603c838a1dc955c/nh3-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:80fe20171c6da69c7978ecba33b638e951b85fb92059259edd285ff108b82a6d", size = 996365, upload-time = "2025-07-17T14:43:10.243Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/ba/59e204d90727c25b253856e456ea61265ca810cda8ee802c35f3fadaab00/nh3-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e90883f9f85288f423c77b3f5a6f4486375636f25f793165112679a7b6363b35", size = 1071042, upload-time = "2025-07-17T14:43:11.57Z" }, - { url = "https://files.pythonhosted.org/packages/10/71/2fb1834c10fab6d9291d62c95192ea2f4c7518bd32ad6c46aab5d095cb87/nh3-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0649464ac8eee018644aacbc103874ccbfac80e3035643c3acaab4287e36e7f5", size = 995737, upload-time = "2025-07-17T14:43:12.659Z" }, - { url = "https://files.pythonhosted.org/packages/33/c1/8f8ccc2492a000b6156dce68a43253fcff8b4ce70ab4216d08f90a2ac998/nh3-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1adeb1062a1c2974bc75b8d1ecb014c5fd4daf2df646bbe2831f7c23659793f9", size = 980552, upload-time = "2025-07-17T14:43:13.763Z" }, - { url = "https://files.pythonhosted.org/packages/2f/d6/f1c6e091cbe8700401c736c2bc3980c46dca770a2cf6a3b48a175114058e/nh3-0.3.0-cp313-cp313t-win32.whl", hash = "sha256:7275fdffaab10cc5801bf026e3c089d8de40a997afc9e41b981f7ac48c5aa7d5", size = 593618, upload-time = "2025-07-17T14:43:15.098Z" }, - { url = "https://files.pythonhosted.org/packages/23/1e/80a8c517655dd40bb13363fc4d9e66b2f13245763faab1a20f1df67165a7/nh3-0.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:423201bbdf3164a9e09aa01e540adbb94c9962cc177d5b1cbb385f5e1e79216e", size = 598948, upload-time = "2025-07-17T14:43:16.064Z" }, - { url = "https://files.pythonhosted.org/packages/9a/e0/af86d2a974c87a4ba7f19bc3b44a8eaa3da480de264138fec82fe17b340b/nh3-0.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:16f8670201f7e8e0e05ed1a590eb84bfa51b01a69dd5caf1d3ea57733de6a52f", size = 580479, upload-time = "2025-07-17T14:43:17.038Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/e0/cf1543e798ba86d838952e8be4cb8d18e22999be2a24b112a671f1c04fd6/nh3-0.3.0-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:ec6cfdd2e0399cb79ba4dcffb2332b94d9696c52272ff9d48a630c5dca5e325a", size = 1442218, upload-time = "2025-07-17T14:43:18.087Z" }, - { url = "https://files.pythonhosted.org/packages/5c/86/a96b1453c107b815f9ab8fac5412407c33cc5c7580a4daf57aabeb41b774/nh3-0.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce5e7185599f89b0e391e2f29cc12dc2e206167380cea49b33beda4891be2fe1", size = 823791, upload-time = "2025-07-17T14:43:19.721Z" }, - { url = "https://files.pythonhosted.org/packages/97/33/11e7273b663839626f714cb68f6eb49899da5a0d9b6bc47b41fe870259c2/nh3-0.3.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:389d93d59b8214d51c400fb5b07866c2a4f79e4e14b071ad66c92184fec3a392", size = 811143, upload-time = "2025-07-17T14:43:20.779Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1b/b15bd1ce201a1a610aeb44afd478d55ac018b4475920a3118ffd806e2483/nh3-0.3.0-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e9e6a7e4d38f7e8dda9edd1433af5170c597336c1a74b4693c5cb75ab2b30f2a", size = 1064661, upload-time = "2025-07-17T14:43:21.839Z" }, - { url = "https://files.pythonhosted.org/packages/8f/14/079670fb2e848c4ba2476c5a7a2d1319826053f4f0368f61fca9bb4227ae/nh3-0.3.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7852f038a054e0096dac12b8141191e02e93e0b4608c4b993ec7d4ffafea4e49", size = 997061, upload-time = "2025-07-17T14:43:23.179Z" }, - { url = "https://files.pythonhosted.org/packages/a3/e5/ac7fc565f5d8bce7f979d1afd68e8cb415020d62fa6507133281c7d49f91/nh3-0.3.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af5aa8127f62bbf03d68f67a956627b1bd0469703a35b3dad28d0c1195e6c7fb", size = 924761, upload-time = "2025-07-17T14:43:24.23Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/2c/6394301428b2017a9d5644af25f487fa557d06bc8a491769accec7524d9a/nh3-0.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f416c35efee3e6a6c9ab7716d9e57aa0a49981be915963a82697952cba1353e1", size = 803959, upload-time = "2025-07-17T14:43:26.377Z" }, - { url = "https://files.pythonhosted.org/packages/4e/9a/344b9f9c4bd1c2413a397f38ee6a3d5db30f1a507d4976e046226f12b297/nh3-0.3.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:37d3003d98dedca6cd762bf88f2e70b67f05100f6b949ffe540e189cc06887f9", size = 844073, upload-time = "2025-07-17T14:43:27.375Z" }, - { url = "https://files.pythonhosted.org/packages/66/3f/cd37f76c8ca277b02a84aa20d7bd60fbac85b4e2cbdae77cb759b22de58b/nh3-0.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:634e34e6162e0408e14fb61d5e69dbaea32f59e847cfcfa41b66100a6b796f62", size = 1000680, upload-time = "2025-07-17T14:43:28.452Z" }, - { url = "https://files.pythonhosted.org/packages/ee/db/7aa11b44bae4e7474feb1201d8dee04fabe5651c7cb51409ebda94a4ed67/nh3-0.3.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:b0612ccf5de8a480cf08f047b08f9d3fecc12e63d2ee91769cb19d7290614c23", size = 1076613, upload-time = "2025-07-17T14:43:30.031Z" }, - { url = "https://files.pythonhosted.org/packages/97/03/03f79f7e5178eb1ad5083af84faff471e866801beb980cc72943a4397368/nh3-0.3.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c7a32a7f0d89f7d30cb8f4a84bdbd56d1eb88b78a2434534f62c71dac538c450", size = 1001418, upload-time = "2025-07-17T14:43:31.429Z" }, - { url = "https://files.pythonhosted.org/packages/ce/55/1974bcc16884a397ee699cebd3914e1f59be64ab305533347ca2d983756f/nh3-0.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3f1b4f8a264a0c86ea01da0d0c390fe295ea0bcacc52c2103aca286f6884f518", size = 986499, upload-time = "2025-07-17T14:43:32.459Z" }, - { url = 
"https://files.pythonhosted.org/packages/c9/50/76936ec021fe1f3270c03278b8af5f2079038116b5d0bfe8538ffe699d69/nh3-0.3.0-cp38-abi3-win32.whl", hash = "sha256:6d68fa277b4a3cf04e5c4b84dd0c6149ff7d56c12b3e3fab304c525b850f613d", size = 599000, upload-time = "2025-07-17T14:43:33.852Z" }, - { url = "https://files.pythonhosted.org/packages/8c/ae/324b165d904dc1672eee5f5661c0a68d4bab5b59fbb07afb6d8d19a30b45/nh3-0.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:bae63772408fd63ad836ec569a7c8f444dd32863d0c67f6e0b25ebbd606afa95", size = 604530, upload-time = "2025-07-17T14:43:34.95Z" }, - { url = "https://files.pythonhosted.org/packages/5b/76/3165e84e5266d146d967a6cc784ff2fbf6ddd00985a55ec006b72bc39d5d/nh3-0.3.0-cp38-abi3-win_arm64.whl", hash = "sha256:d97d3efd61404af7e5721a0e74d81cdbfc6e5f97e11e731bb6d090e30a7b62b2", size = 585971, upload-time = "2025-07-17T14:43:35.936Z" }, -] - [[package]] name = "nodeenv" version = "1.9.1" @@ -995,39 +438,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] -[[package]] -name = "pathspec" -version = "0.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, -] - -[[package]] -name = "pbr" -version = "7.0.0" -source = 
{ registry = "https://pypi.org/simple" } -dependencies = [ - { name = "setuptools", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/80/88/baf6b45d064271f19fefac7def6a030a893f912f430de0024dd595ced61f/pbr-7.0.0.tar.gz", hash = "sha256:cf4127298723dafbce3afd13775ccf3885be5d3c8435751b867f9a6a10b71a39", size = 129146, upload-time = "2025-08-13T09:16:41.654Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/98/120c3e21bf3fc0ef397a3906465ee9f5c76996c52811e65455eadc12d68a/pbr-7.0.0-py2.py3-none-any.whl", hash = "sha256:b447e63a2bc04fd975fc0480b8d5ebf979179e2c0ae203bf1eff9ea20073bc38", size = 125109, upload-time = "2025-08-13T09:16:40.269Z" }, -] - -[[package]] -name = "pep8-naming" -version = "0.13.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5b/c0/0db8b2867395a9a137e86af8bdf5a566e41d9c6453e509cd3042419ae29e/pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971", size = 16129, upload-time = "2022-12-19T20:45:27.158Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/48/9533518e0394fb858ac2b4b55fe18f24aa33c87c943f691336ec842d9728/pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80", size = 8490, upload-time = "2022-12-19T20:45:25.132Z" }, -] - [[package]] name = "platformdirs" version = "4.3.8" @@ -1255,24 +665,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/47/fd/4feb52a55c1a4bd748f2acaed1903ab54a723c47f6d0242780f4d97104d4/psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7", size = 38252, upload-time = "2025-02-26T12:03:45.073Z" }, ] -[[package]] -name = "pycodestyle" -version = "2.14.0" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/e0/abfd2a0d2efe47670df87f3e3a0e2edda42f055053c85361f19c0e2c1ca8/pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783", size = 39472, upload-time = "2025-06-20T18:49:48.75Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/27/a58ddaf8c588a3ef080db9d0b7e0b97215cee3a45df74f3a94dbbf5c893a/pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d", size = 31594, upload-time = "2025-06-20T18:49:47.491Z" }, -] - -[[package]] -name = "pycparser" -version = "2.22" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, -] - [[package]] name = "pycron" version = "3.2.0" @@ -1406,27 +798,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d4/29/3cade8a924a61f60ccfa10842f75eb12787e1440e2b8660ceffeb26685e7/pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27", size = 2066661, upload-time = "2025-04-23T18:33:49.995Z" }, ] -[[package]] -name = "pydocstyle" -version = "6.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/e9/5c/d5385ca59fd065e3c6a5fe19f9bc9d5ea7f2509fa8c9c22fb6b2031dd953/pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1", size = 36796, upload-time = "2023-01-17T20:29:19.838Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/ea/99ddefac41971acad68f14114f38261c1f27dac0b3ec529824ebc739bdaa/pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019", size = 38038, upload-time = "2023-01-17T20:29:18.094Z" }, -] - -[[package]] -name = "pyflakes" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/45/dc/fd034dc20b4b264b3d015808458391acbf9df40b1e54750ef175d39180b1/pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58", size = 64669, upload-time = "2025-06-20T18:45:27.834Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/2f/81d580a0fb83baeb066698975cb14a618bdbed7720678566f1b046a95fe8/pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f", size = 63551, upload-time = "2025-06-20T18:45:26.937Z" }, -] - [[package]] name = "pygments" version = "2.19.2" @@ -1515,15 +886,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, ] -[[package]] -name = "pywin32-ctypes" -version = "0.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = 
"sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = "2024-08-14T10:15:34.626Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" }, -] - [[package]] name = "pyyaml" version = "6.0.2" @@ -1577,79 +939,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" }, ] -[[package]] -name = "readme-renderer" -version = "44.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "nh3" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5a/a9/104ec9234c8448c4379768221ea6df01260cd6c2ce13182d4eac531c8342/readme_renderer-44.0.tar.gz", hash = "sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1", size = 32056, upload-time = "2024-07-08T15:00:57.805Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/67/921ec3024056483db83953ae8e48079ad62b92db7880013ca77632921dd0/readme_renderer-44.0-py3-none-any.whl", hash = "sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151", size = 13310, upload-time = "2024-07-08T15:00:56.577Z" }, -] - -[[package]] -name = "requests" -version = "2.32.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, -] - -[[package]] -name = "requests-toolbelt" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" }, -] - -[[package]] -name = "restructuredtext-lint" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/48/9c/6d8035cafa2d2d314f34e6cd9313a299de095b26e96f1c7312878f988eec/restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45", size = 16723, upload-time = "2022-02-24T05:51:10.907Z" } - -[[package]] -name = "rfc3986" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/85/40/1520d68bfa07ab5a6f065a186815fb6610c86fe957bc065754e47f7b0840/rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c", size = 49026, upload-time = "2022-01-10T00:52:30.832Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/9a/9afaade874b2fa6c752c36f1548f718b5b83af81ed9b76628329dab81c1b/rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd", size = 31326, upload-time = "2022-01-10T00:52:29.594Z" }, -] - -[[package]] -name = "rich" -version = "14.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, -] - [[package]] name = "ruff" version = "0.12.9" @@ -1676,28 +965,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ac/fd/669816bc6b5b93b9586f3c1d87cd6bc05028470b3ecfebb5938252c47a35/ruff-0.12.9-py3-none-win_arm64.whl", hash = "sha256:63c8c819739d86b96d500cce885956a1a48ab056bbcbc61b747ad494b2485089", size = 11949623, upload-time = "2025-08-14T16:08:52.233Z" }, ] -[[package]] -name 
= "secretstorage" -version = "3.3.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cryptography" }, - { name = "jeepney" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/53/a4/f48c9d79cb507ed1373477dbceaba7401fd8a23af63b837fa61f1dcd3691/SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77", size = 19739, upload-time = "2022-08-13T16:22:46.976Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99", size = 15221, upload-time = "2022-08-13T16:22:44.457Z" }, -] - -[[package]] -name = "setuptools" -version = "80.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, -] - [[package]] name = "sniffio" version = "1.3.1" @@ -1708,24 +975,71 @@ wheels = [ ] [[package]] -name = "snowballstemmer" -version = "3.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, -] - -[[package]] -name = "stevedore" -version = "5.4.1" +name = "sqlalchemy" +version = "2.0.48" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pbr", marker = "python_full_version < '3.10'" }, + { name = "greenlet", version = "3.2.5", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.10' and platform_machine == 'AMD64') or (python_full_version < '3.10' and platform_machine == 'WIN32') or (python_full_version < '3.10' and platform_machine == 'aarch64') or (python_full_version < '3.10' and platform_machine == 'amd64') or (python_full_version < '3.10' and platform_machine == 'ppc64le') or (python_full_version < '3.10' and platform_machine == 'win32') or (python_full_version < '3.10' and platform_machine == 'x86_64')" }, + { name = "greenlet", version = "3.3.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and platform_machine == 'AMD64') or (python_full_version >= '3.10' and platform_machine == 'WIN32') or (python_full_version >= '3.10' and platform_machine == 'aarch64') or (python_full_version >= '3.10' and platform_machine == 'amd64') or (python_full_version >= '3.10' and platform_machine == 'ppc64le') or (python_full_version >= '3.10' and platform_machine == 'win32') or (python_full_version >= '3.10' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/28/3f/13cacea96900bbd31bb05c6b74135f85d15564fc583802be56976c940470/stevedore-5.4.1.tar.gz", hash = "sha256:3135b5ae50fe12816ef291baff420acb727fcd356106e3e9cbfa9e5985cd6f4b", size = 513858, upload-time = "2025-02-20T14:03:57.285Z" } -wheels 
= [ - { url = "https://files.pythonhosted.org/packages/f7/45/8c4ebc0c460e6ec38e62ab245ad3c7fc10b210116cea7c16d61602aa9558/stevedore-5.4.1-py3-none-any.whl", hash = "sha256:d10a31c7b86cba16c1f6e8d15416955fc797052351a56af15e608ad20811fcfe", size = 49533, upload-time = "2025-02-20T14:03:55.849Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/1f/73/b4a9737255583b5fa858e0bb8e116eb94b88c910164ed2ed719147bde3de/sqlalchemy-2.0.48.tar.gz", hash = "sha256:5ca74f37f3369b45e1f6b7b06afb182af1fd5dde009e4ffd831830d98cbe5fe7", size = 9886075, upload-time = "2026-03-02T15:28:51.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/67/1235676e93dd3b742a4a8eddfae49eea46c85e3eed29f0da446a8dd57500/sqlalchemy-2.0.48-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7001dc9d5f6bb4deb756d5928eaefe1930f6f4179da3924cbd95ee0e9f4dce89", size = 2157384, upload-time = "2026-03-02T15:38:26.781Z" }, + { url = "https://files.pythonhosted.org/packages/4d/d7/fa728b856daa18c10e1390e76f26f64ac890c947008284387451d56ca3d0/sqlalchemy-2.0.48-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1a89ce07ad2d4b8cfc30bd5889ec40613e028ed80ef47da7d9dd2ce969ad30e0", size = 3236981, upload-time = "2026-03-02T15:58:53.53Z" }, + { url = "https://files.pythonhosted.org/packages/5c/ad/6c4395649a212a6c603a72c5b9ab5dce3135a1546cfdffa3c427e71fd535/sqlalchemy-2.0.48-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10853a53a4a00417a00913d270dddda75815fcb80675874285f41051c094d7dd", size = 3235232, upload-time = "2026-03-02T15:52:25.654Z" }, + { url = "https://files.pythonhosted.org/packages/01/f4/58f845e511ac0509765a6f85eb24924c1ef0d54fb50de9d15b28c3601458/sqlalchemy-2.0.48-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fac0fa4e4f55f118fd87177dacb1c6522fe39c28d498d259014020fec9164c29", size = 3188106, upload-time = "2026-03-02T15:58:55.193Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/f9/6dcc7bfa5f5794c3a095e78cd1de8269dfb5584dfd4c2c00a50d3c1ade44/sqlalchemy-2.0.48-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3713e21ea67bca727eecd4a24bf68bcd414c403faae4989442be60994301ded0", size = 3209522, upload-time = "2026-03-02T15:52:27.407Z" }, + { url = "https://files.pythonhosted.org/packages/d7/5a/b632875ab35874d42657f079529f0745410604645c269a8c21fb4272ff7a/sqlalchemy-2.0.48-cp310-cp310-win32.whl", hash = "sha256:d404dc897ce10e565d647795861762aa2d06ca3f4a728c5e9a835096c7059018", size = 2117695, upload-time = "2026-03-02T15:46:51.389Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/9752eb2a41afdd8568e41ac3c3128e32a0a73eada5ab80483083604a56d1/sqlalchemy-2.0.48-cp310-cp310-win_amd64.whl", hash = "sha256:841a94c66577661c1f088ac958cd767d7c9bf507698f45afffe7a4017049de76", size = 2140928, upload-time = "2026-03-02T15:46:52.992Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6d/b8b78b5b80f3c3ab3f7fa90faa195ec3401f6d884b60221260fd4d51864c/sqlalchemy-2.0.48-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b4c575df7368b3b13e0cebf01d4679f9a28ed2ae6c1cd0b1d5beffb6b2007dc", size = 2157184, upload-time = "2026-03-02T15:38:28.161Z" }, + { url = "https://files.pythonhosted.org/packages/21/4b/4f3d4a43743ab58b95b9ddf5580a265b593d017693df9e08bd55780af5bb/sqlalchemy-2.0.48-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e83e3f959aaa1c9df95c22c528096d94848a1bc819f5d0ebf7ee3df0ca63db6c", size = 3313555, upload-time = "2026-03-02T15:58:57.21Z" }, + { url = "https://files.pythonhosted.org/packages/21/dd/3b7c53f1dbbf736fd27041aee68f8ac52226b610f914085b1652c2323442/sqlalchemy-2.0.48-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f7b7243850edd0b8b97043f04748f31de50cf426e939def5c16bedb540698f7", size = 3313057, upload-time = "2026-03-02T15:52:29.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/cc/3e600a90ae64047f33313d7d32e5ad025417f09d2ded487e8284b5e21a15/sqlalchemy-2.0.48-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:82745b03b4043e04600a6b665cb98697c4339b24e34d74b0a2ac0a2488b6f94d", size = 3265431, upload-time = "2026-03-02T15:58:59.096Z" }, + { url = "https://files.pythonhosted.org/packages/8b/19/780138dacfe3f5024f4cf96e4005e91edf6653d53d3673be4844578faf1d/sqlalchemy-2.0.48-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5e088bf43f6ee6fec7dbf1ef7ff7774a616c236b5c0cb3e00662dd71a56b571", size = 3287646, upload-time = "2026-03-02T15:52:31.569Z" }, + { url = "https://files.pythonhosted.org/packages/40/fd/f32ced124f01a23151f4777e4c705f3a470adc7bd241d9f36a7c941a33bf/sqlalchemy-2.0.48-cp311-cp311-win32.whl", hash = "sha256:9c7d0a77e36b5f4b01ca398482230ab792061d243d715299b44a0b55c89fe617", size = 2116956, upload-time = "2026-03-02T15:46:54.535Z" }, + { url = "https://files.pythonhosted.org/packages/58/d5/dd767277f6feef12d05651538f280277e661698f617fa4d086cce6055416/sqlalchemy-2.0.48-cp311-cp311-win_amd64.whl", hash = "sha256:583849c743e0e3c9bb7446f5b5addeacedc168d657a69b418063dfdb2d90081c", size = 2141627, upload-time = "2026-03-02T15:46:55.849Z" }, + { url = "https://files.pythonhosted.org/packages/ef/91/a42ae716f8925e9659df2da21ba941f158686856107a61cc97a95e7647a3/sqlalchemy-2.0.48-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:348174f228b99f33ca1f773e85510e08927620caa59ffe7803b37170df30332b", size = 2155737, upload-time = "2026-03-02T15:49:13.207Z" }, + { url = "https://files.pythonhosted.org/packages/b9/52/f75f516a1f3888f027c1cfb5d22d4376f4b46236f2e8669dcb0cddc60275/sqlalchemy-2.0.48-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53667b5f668991e279d21f94ccfa6e45b4e3f4500e7591ae59a8012d0f010dcb", size = 3337020, upload-time = "2026-03-02T15:50:34.547Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/9a/0c28b6371e0cdcb14f8f1930778cb3123acfcbd2c95bb9cf6b4a2ba0cce3/sqlalchemy-2.0.48-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34634e196f620c7a61d18d5cf7dc841ca6daa7961aed75d532b7e58b309ac894", size = 3349983, upload-time = "2026-03-02T15:53:25.542Z" }, + { url = "https://files.pythonhosted.org/packages/1c/46/0aee8f3ff20b1dcbceb46ca2d87fcc3d48b407925a383ff668218509d132/sqlalchemy-2.0.48-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:546572a1793cc35857a2ffa1fe0e58571af1779bcc1ffa7c9fb0839885ed69a9", size = 3279690, upload-time = "2026-03-02T15:50:36.277Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8c/a957bc91293b49181350bfd55e6dfc6e30b7f7d83dc6792d72043274a390/sqlalchemy-2.0.48-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:07edba08061bc277bfdc772dd2a1a43978f5a45994dd3ede26391b405c15221e", size = 3314738, upload-time = "2026-03-02T15:53:27.519Z" }, + { url = "https://files.pythonhosted.org/packages/4b/44/1d257d9f9556661e7bdc83667cc414ba210acfc110c82938cb3611eea58f/sqlalchemy-2.0.48-cp312-cp312-win32.whl", hash = "sha256:908a3fa6908716f803b86896a09a2c4dde5f5ce2bb07aacc71ffebb57986ce99", size = 2115546, upload-time = "2026-03-02T15:54:31.591Z" }, + { url = "https://files.pythonhosted.org/packages/f2/af/c3c7e1f3a2b383155a16454df62ae8c62a30dd238e42e68c24cebebbfae6/sqlalchemy-2.0.48-cp312-cp312-win_amd64.whl", hash = "sha256:68549c403f79a8e25984376480959975212a670405e3913830614432b5daa07a", size = 2142484, upload-time = "2026-03-02T15:54:34.072Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c6/569dc8bf3cd375abc5907e82235923e986799f301cd79a903f784b996fca/sqlalchemy-2.0.48-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e3070c03701037aa418b55d36532ecb8f8446ed0135acb71c678dbdf12f5b6e4", size = 2152599, upload-time = "2026-03-02T15:49:14.41Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/ff/f4e04a4bd5a24304f38cb0d4aa2ad4c0fb34999f8b884c656535e1b2b74c/sqlalchemy-2.0.48-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2645b7d8a738763b664a12a1542c89c940daa55196e8d73e55b169cc5c99f65f", size = 3278825, upload-time = "2026-03-02T15:50:38.269Z" }, + { url = "https://files.pythonhosted.org/packages/fe/88/cb59509e4668d8001818d7355d9995be90c321313078c912420603a7cb95/sqlalchemy-2.0.48-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b19151e76620a412c2ac1c6f977ab1b9fa7ad43140178345136456d5265b32ed", size = 3295200, upload-time = "2026-03-02T15:53:29.366Z" }, + { url = "https://files.pythonhosted.org/packages/87/dc/1609a4442aefd750ea2f32629559394ec92e89ac1d621a7f462b70f736ff/sqlalchemy-2.0.48-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b193a7e29fd9fa56e502920dca47dffe60f97c863494946bd698c6058a55658", size = 3226876, upload-time = "2026-03-02T15:50:39.802Z" }, + { url = "https://files.pythonhosted.org/packages/37/c3/6ae2ab5ea2fa989fbac4e674de01224b7a9d744becaf59bb967d62e99bed/sqlalchemy-2.0.48-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:36ac4ddc3d33e852da9cb00ffb08cea62ca05c39711dc67062ca2bb1fae35fd8", size = 3265045, upload-time = "2026-03-02T15:53:31.421Z" }, + { url = "https://files.pythonhosted.org/packages/6f/82/ea4665d1bb98c50c19666e672f21b81356bd6077c4574e3d2bbb84541f53/sqlalchemy-2.0.48-cp313-cp313-win32.whl", hash = "sha256:389b984139278f97757ea9b08993e7b9d1142912e046ab7d82b3fbaeb0209131", size = 2113700, upload-time = "2026-03-02T15:54:35.825Z" }, + { url = "https://files.pythonhosted.org/packages/b7/2b/b9040bec58c58225f073f5b0c1870defe1940835549dafec680cbd58c3c3/sqlalchemy-2.0.48-cp313-cp313-win_amd64.whl", hash = "sha256:d612c976cbc2d17edfcc4c006874b764e85e990c29ce9bd411f926bbfb02b9a2", size = 2139487, upload-time = "2026-03-02T15:54:37.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/f4/7b17bd50244b78a49d22cc63c969d71dc4de54567dc152a9b46f6fae40ce/sqlalchemy-2.0.48-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69f5bc24904d3bc3640961cddd2523e361257ef68585d6e364166dfbe8c78fae", size = 3558851, upload-time = "2026-03-02T15:57:48.607Z" }, + { url = "https://files.pythonhosted.org/packages/20/0d/213668e9aca61d370f7d2a6449ea4ec699747fac67d4bda1bb3d129025be/sqlalchemy-2.0.48-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd08b90d211c086181caed76931ecfa2bdfc83eea3cfccdb0f82abc6c4b876cb", size = 3525525, upload-time = "2026-03-02T16:04:38.058Z" }, + { url = "https://files.pythonhosted.org/packages/85/d7/a84edf412979e7d59c69b89a5871f90a49228360594680e667cb2c46a828/sqlalchemy-2.0.48-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1ccd42229aaac2df431562117ac7e667d702e8e44afdb6cf0e50fa3f18160f0b", size = 3466611, upload-time = "2026-03-02T15:57:50.759Z" }, + { url = "https://files.pythonhosted.org/packages/86/55/42404ce5770f6be26a2b0607e7866c31b9a4176c819e9a7a5e0a055770be/sqlalchemy-2.0.48-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0dcbc588cd5b725162c076eb9119342f6579c7f7f55057bb7e3c6ff27e13121", size = 3475812, upload-time = "2026-03-02T16:04:40.092Z" }, + { url = "https://files.pythonhosted.org/packages/ae/ae/29b87775fadc43e627cf582fe3bda4d02e300f6b8f2747c764950d13784c/sqlalchemy-2.0.48-cp313-cp313t-win32.whl", hash = "sha256:9764014ef5e58aab76220c5664abb5d47d5bc858d9debf821e55cfdd0f128485", size = 2141335, upload-time = "2026-03-02T15:52:51.518Z" }, + { url = "https://files.pythonhosted.org/packages/91/44/f39d063c90f2443e5b46ec4819abd3d8de653893aae92df42a5c4f5843de/sqlalchemy-2.0.48-cp313-cp313t-win_amd64.whl", hash = "sha256:e2f35b4cccd9ed286ad62e0a3c3ac21e06c02abc60e20aa51a3e305a30f5fa79", size = 2173095, upload-time = "2026-03-02T15:52:52.79Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/b3/f437eaa1cf028bb3c927172c7272366393e73ccd104dcf5b6963f4ab5318/sqlalchemy-2.0.48-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e2d0d88686e3d35a76f3e15a34e8c12d73fc94c1dea1cd55782e695cc14086dd", size = 2154401, upload-time = "2026-03-02T15:49:17.24Z" }, + { url = "https://files.pythonhosted.org/packages/6c/1c/b3abdf0f402aa3f60f0df6ea53d92a162b458fca2321d8f1f00278506402/sqlalchemy-2.0.48-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49b7bddc1eebf011ea5ab722fdbe67a401caa34a350d278cc7733c0e88fecb1f", size = 3274528, upload-time = "2026-03-02T15:50:41.489Z" }, + { url = "https://files.pythonhosted.org/packages/f2/5e/327428a034407651a048f5e624361adf3f9fbac9d0fa98e981e9c6ff2f5e/sqlalchemy-2.0.48-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:426c5ca86415d9b8945c7073597e10de9644802e2ff502b8e1f11a7a2642856b", size = 3279523, upload-time = "2026-03-02T15:53:32.962Z" }, + { url = "https://files.pythonhosted.org/packages/2a/ca/ece73c81a918add0965b76b868b7b5359e068380b90ef1656ee995940c02/sqlalchemy-2.0.48-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:288937433bd44e3990e7da2402fabc44a3c6c25d3704da066b85b89a85474ae0", size = 3224312, upload-time = "2026-03-02T15:50:42.996Z" }, + { url = "https://files.pythonhosted.org/packages/88/11/fbaf1ae91fa4ee43f4fe79661cead6358644824419c26adb004941bdce7c/sqlalchemy-2.0.48-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8183dc57ae7d9edc1346e007e840a9f3d6aa7b7f165203a99e16f447150140d2", size = 3246304, upload-time = "2026-03-02T15:53:34.937Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5fb0deb13930b4f2f698c5541ae076c18981173e27dd00376dbaea7a9c82/sqlalchemy-2.0.48-cp314-cp314-win32.whl", hash = "sha256:1182437cb2d97988cfea04cf6cdc0b0bb9c74f4d56ec3d08b81e23d621a28cc6", size = 2116565, upload-time = "2026-03-02T15:54:38.321Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/7e/e83615cb63f80047f18e61e31e8e32257d39458426c23006deeaf48f463b/sqlalchemy-2.0.48-cp314-cp314-win_amd64.whl", hash = "sha256:144921da96c08feb9e2b052c5c5c1d0d151a292c6135623c6b2c041f2a45f9e0", size = 2142205, upload-time = "2026-03-02T15:54:39.831Z" }, + { url = "https://files.pythonhosted.org/packages/83/e3/69d8711b3f2c5135e9cde5f063bc1605860f0b2c53086d40c04017eb1f77/sqlalchemy-2.0.48-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5aee45fd2c6c0f2b9cdddf48c48535e7471e42d6fb81adfde801da0bd5b93241", size = 3563519, upload-time = "2026-03-02T15:57:52.387Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4f/a7cce98facca73c149ea4578981594aaa5fd841e956834931de503359336/sqlalchemy-2.0.48-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7cddca31edf8b0653090cbb54562ca027c421c58ddde2c0685f49ff56a1690e0", size = 3528611, upload-time = "2026-03-02T16:04:42.097Z" }, + { url = "https://files.pythonhosted.org/packages/cd/7d/5936c7a03a0b0cb0fa0cc425998821c6029756b0855a8f7ee70fba1de955/sqlalchemy-2.0.48-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7a936f1bb23d370b7c8cc079d5fce4c7d18da87a33c6744e51a93b0f9e97e9b3", size = 3472326, upload-time = "2026-03-02T15:57:54.423Z" }, + { url = "https://files.pythonhosted.org/packages/f4/33/cea7dfc31b52904efe3dcdc169eb4514078887dff1f5ae28a7f4c5d54b3c/sqlalchemy-2.0.48-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e004aa9248e8cb0a5f9b96d003ca7c1c0a5da8decd1066e7b53f59eb8ce7c62b", size = 3478453, upload-time = "2026-03-02T16:04:44.584Z" }, + { url = "https://files.pythonhosted.org/packages/c8/95/32107c4d13be077a9cae61e9ae49966a35dc4bf442a8852dd871db31f62e/sqlalchemy-2.0.48-cp314-cp314t-win32.whl", hash = "sha256:b8438ec5594980d405251451c5b7ea9aa58dda38eb7ac35fb7e4c696712ee24f", size = 2147209, upload-time = "2026-03-02T15:52:54.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/d7/1e073da7a4bc645eb83c76067284a0374e643bc4be57f14cc6414656f92c/sqlalchemy-2.0.48-cp314-cp314t-win_amd64.whl", hash = "sha256:d854b3970067297f3a7fbd7a4683587134aa9b3877ee15aa29eea478dc68f933", size = 2182198, upload-time = "2026-03-02T15:52:55.606Z" }, + { url = "https://files.pythonhosted.org/packages/f1/69/c84f10a7fb0d6c50c0f6028cab1373ac1bc70a824d53bf857c33eddde5c4/sqlalchemy-2.0.48-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4599a95f9430ae0de82b52ff0d27304fe898c17cb5f4099f7438a51b9998ac77", size = 2160429, upload-time = "2026-03-02T15:44:11.019Z" }, + { url = "https://files.pythonhosted.org/packages/ed/c8/2e0de4efcba76ae8cc84000bc0aedf45f7d2674a7d8cf66b884a03c3f310/sqlalchemy-2.0.48-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f27f9da0a7d22b9f981108fd4b62f8b5743423388915a563e651c20d06c1f457", size = 3236035, upload-time = "2026-03-02T16:01:29.41Z" }, + { url = "https://files.pythonhosted.org/packages/86/93/0822c24212a2943b3df02a02c49b2b32ab67705eaa0d2f40f28f9c2e8084/sqlalchemy-2.0.48-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8fcccbbc0c13c13702c471da398b8cd72ba740dca5859f148ae8e0e8e0d3e7e", size = 3235358, upload-time = "2026-03-02T16:07:58.002Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ce/f1c7c16d5ea0e4fbc14b473f02daedef8d77c582ef3c18b30b7307f85cff/sqlalchemy-2.0.48-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a5b429eb84339f9f05e06083f119ad814e6d85e27ecbdf9c551dfdbb128eaf8a", size = 3185479, upload-time = "2026-03-02T16:01:32.781Z" }, + { url = "https://files.pythonhosted.org/packages/6c/b8/95cb9642e608d02a0fd96bb3f7571b20a081313a178e1e661cc5dba37472/sqlalchemy-2.0.48-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bcb8ebbf2e2c36cfe01a94f2438012c6a9d494cf80f129d9753bcdf33bfc35a6", size = 3207488, upload-time = "2026-03-02T16:07:59.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/cd/0dda04e28df0db4ed0b7d374f7eb7da8566db523dbac9f627cc6e0422c6d/sqlalchemy-2.0.48-cp39-cp39-win32.whl", hash = "sha256:e214d546c8ecb5fc22d6e6011746082abf13a9cf46eefb45769c7b31407c97b5", size = 2119494, upload-time = "2026-03-02T15:50:24.983Z" }, + { url = "https://files.pythonhosted.org/packages/d7/1d/a98057e05608316cd3c2710f0b3d35e83cec6bdf00833b53a02235a1712f/sqlalchemy-2.0.48-cp39-cp39-win_amd64.whl", hash = "sha256:b8fc3454b4f3bd0a368001d0e968852dad45a873f8b4babd41bc302ec851a099", size = 2142903, upload-time = "2026-03-02T15:50:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/46/2c/9664130905f03db57961b8980b05cab624afd114bf2be2576628a9f22da4/sqlalchemy-2.0.48-py3-none-any.whl", hash = "sha256:a66fe406437dd65cacd96a72689a3aaaecaebbcd62d81c5ac1c0fdbeac835096", size = 1940202, upload-time = "2026-03-02T15:52:43.285Z" }, ] [[package]] @@ -1761,21 +1075,23 @@ wheels = [ ] [[package]] -name = "taskiq-postgresql" -version = "0.3.2" +name = "taskiq-sqlalchemy" +version = "0.0.1" source = { editable = "." 
} dependencies = [ + { name = "sqlalchemy" }, { name = "taskiq" }, ] [package.optional-dependencies] -asyncpg = [ +all = [ { name = "asyncpg" }, -] -psqlpy = [ { name = "psqlpy" }, + { name = "psycopg", extra = ["binary", "pool"] }, ] -psycopg = [ +postgresql = [ + { name = "asyncpg" }, + { name = "psqlpy" }, { name = "psycopg", extra = ["binary", "pool"] }, ] @@ -1783,9 +1099,6 @@ psycopg = [ dev = [ { name = "anyio" }, { name = "asyncpg-stubs" }, - { name = "autoflake" }, - { name = "black" }, - { name = "flake8" }, { name = "pre-commit" }, { name = "pytest" }, { name = "pytest-cov" }, @@ -1793,47 +1106,30 @@ dev = [ { name = "pytest-timeout" }, { name = "pytest-xdist" }, { name = "ruff" }, - { name = "twine" }, - { name = "wemake-python-styleguide", version = "0.19.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "wemake-python-styleguide", version = "1.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "yesqa" }, ] [package.metadata] requires-dist = [ - { name = "asyncpg", marker = "extra == 'asyncpg'", specifier = ">=0.30.0" }, - { name = "psqlpy", marker = "extra == 'psqlpy'", specifier = ">=0.11.3" }, - { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'psycopg'", specifier = ">=3.2.9" }, + { name = "asyncpg", marker = "extra == 'postgresql'", specifier = ">=0.30.0" }, + { name = "psqlpy", marker = "extra == 'postgresql'", specifier = ">=0.11.3" }, + { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'postgresql'", specifier = ">=3.2.9" }, + { name = "sqlalchemy", specifier = ">=2" }, { name = "taskiq", specifier = ">=0.11.7" }, + { name = "taskiq-sqlalchemy", extras = ["postgresql"], marker = "extra == 'all'" }, ] -provides-extras = ["asyncpg", "psqlpy", "psycopg"] +provides-extras = ["all", "postgresql"] [package.metadata.requires-dev] dev = [ - { name = "anyio", specifier = ">=4.6.2.post1" }, - { name 
= "asyncpg-stubs", specifier = ">=0.29.1" }, - { name = "autoflake", specifier = ">=2.3.1" }, - { name = "black", specifier = ">=24.10.0" }, - { name = "flake8", specifier = ">=7.1.1" }, - { name = "pre-commit", specifier = ">=4.0.1" }, - { name = "pytest", specifier = ">=8.3.3" }, - { name = "pytest-cov", specifier = ">=5.0.0" }, - { name = "pytest-env", specifier = ">=1.1.5" }, - { name = "pytest-timeout", specifier = ">=2.4.0" }, - { name = "pytest-xdist", specifier = ">=3.6.1" }, - { name = "ruff", specifier = ">=0.6.9" }, - { name = "twine", specifier = ">=5.1.1" }, - { name = "wemake-python-styleguide", specifier = ">=0.19.2" }, - { name = "yesqa", specifier = ">=1.5.0" }, -] - -[[package]] -name = "tokenize-rt" -version = "6.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/69/ed/8f07e893132d5051d86a553e749d5c89b2a4776eb3a579b72ed61f8559ca/tokenize_rt-6.2.0.tar.gz", hash = "sha256:8439c042b330c553fdbe1758e4a05c0ed460dbbbb24a606f11f0dee75da4cad6", size = 5476, upload-time = "2025-05-23T23:48:00.035Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/f0/3fe8c6e69135a845f4106f2ff8b6805638d4e85c264e70114e8126689587/tokenize_rt-6.2.0-py2.py3-none-any.whl", hash = "sha256:a152bf4f249c847a66497a4a95f63376ed68ac6abf092a2f7cfb29d044ecff44", size = 6004, upload-time = "2025-05-23T23:47:58.812Z" }, + { name = "anyio" }, + { name = "asyncpg-stubs" }, + { name = "pre-commit" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "pytest-env" }, + { name = "pytest-timeout" }, + { name = "pytest-xdist" }, + { name = "ruff" }, ] [[package]] @@ -1875,27 +1171,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, ] -[[package]] -name = 
"twine" -version = "6.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "id" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "keyring", marker = "platform_machine != 'ppc64le' and platform_machine != 's390x'" }, - { name = "packaging" }, - { name = "readme-renderer" }, - { name = "requests" }, - { name = "requests-toolbelt" }, - { name = "rfc3986" }, - { name = "rich" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c8/a2/6df94fc5c8e2170d21d7134a565c3a8fb84f9797c1dd65a5976aaf714418/twine-6.1.0.tar.gz", hash = "sha256:be324f6272eff91d07ee93f251edf232fc647935dd585ac003539b42404a8dbd", size = 168404, upload-time = "2025-01-21T18:45:26.758Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/b6/74e927715a285743351233f33ea3c684528a0d374d2e43ff9ce9585b73fe/twine-6.1.0-py3-none-any.whl", hash = "sha256:a47f973caf122930bf0fbbf17f80b83bc1602c9ce393c7845f289a3001dc5384", size = 40791, upload-time = "2025-01-21T18:45:24.584Z" }, -] - [[package]] name = "typing-extensions" version = "4.14.1" @@ -1926,15 +1201,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, ] -[[package]] -name = "urllib3" -version = "2.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, -] - [[package]] name = "virtualenv" version = "20.34.0" @@ -1950,70 +1216,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" }, ] -[[package]] -name = "wemake-python-styleguide" -version = "0.19.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "astor", marker = "python_full_version < '3.10'" }, - { name = "attrs", marker = "python_full_version < '3.10'" }, - { name = "darglint", marker = "python_full_version < '3.10'" }, - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "flake8-bandit", marker = "python_full_version < '3.10'" }, - { name = "flake8-broken-line", marker = "python_full_version < '3.10'" }, - { name = "flake8-bugbear", marker = "python_full_version < '3.10'" }, - { name = "flake8-commas", marker = "python_full_version < '3.10'" }, - { name = "flake8-comprehensions", marker = "python_full_version < '3.10'" }, - { name = "flake8-debugger", marker = "python_full_version < '3.10'" }, - { name = "flake8-docstrings", marker = "python_full_version < '3.10'" }, - { name = "flake8-eradicate", marker = "python_full_version < '3.10'" }, - { name = "flake8-isort", marker = "python_full_version < '3.10'" }, - { name = "flake8-quotes", marker = "python_full_version < '3.10'" }, - { name = "flake8-rst-docstrings", marker = "python_full_version < '3.10'" }, - { name = "flake8-string-format", marker = "python_full_version 
< '3.10'" }, - { name = "pep8-naming", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "setuptools", marker = "python_full_version < '3.10'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c2/f4/2a76c59661fae8534b81e992a37d347de241b242aaf5bc651b10d24b7025/wemake_python_styleguide-0.19.2.tar.gz", hash = "sha256:850fe70e6d525fd37ac51778e552a121a489f1bd057184de96ffd74a09aef414", size = 168472, upload-time = "2024-03-26T15:47:38.412Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/c4/0e36d00c88e995f2a0e5de8c61bb130a4acdc1b458b6bf8c7a474b127890/wemake_python_styleguide-0.19.2-py3-none-any.whl", hash = "sha256:d53205dbb629755026d853d15fb3ca03ebb2717c97de4198b5676b9bdc0663bd", size = 224081, upload-time = "2024-03-26T15:47:35.767Z" }, -] - -[[package]] -name = "wemake-python-styleguide" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", -] -dependencies = [ - { name = "attrs", marker = "python_full_version >= '3.10'" }, - { name = "flake8", marker = "python_full_version >= '3.10'" }, - { name = "pygments", marker = "python_full_version >= '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/de/59/489140f56e1d21c1785066f06ec19b539f5bd8f1d572983b9fdc1071979f/wemake_python_styleguide-1.3.0.tar.gz", hash = "sha256:b8fcbeb1271a0a324c30daca2940c4cf769b14215a57ba55412af543cc153c77", size = 156768, upload-time = "2025-07-13T06:22:44.689Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/75/ded3793e08269a4e0e1551ff94aafc7efd32d21de442b8d3960f314955fa/wemake_python_styleguide-1.3.0-py3-none-any.whl", hash = "sha256:3fd39228b80442f22bc4068c57930c9d468f6f5e91f52ca3b13ba5e286d63fad", size = 218520, upload-time = "2025-07-13T06:22:43.034Z" }, -] - -[[package]] -name = "yesqa" -version = 
"1.5.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8" }, - { name = "tokenize-rt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9e/14/20b1afffaea2fe0259f40fcd489670496b86f76777db26b690f6042d38ff/yesqa-1.5.0.tar.gz", hash = "sha256:209c17274f3c0cc22195ddb26f93df3523634d0756f756913ed05cba970efad9", size = 4421, upload-time = "2023-06-10T20:33:14.805Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/33/c5b597c690bf3c4ef0b7b93a71e7a254954f8ad6e153a5b614dc50f8da6f/yesqa-1.5.0-py2.py3-none-any.whl", hash = "sha256:600c6d6e59aff9d85422c28c0d0ff016c040f4eb653636632c18fb6c43453e00", size = 4682, upload-time = "2023-06-10T20:33:13.368Z" }, -] - [[package]] name = "zipp" version = "3.23.0" From aa97b7f4e7359e995fc5a58e56a8a4394afc76c8 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sat, 28 Mar 2026 20:01:08 +0530 Subject: [PATCH 02/25] Add ty.toml --- ty.toml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 ty.toml diff --git a/ty.toml b/ty.toml new file mode 100644 index 0000000..86aa497 --- /dev/null +++ b/ty.toml @@ -0,0 +1,5 @@ +[src] +exclude = ["**/dist", "**/build", "**/venv*"] + +[environment] +python-version = "3.9" From 74d4290138b164cd218d9b050d8f76083ac270a1 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sat, 28 Mar 2026 20:02:24 +0530 Subject: [PATCH 03/25] Delete .python-version --- .python-version | 1 - 1 file changed, 1 deletion(-) delete mode 100644 .python-version diff --git a/.python-version b/.python-version deleted file mode 100644 index bd28b9c..0000000 --- a/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.9 From f75e408391cafb8d73bf2d0dc19faa8a1a148fb0 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sat, 28 Mar 2026 20:02:46 +0530 Subject: [PATCH 04/25] Setup base project --- taskiq_sqlalchemy/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 taskiq_sqlalchemy/__init__.py diff --git a/taskiq_sqlalchemy/__init__.py 
b/taskiq_sqlalchemy/__init__.py new file mode 100644 index 0000000..e69de29 From 400c464cc93fcd99c1e27446be830ac9e77c4cf1 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sun, 29 Mar 2026 13:51:20 +0530 Subject: [PATCH 05/25] deps: Add dependencies for postgres and sqlite --- pyproject.toml | 21 +++--- uv.lock | 174 +++++++++++++++++++------------------------------ 2 files changed, 78 insertions(+), 117 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0def7e9..c572f25 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,21 +40,17 @@ keywords = [ "async", "postgresql", "asyncpg", - "psqlpy", - "psycopg3", + "psycopg3", "sqlalchemy", ] -dependencies = ["taskiq>=0.11.7", 'sqlalchemy>=2'] +dependencies = ["taskiq>=0.11.7", 'sqlalchemy>=2', 'anyio>=4'] [project.optional-dependencies] -all = ["taskiq_sqlalchemy[postgresql]"] +all = ["taskiq_sqlalchemy[postgresql,sqlite]"] -postgresql = [ - "asyncpg>=0.30.0", - "psqlpy>=0.11.3", - "psycopg[binary,pool]>=3.2.9", -] +postgresql = ["asyncpg", "psycopg[binary,pool]"] +sqlite = ["aiosqlite"] [project.urls] Homepage = "https://github.com/corridor/taskiq-sqlalchemy" @@ -63,14 +59,19 @@ Documentation = "https://github.com/corridor/taskiq-sqlalchemy" [dependency-groups] dev = [ "ruff", + "ty", "pytest", + "pytest-anyio", "pytest-cov", "pytest-env", "asyncpg-stubs", "pre-commit", "pytest-xdist", - "anyio", "pytest-timeout", + # Drivers for result-backend tests + "aiosqlite", # SQLite async driver — zero external service + "asyncpg", # PostgreSQL asyncpg driver (optional in local dev) + "psycopg", # PostgreSQL psycopg3 driver (optional in local dev) ] [tool.setuptools.packages.find] diff --git a/uv.lock b/uv.lock index f046470..fade64a 100644 --- a/uv.lock +++ b/uv.lock @@ -10,6 +10,15 @@ required-markers = [ "python_full_version == '3.10.*'", ] +[[package]] +name = "aiosqlite" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" }, +] + [[package]] name = "annotated-types" version = "0.7.0" @@ -472,104 +481,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, ] -[[package]] -name = "psqlpy" -version = "0.11.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/9e/95659a30735fbaa37f30694a0ada3afc30ee1c12f30eca73cea233cc090c/psqlpy-0.11.6.tar.gz", hash = "sha256:7dd11253fb17059db9ddf41c0b96497ba107855905fbcdfee7f7ce3ea1745adb", size = 290188, upload-time = "2025-08-14T17:23:11.46Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/aa/7a6d62eb8f0a09f6ba9998be29874ee31a352182304d0a04bcd910835b45/psqlpy-0.11.6-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952e2e4e0bdaa29eaf5967a82f2368f03639f2dfbe9d3c7e0b97b55830a79b27", size = 4310132, upload-time = "2025-08-14T17:20:46.739Z" }, - { url = "https://files.pythonhosted.org/packages/5b/9e/46bc085089f6671f4a5c9888e23113f556edf08e968b1870c5a28b9b22e7/psqlpy-0.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2955068c9bd5873ef77a5b86bcbe4825ce859a55c2c06ab47743d1c075f21e6c", size = 4516406, upload-time = "2025-08-14T17:20:49.488Z" }, - { 
url = "https://files.pythonhosted.org/packages/7f/c3/ae318e617f90a98de447292431e2ebe88130238656b7f52789363342f2a9/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec7677fc00eee7df29c5727eca54ea496686980ba00dd48311f231b06e01e12e", size = 5042443, upload-time = "2025-08-14T17:20:50.754Z" }, - { url = "https://files.pythonhosted.org/packages/9b/77/1599e3724033ac886db35399023e98d9050bebf7d709a7f76b49fb611aa4/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91dbf904c693d6432ff3a221423d62968a212151b1e945122e74dddce813e150", size = 4298972, upload-time = "2025-08-14T17:20:52.187Z" }, - { url = "https://files.pythonhosted.org/packages/f9/3b/22a57f247755d44388a046530902403d1230c859cc23aad85d242a90f319/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a8f7813be18eda28c313c7fbe94ea2f9a3ca8d376d1a7b04ae4f5a74beb4009", size = 4925185, upload-time = "2025-08-14T17:20:54.024Z" }, - { url = "https://files.pythonhosted.org/packages/b0/78/e386bf4e69d9d594b28e6f22fe000c00eb5a107eeb09e0cdd94c2baa6862/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8411f44e2235bd3f71ed9cc6cca1b889be67e9edae2f8a33698aaaab9ea7a030", size = 5040564, upload-time = "2025-08-14T17:20:55.453Z" }, - { url = "https://files.pythonhosted.org/packages/fa/ef/2d5ca961828c66a2d13638405c6732d89ee7fae88e7b797c7cf217b0723b/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d760b4e0f41e9823a686fed49271abe8be8350be2d8f0c9f75c5a1c180f6c72a", size = 4700508, upload-time = "2025-08-14T17:20:56.812Z" }, - { url = "https://files.pythonhosted.org/packages/95/56/e42506053900a4b8d1a327ceebab47320a537526b82812ba1731417a1e3e/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa27eaf360da589fc5212bf58b0edc632f894f8c70ab16c790b55e3bbeedb8e", size = 4928317, upload-time = "2025-08-14T17:20:58.18Z" 
}, - { url = "https://files.pythonhosted.org/packages/fd/5d/c09b2ac6adfd1c8ef78b9cbc5f2291dbe898283fcd10c63be231ff0ddfd6/psqlpy-0.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2f6cc54f33d377dd8db562b4b6691f2af9c9bedf367bc5d7267547bf74d1ccce", size = 4957986, upload-time = "2025-08-14T17:20:59.538Z" }, - { url = "https://files.pythonhosted.org/packages/9e/57/4ad36f2260bff41e1de26a7877ec5b5c503a846b9c97a8f7669e33643269/psqlpy-0.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:38dd049421c6c7b8d6ee4a88d6404f2af2f4a5cf7722e70201bb1709ba371fb5", size = 5068208, upload-time = "2025-08-14T17:21:00.964Z" }, - { url = "https://files.pythonhosted.org/packages/67/b7/1d9aec7918ed3eab855bca1f3acf42e92e2ddf86fddc3a04de0e76292e10/psqlpy-0.11.6-cp310-cp310-win32.whl", hash = "sha256:f55f816f177db68ab216c8b49432dbf6144db1a3e2867f9f4d2feae0160f7f53", size = 3351818, upload-time = "2025-08-14T17:21:02.815Z" }, - { url = "https://files.pythonhosted.org/packages/80/4d/2b8da947e878c3ade44ba083fa60d57e1b20e5d30dc5879a4c19bb7de3ea/psqlpy-0.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:efc3a488d1cd63813be4f1863de1da9b3ae8d82e270113f365cfcbc2cf961083", size = 3761093, upload-time = "2025-08-14T17:21:04.648Z" }, - { url = "https://files.pythonhosted.org/packages/84/33/add408874f088dbe585d22653dd05dafb50776038d6282c54821877d61e0/psqlpy-0.11.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:205936327b0830298864c46ae2a43ff93a66985c7ee4e66cba7991592393c5c7", size = 4309332, upload-time = "2025-08-14T17:21:06.071Z" }, - { url = "https://files.pythonhosted.org/packages/57/b4/971c5464dec16afea6344d5808e187f5e8e8b3daa3d18878ba9fce9c89c0/psqlpy-0.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ef7e8af9dc40cfd24df7d680e4c9277d6c994acdd58da1dabe823247dbffad9", size = 4515551, upload-time = "2025-08-14T17:21:07.391Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/22/aaef103bd5b8c04650c83258b4b241f967afaf76b32d11c92366234e6e1e/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d8ff15e7781e0d7e70f22d8643eb952010623ba34ae8c4868cf2ec58ff24eb", size = 5039985, upload-time = "2025-08-14T17:21:08.765Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6f/a11d8dbc48be3a1587c37990972568c9da58503ec782c5ad905df6d54947/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11ab85aa5dbf64725f6d4ed809bef814ad9023891381d9775b9aaecf0f9b420e", size = 4300029, upload-time = "2025-08-14T17:21:10.377Z" }, - { url = "https://files.pythonhosted.org/packages/11/22/0461a9372a27e9cd4969431dfa7076b3c9fcad79d5b8ff0ddfc15080939a/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fbf09fdc049b7dd6f820e61d59ccf325a4f89f2bd133e0f87cb1937b0bba8b", size = 4926113, upload-time = "2025-08-14T17:21:12.112Z" }, - { url = "https://files.pythonhosted.org/packages/8b/e8/fcd0d5eb78d860b705b36e6f569eb47e0b69f2d793983dda46271b2819e3/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e09b716eebc184e08125c858dda6bf7c9f3be6bc9f9bd28f95a7cdceac0dff82", size = 5041647, upload-time = "2025-08-14T17:21:14.338Z" }, - { url = "https://files.pythonhosted.org/packages/4a/a9/a9e75a58c3e9f95479ab7ecd7f40cbb4fd1c3f21a52e434418efdb3129a3/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1c1d4a21000ee8dcb70ba7a6de5426f49354adcc205786b9a95bdd4e6151ba1", size = 4700315, upload-time = "2025-08-14T17:21:15.861Z" }, - { url = "https://files.pythonhosted.org/packages/1c/2c/d1262e9b2022e51b489e8b5bcc95d136b2247f856f433ac8c70d4fcb0501/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d4055b8496b9634540af83772575f3c8c08771279809e9fb0f1c04a8e9b8ece", size = 4928905, upload-time = "2025-08-14T17:21:17.398Z" }, - 
{ url = "https://files.pythonhosted.org/packages/cd/9f/19b96fbbc5ebac42ff87e02fd251db8251b68347acfa57fde146b4bc57aa/psqlpy-0.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:02a5c726d804c14517040ddaee3f698c0b18ddb7aeeef08c490a909ad3a170aa", size = 4958259, upload-time = "2025-08-14T17:21:19.189Z" }, - { url = "https://files.pythonhosted.org/packages/7f/dc/00cc7dd7c067af6da1eb2ede5be062edf3b0e2a289dc5d02555cf4481949/psqlpy-0.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:10521d700d136e08a3817ba78a494171fc6d52749acda7b7e33339120297e8ae", size = 5068671, upload-time = "2025-08-14T17:21:20.935Z" }, - { url = "https://files.pythonhosted.org/packages/4e/b1/40235eac320084198a8a7ef12a9a01794e70921a78537ec30246b58dee47/psqlpy-0.11.6-cp311-cp311-win32.whl", hash = "sha256:1e06d5d7c437246568142caf675ec33c9f29be2683dc1793d3030e61bced8e76", size = 3351919, upload-time = "2025-08-14T17:21:22.355Z" }, - { url = "https://files.pythonhosted.org/packages/05/82/d5513aedb7ea0137ff9a4fc9c1f001804e4d7dcf2a975ad8ed97768459b4/psqlpy-0.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:3c20cae9fda38654931682c688f65610d93f7539de46f5b7353da8633815d166", size = 3760582, upload-time = "2025-08-14T17:21:24.007Z" }, - { url = "https://files.pythonhosted.org/packages/79/fa/a0f9c4fa00faf4d480c402502db76f83dcdb759b45d73d274d3085872e08/psqlpy-0.11.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fbfda19815ae02757b69639b21e0492dbe28d832fdc0cd083b599745c72f3287", size = 4286126, upload-time = "2025-08-14T17:21:25.615Z" }, - { url = "https://files.pythonhosted.org/packages/80/da/4ad1d15e948804fc691abd6e25c3b1f97aa0d005c3e78c359f88111c3be9/psqlpy-0.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b6ff1d79ec1e1028ac30cb08381821a4a78ecd823dd2be6cf779dde008da9dde", size = 4493871, upload-time = "2025-08-14T17:21:27.032Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/52/4cb68092a3df6d89869657dc0b44172f97067789ad1960ec8196cf29d91c/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b07b5a46a7a8b08e9d9cd7c7765a2d2b875af2b6aa26fe30433b42de56f8bf3", size = 5033454, upload-time = "2025-08-14T17:21:28.67Z" }, - { url = "https://files.pythonhosted.org/packages/08/fa/3b253694faf2369295edf2a52bb3149ca7ba13d37a90c12cfdf454a1cbba/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:170059e1ad89a66c3e7106cae1eaf5bcb6e5a13fa6565c59e1858e862c50f7ec", size = 4286197, upload-time = "2025-08-14T17:21:30.407Z" }, - { url = "https://files.pythonhosted.org/packages/24/30/11867f7182bf13e8768415198d064a1a4f2082fb77c66e404dd12533a51b/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:85986d0b516457c088b22dfbcbe6ef060d9ee9d6a120c38f29d4b1104826a8f0", size = 4899752, upload-time = "2025-08-14T17:21:31.914Z" }, - { url = "https://files.pythonhosted.org/packages/07/4b/ff8babcf13db850ecbffce93fe86f50712c35f2949c8e2ce10497eea68f4/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08dc8ccf395e0abeb1425fdda8d3d9b22840ff27358bdcbbe01a0ee1b780b549", size = 5018867, upload-time = "2025-08-14T17:21:33.368Z" }, - { url = "https://files.pythonhosted.org/packages/51/05/5ac0d6564e55a920620a9f82c6e13c299254d933000b96a7056b941e652f/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fa85619260d86937f95ad6c089968b8f03af3b62623982df468b72fde667ff5", size = 4677657, upload-time = "2025-08-14T17:21:34.998Z" }, - { url = "https://files.pythonhosted.org/packages/27/68/f4cabf3a24f0ac34b82e3f3da2415040b58ec8f0371c849bb72707927525/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:461f5bd3a226096ff7fb2e8dc702e48b231d7896151621c1f9f0f1cd9475d8e7", size = 4926305, upload-time = "2025-08-14T17:21:36.55Z" }, - { 
url = "https://files.pythonhosted.org/packages/ea/90/0cbb0ce1e8083aea902586a65093a9041c182dd28c7a3b4c3028fa8b6588/psqlpy-0.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3199ff6bf5a92e98546288ed23a80c586e663583cc8e15ffdc24ed1cad4b2251", size = 4938958, upload-time = "2025-08-14T17:21:38.171Z" }, - { url = "https://files.pythonhosted.org/packages/a0/c9/2abadb734cf55a1c073c4dc08cf78ce75a9d438c8bfeabb6bb42cdb4ff15/psqlpy-0.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e15da8ee6c853397a9c9dc555c3b57f848a7e7a25a0595fdbaa3c19a390160cf", size = 5065920, upload-time = "2025-08-14T17:21:40.117Z" }, - { url = "https://files.pythonhosted.org/packages/09/d6/62d0213295a63c1a4d57e8b22cfe82ff107ab5c59cc1db17de256b163d74/psqlpy-0.11.6-cp312-cp312-win32.whl", hash = "sha256:0e5b482677e21d7f03455105ac4b2ffc11411d3e7409da8870a866b3558c902b", size = 3351654, upload-time = "2025-08-14T17:21:41.766Z" }, - { url = "https://files.pythonhosted.org/packages/72/05/7c2dfc0435bc99afed67ef33b0cab6d9c4a444ae53110abb2d686534f205/psqlpy-0.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:6ef2ab055a11f920c8a021561acc9c1a58d06c215bb8ac992ddf7f79e44f7c89", size = 3766514, upload-time = "2025-08-14T17:21:43.213Z" }, - { url = "https://files.pythonhosted.org/packages/cf/6a/69a76e2f47988109389af3df81700619f6fa2566e368d5e79951e90fa562/psqlpy-0.11.6-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1508a64de1d32f24b7518521fbcfa6e2b775edd615c26491c50acefe617bef36", size = 4284249, upload-time = "2025-08-14T17:21:44.558Z" }, - { url = "https://files.pythonhosted.org/packages/de/78/6d0de2107b8bc9c91ac9db505a831091ac6fda9ef34490f699263e29a009/psqlpy-0.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4e35a360e6f900c2cb7f067a4ab94f7ee0c84eb240ade31f8e743a923b14f184", size = 4494620, upload-time = "2025-08-14T17:21:45.949Z" }, - { url = 
"https://files.pythonhosted.org/packages/c1/75/1b9d1fe6c3334a2b14a754531fa919f3662c4a3cf324bca32191effa5fc3/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6ddc99e9750a4227ac84caef469d7a5b991df32aca2f8e3a0b01ce870c8c93e", size = 5028860, upload-time = "2025-08-14T17:21:47.478Z" }, - { url = "https://files.pythonhosted.org/packages/9e/af/b946866dbf590411e6558e5d40a7f7a2b5ebc830c692c038c48e6d8789c3/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a0b9905602f41770bd43d85be4d9159db719c2c82127c396625e3a7e15f2dd39", size = 4288707, upload-time = "2025-08-14T17:21:49.413Z" }, - { url = "https://files.pythonhosted.org/packages/c6/81/2c18b3bf2836b5311fc8f451211fe8a3e9fd7e39923fd2bd1afa121dd123/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af8b5e3f013235355e022a7f07179948dbe9645ae990e13cfa01c4bfcba73764", size = 4915394, upload-time = "2025-08-14T17:21:51.311Z" }, - { url = "https://files.pythonhosted.org/packages/df/c7/a6df171d6e12b22d266801543c9b61922744146f827dfc92eea2233b6bdd/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0dceb698fb324729a31b3668eeb65bf107af78bdc14f3d10bf27658242fc3e46", size = 5016572, upload-time = "2025-08-14T17:21:52.848Z" }, - { url = "https://files.pythonhosted.org/packages/bb/f4/09e63cd42e5ab6337136db09e9c5d56d1f509a41826fa76c94b5b738b382/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:541c8025f94226d2521c4e8de1cbc22a5b180f0bbcd75925fbfc04cd849483d8", size = 4694538, upload-time = "2025-08-14T17:21:54.215Z" }, - { url = "https://files.pythonhosted.org/packages/d6/77/6aaec569b7443321dde4a2c67ca02cc4399b637bf85d22cd835a5a92af40/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7e34ddba63cfe66aba152df22c9c6b505974069e6ad53b54389bc46b0fc8e6c", size = 4923742, upload-time = "2025-08-14T17:21:56.409Z" }, - 
{ url = "https://files.pythonhosted.org/packages/a4/fd/03b19e43f7a31ddea1a55bed49c3b73a104aa778f5d0c32f378c8d12c9d2/psqlpy-0.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f5c30dabc70b93627d6897054ab2c6c74c65f00ca11f3bc755044f753868c0e4", size = 4947137, upload-time = "2025-08-14T17:21:57.867Z" }, - { url = "https://files.pythonhosted.org/packages/50/bf/c7b5082c3709c3fa2c08d09971b012979a9d879f0a5ed22c7995c94ef95a/psqlpy-0.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b633ae41fd5d1295655a338c595af3f020722af312c3b541b53d1caa6882afe8", size = 5063961, upload-time = "2025-08-14T17:22:00.005Z" }, - { url = "https://files.pythonhosted.org/packages/74/6b/48089ae018f9d2fef0d8859248ef33c6b0d04299f73a554ef3a7680a2673/psqlpy-0.11.6-cp313-cp313-win32.whl", hash = "sha256:6aca8c34509c25e49c651ec59b7e164ea45a63111c0c11bdadc222c8ca714eed", size = 3351887, upload-time = "2025-08-14T17:22:01.422Z" }, - { url = "https://files.pythonhosted.org/packages/f1/74/868413b3be5b07a5b6dae9d73f71718ebb99844e0f072912490ef6f7696a/psqlpy-0.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:8e601faf0e7cf771fa118e89487c805a53271185aea63025b18914feb982ee4a", size = 3767978, upload-time = "2025-08-14T17:22:02.793Z" }, - { url = "https://files.pythonhosted.org/packages/7d/ff/96d672cb6d62b593ec6d2a53072663b72f8b4cc7c782d9b16e63a23d1f8e/psqlpy-0.11.6-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3a36263ac9ed535cb066dd9ffa75eba7c2a3d622ebadf1ee90a1adaf33e1c3dd", size = 4310264, upload-time = "2025-08-14T17:22:04.118Z" }, - { url = "https://files.pythonhosted.org/packages/df/33/8011fc5457c94d7462f667a20a6959f44d3a96ae3f6f9c697974265b3590/psqlpy-0.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:efa42516e911ed9c76ab054fd061d2456f89f20d55807d645a9ad47571234b57", size = 4518025, upload-time = "2025-08-14T17:22:05.529Z" }, - { url = 
"https://files.pythonhosted.org/packages/95/50/63dccb212fda68b16a9d5169d306ac38d3efdba1b8b6cf74a2cd26979b61/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaaa658b6204103ad7f03dbc762baed3803de9303d94eac551ac05c57758423", size = 5043163, upload-time = "2025-08-14T17:22:06.914Z" }, - { url = "https://files.pythonhosted.org/packages/d4/12/7900a705a990d7192f45ebf619affbf7dec4db54f3732a9a1616ee542f8a/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:392a763fc2c992d1e45c89a184d3de87189a9f247995d3e16f0871f67a67b781", size = 4299937, upload-time = "2025-08-14T17:22:08.351Z" }, - { url = "https://files.pythonhosted.org/packages/25/01/621c0d19c863019760a45e95f2e88dc007b12aaf6d8ee49f2957923ce218/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff00b4c53f6f188f48ea8d58f63f8699c623bb6ee302c99bf559f61bc78ca5b8", size = 4926194, upload-time = "2025-08-14T17:22:09.983Z" }, - { url = "https://files.pythonhosted.org/packages/7d/d5/343a6a0f3fa88ab2da42bb4b539d46e68c88f4912865e5963f05afee25d9/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d079161fdaf4e3d2c336309161cc8a1f60bda03562872c479e805f2ccf5e7ea0", size = 5042997, upload-time = "2025-08-14T17:22:11.648Z" }, - { url = "https://files.pythonhosted.org/packages/6b/83/32b399b3a2eecafa06afd5eec465f5abbeff90aa175ae98d9dde4079ea0b/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7afa10100009833a40fbc92993ce200d46081293472f6b68aefeafba206517b5", size = 4700624, upload-time = "2025-08-14T17:22:13.156Z" }, - { url = "https://files.pythonhosted.org/packages/5f/bd/841b47e7f2b08021c2cf42788c6a669e2e3ea2cb09cbe0e58b6ba7c76cfb/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295df1f589e65e285e2cad23b4000572f083a563dd5a3ddd043de3a9e9027ab0", size = 4929964, upload-time = "2025-08-14T17:22:14.839Z" }, - { url = 
"https://files.pythonhosted.org/packages/9f/c7/9a7f39a74bd2f16f980b599f61c3afd55e245706f4d6d682e7621598c03e/psqlpy-0.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1ea482e2b4df2b7f5393c35f64927c4fc0ecaf57455f41127cdc78307bbb2320", size = 4958544, upload-time = "2025-08-14T17:22:16.471Z" }, - { url = "https://files.pythonhosted.org/packages/be/bc/a3186ce50e3fe76814c9cd1dc8267eb2d79015292ff346691c62e47379d7/psqlpy-0.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f231871393ff627fdb36607b8d4113bf901e3838bf573003ace6177260db61d4", size = 5069641, upload-time = "2025-08-14T17:22:17.969Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1c/d1d1b84a52949bb91f5a1c5208120b61b165fa24faafb8150709c241dae8/psqlpy-0.11.6-cp39-cp39-win32.whl", hash = "sha256:008407cac3591a7cdf45250727df2aaffe1e077aa40d0d5ef858998078c93c5f", size = 3352566, upload-time = "2025-08-14T17:22:19.526Z" }, - { url = "https://files.pythonhosted.org/packages/7c/01/5572e1e647971f0d4db0278256d57ea8ed80f57bddba9b14bd937ae9a9ae/psqlpy-0.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b6dad4f42b00b34b540cc487e923e181726b887074093e3cdea21d51fc67e3a7", size = 3761797, upload-time = "2025-08-14T17:22:21.09Z" }, - { url = "https://files.pythonhosted.org/packages/60/98/f29b17f931ea17ef673c8a2ac817901e663b2db102660a4902997859c2c2/psqlpy-0.11.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:80ed664f40b259f65c5983e529fd32212ffcb5caa5cddb8ed899f3939547c5c7", size = 4303600, upload-time = "2025-08-14T17:22:22.579Z" }, - { url = "https://files.pythonhosted.org/packages/f1/05/bd23d3c2a3254640f5205e71b41dbd8279b6dcb211d3a0834edc9e560ef5/psqlpy-0.11.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b01cc82cf9af3b5237d8059cdfcf41fce656d300f768e948c20358ce94774862", size = 4503850, upload-time = "2025-08-14T17:22:24.408Z" }, - { url = 
"https://files.pythonhosted.org/packages/94/7f/bcce7a63bc0fca8a43bf81abae97c6cc262c582b4a420252e1eb51c9fb41/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d2eb9dcb0a7f60d045bbe8061b8318b7718a4093eac9cd65fd3c00ebf1544b4", size = 5041335, upload-time = "2025-08-14T17:22:25.843Z" }, - { url = "https://files.pythonhosted.org/packages/dc/09/a385d371f55fb49bc52e0f25c6322b8d49960a080dae43a02f500183e3de/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e14a5539520854e1fd163016a325b39ddbd1edf621492d45383888667847575a", size = 4291607, upload-time = "2025-08-14T17:22:27.342Z" }, - { url = "https://files.pythonhosted.org/packages/5d/2c/86d84124670a1e8900be9f22e662ceda30b110d79229a1255871f1d8f402/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e706d106ca3ce4e67155a229be6e58cf0dcab354be41b2f5a6faf384cc1b049", size = 4921673, upload-time = "2025-08-14T17:22:28.805Z" }, - { url = "https://files.pythonhosted.org/packages/f7/02/89914a07c683c61d128cc341b598e7b3352a6c1e94720033bec88e019dae/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60d4afcf550f8bd2a3ad131b95ad1203149109bc40d07351c4619f79829ed411", size = 5043783, upload-time = "2025-08-14T17:22:30.322Z" }, - { url = "https://files.pythonhosted.org/packages/45/c0/67d3c8c3b5ee9fa562ac48792c84b3a0c8460e13424eb80339aa19dc5ff1/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:809b2cac178d4a707c025963ad93dc5ceef05772f3ee81a1fba664c639eeabcf", size = 4684899, upload-time = "2025-08-14T17:22:32.054Z" }, - { url = "https://files.pythonhosted.org/packages/7c/81/6be3237e807799af8c82634d1c1888c8b4206f4316426b0dbe836dfa2016/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25f2a6551b8b4ae61e5993705ed8a53d3d55f02036b473379e2c3a8208cbcfc", size = 4933885, 
upload-time = "2025-08-14T17:22:33.627Z" }, - { url = "https://files.pythonhosted.org/packages/a5/2d/fcc6d54d78635b559ed651ee31192c695549deef40e3f9e2a3e5a99198da/psqlpy-0.11.6-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a94ed32504d9193a8fa77fd98e9abb7f05d757f75af8e716e67f9a7876449d06", size = 4954441, upload-time = "2025-08-14T17:22:35.12Z" }, - { url = "https://files.pythonhosted.org/packages/04/dc/fb1a8d1f46d35d104a78799ef28ed0e501a92d728a0dc8840bc1cdf550d5/psqlpy-0.11.6-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e4038d7ccc6aef8d72f64d9c1a69be0174b7f014a01b7a4193b2b9ddd5d3606e", size = 5073296, upload-time = "2025-08-14T17:22:36.899Z" }, - { url = "https://files.pythonhosted.org/packages/73/a1/79aa0ab1765ae842438e4d95cfa5aba092a559a9df53730702a8a4b4729f/psqlpy-0.11.6-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:732d6a85c1a69542eebd26fb5b3062eb496cb8d4577d814b6ec523a375b9c0e1", size = 4304262, upload-time = "2025-08-14T17:22:38.463Z" }, - { url = "https://files.pythonhosted.org/packages/4d/c6/b265bd820029867d2ae0610b26c220bf67227be2f5748ba769c59d669556/psqlpy-0.11.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:08a29fd10449d4784c8d90a34265a9f642eec7ede5a04eba3fc714ce13e3531d", size = 4503291, upload-time = "2025-08-14T17:22:39.896Z" }, - { url = "https://files.pythonhosted.org/packages/d0/8a/6f0ca1741f7760183586a5b7dbb2026cb0e39aa53ec6d5959abe965b9d93/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a43c734f27d1dfcc68d45a4462b6274d085f6cafdd7d0699bc10306f956dee", size = 5041363, upload-time = "2025-08-14T17:22:41.35Z" }, - { url = "https://files.pythonhosted.org/packages/51/eb/2494e4369c09e7a9f4db2181eebbcf6abdd31cc56b5c140036b4cd927296/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbcf90d8f3910a1d5039367787bd5a5e354bcba5e2d061c0273c476b68ea8834", size = 4293301, upload-time = "2025-08-14T17:22:43.092Z" 
}, - { url = "https://files.pythonhosted.org/packages/a5/42/1d2c9256bcfc9ecd190d4deb7a89eb19acd85b66aba205b829ad5a19efda/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:463cfdf4a0c84f8d9a6a024a47d1a963db4321cfdca5e9f0c943c91264a64eeb", size = 4922220, upload-time = "2025-08-14T17:22:44.537Z" }, - { url = "https://files.pythonhosted.org/packages/03/75/8385f59670aaeeb2acd091978505605a12bede7a3de6c66f514bcd22b8c3/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f601e96a5d11c81dab2c0de3451203a7c5d148f313fc9774dfd258d4683ea78c", size = 5043188, upload-time = "2025-08-14T17:22:46.103Z" }, - { url = "https://files.pythonhosted.org/packages/6d/d5/383b4d9ff35af7dab12532d4b0bb9c2101ad0bf8c817d3016f6a39e55fb1/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782b0eed2587e91a46dd54300273bbfc55a5d358fd0d9d28311647e2e8412e03", size = 4703050, upload-time = "2025-08-14T17:22:47.852Z" }, - { url = "https://files.pythonhosted.org/packages/2e/52/a13570a371a9f268ded0ee6c2534ea3655878bccea905c1149a351f1ab6b/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0c63c9a41ec6a6abde2c7b9dcb271c08896259911e4268124bfdd632f2c7435", size = 4933316, upload-time = "2025-08-14T17:22:49.418Z" }, - { url = "https://files.pythonhosted.org/packages/11/c4/3c51b817ebc8b8344cc27d68c3d2fec00e0291ce9350acd2a5e16144706c/psqlpy-0.11.6-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:33768b71afde498211f271f5ea0c430ed8ddfdc7ee5f541de4761222d8633c3f", size = 4956086, upload-time = "2025-08-14T17:22:51.008Z" }, - { url = "https://files.pythonhosted.org/packages/b9/bc/2f6e5ba5ca7890c56bfe59b95536557f15ddc17171234edda342e53415c7/psqlpy-0.11.6-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:820fbeac25919fb463de3a50594ee12cd434fa539e483adfda87404dfa602b73", size = 5073593, upload-time = 
"2025-08-14T17:22:52.906Z" }, - { url = "https://files.pythonhosted.org/packages/da/28/de38383c0c01643515df8a067db96b1be03a9f08d273865fa089b61c9a27/psqlpy-0.11.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:713ab646c6eb4a79bb012fceddc8261d8945f973a4e569ed66bd75efea538145", size = 4303717, upload-time = "2025-08-14T17:22:54.818Z" }, - { url = "https://files.pythonhosted.org/packages/66/01/82f4d27a6b34b1e451a584d42f7a3cc59cdafa8fa4e4a19fe5ff06d46cd0/psqlpy-0.11.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c5edca9a74a5321049fd26317941fe997a7f860ee5c18fa1210ac57849b1fa50", size = 4502218, upload-time = "2025-08-14T17:22:56.401Z" }, - { url = "https://files.pythonhosted.org/packages/9a/38/1fee83a44052ce338838839574fc6247b73809093d8ec1045e01943a8898/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3751b11b9852204d9d7f9a4a0013885e50b1efe9c519705ea3d48681c517dd9a", size = 5042449, upload-time = "2025-08-14T17:22:58.149Z" }, - { url = "https://files.pythonhosted.org/packages/30/92/2d7822567f94221205057ba6c31b5e300dc092a2774574b14e340185ce3a/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a218444193d30305088fb10b5f914454213d30ce72ec1fbc2d704955611a5d6f", size = 4294799, upload-time = "2025-08-14T17:22:59.711Z" }, - { url = "https://files.pythonhosted.org/packages/26/41/8f62d6fee34d829000e9c7b6ded565af6e477bf8acc5d86c05be85cc0db3/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:726ff110b1165ad500f8f815710d6c74e2f80f96f8e6837e01fe0a0e5d70584a", size = 4921904, upload-time = "2025-08-14T17:23:01.226Z" }, - { url = "https://files.pythonhosted.org/packages/cd/f7/7e0250d752b3a9e80546ab80083c3c1fb783cda41af0a8ab4292a795e455/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f84052981060673020c9b0fe0531df067df079350b9155a600f310915f738b27", size = 5044055, upload-time = 
"2025-08-14T17:23:02.871Z" }, - { url = "https://files.pythonhosted.org/packages/af/b6/a73c0627d2b522929762fa471df25da15b174a61930fca6bf3ee9e46e3e8/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3607ba741ada381bb672618c0fe2cc8e0c4bc559cdd444d17c9079947ad94fa8", size = 4684160, upload-time = "2025-08-14T17:23:04.702Z" }, - { url = "https://files.pythonhosted.org/packages/ad/d7/d76c863875b0e7385879a0502d2f67f848bc5f94da9634633966d467f0cb/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0ee9c153fb7500da1d717faecd28d99994cb8419dbe0be4b89e8821b4e9291", size = 4934082, upload-time = "2025-08-14T17:23:06.524Z" }, - { url = "https://files.pythonhosted.org/packages/23/40/613fe3a2139e131e84d138ff9e05ff6194790a8fe402bc0e801b3abb8b42/psqlpy-0.11.6-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:f06cd4e031364c9faa41be8481bcce2876eb7ba1b1b769aa0edb05af17af3b1b", size = 4954714, upload-time = "2025-08-14T17:23:08.176Z" }, - { url = "https://files.pythonhosted.org/packages/4d/04/3abf261a6ad16c92c1b14ae0858ae349c3836820e8aae0d507094fa127df/psqlpy-0.11.6-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d4a128962d01a82a9e0447e4626916d2f1ae9722d0c7898009d8c2a6e0d0ea2b", size = 5073280, upload-time = "2025-08-14T17:23:09.706Z" }, -] - [[package]] name = "psycopg" version = "3.2.9" @@ -825,6 +736,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, ] +[[package]] +name = "pytest-anyio" +version = "0.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "pytest" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/00/44/a02e5877a671b0940f21a7a0d9704c22097b123ed5cdbcca9cab39f17acc/pytest-anyio-0.0.0.tar.gz", hash = "sha256:b41234e9e9ad7ea1dbfefcc1d6891b23d5ef7c9f07ccf804c13a9cc338571fd3", size = 1560, upload-time = "2021-06-29T22:57:30.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/25/bd6493ae85d0a281b6a0f248d0fdb1d9aa2b31f18bcd4a8800cf397d8209/pytest_anyio-0.0.0-py2.py3-none-any.whl", hash = "sha256:dc8b5c4741cb16ff90be37fddd585ca943ed12bbeb563de7ace6cd94441d8746", size = 1999, upload-time = "2021-06-29T22:57:29.158Z" }, +] + [[package]] name = "pytest-cov" version = "6.2.1" @@ -1079,57 +1003,69 @@ name = "taskiq-sqlalchemy" version = "0.0.1" source = { editable = "." } dependencies = [ + { name = "anyio" }, { name = "sqlalchemy" }, { name = "taskiq" }, ] [package.optional-dependencies] all = [ + { name = "aiosqlite" }, { name = "asyncpg" }, - { name = "psqlpy" }, { name = "psycopg", extra = ["binary", "pool"] }, ] postgresql = [ { name = "asyncpg" }, - { name = "psqlpy" }, { name = "psycopg", extra = ["binary", "pool"] }, ] +sqlite = [ + { name = "aiosqlite" }, +] [package.dev-dependencies] dev = [ - { name = "anyio" }, + { name = "aiosqlite" }, + { name = "asyncpg" }, { name = "asyncpg-stubs" }, { name = "pre-commit" }, + { name = "psycopg" }, { name = "pytest" }, + { name = "pytest-anyio" }, { name = "pytest-cov" }, { name = "pytest-env" }, { name = "pytest-timeout" }, { name = "pytest-xdist" }, { name = "ruff" }, + { name = "ty" }, ] [package.metadata] requires-dist = [ - { name = "asyncpg", marker = "extra == 'postgresql'", specifier = ">=0.30.0" }, - { name = "psqlpy", marker = "extra == 'postgresql'", specifier = ">=0.11.3" }, - { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'postgresql'", specifier = ">=3.2.9" }, + { name = "aiosqlite", marker = "extra == 'sqlite'" }, + { name = "anyio", specifier = ">=4" }, + { name = "asyncpg", marker = "extra == 'postgresql'" }, + { name = 
"psycopg", extras = ["binary", "pool"], marker = "extra == 'postgresql'" }, { name = "sqlalchemy", specifier = ">=2" }, { name = "taskiq", specifier = ">=0.11.7" }, - { name = "taskiq-sqlalchemy", extras = ["postgresql"], marker = "extra == 'all'" }, + { name = "taskiq-sqlalchemy", extras = ["postgresql", "sqlite"], marker = "extra == 'all'" }, ] -provides-extras = ["all", "postgresql"] +provides-extras = ["all", "postgresql", "sqlite"] [package.metadata.requires-dev] dev = [ - { name = "anyio" }, + { name = "aiosqlite" }, + { name = "asyncpg" }, { name = "asyncpg-stubs" }, { name = "pre-commit" }, + { name = "psycopg" }, { name = "pytest" }, + { name = "pytest-anyio" }, { name = "pytest-cov" }, { name = "pytest-env" }, { name = "pytest-timeout" }, { name = "pytest-xdist" }, { name = "ruff" }, + { name = "ty" }, ] [[package]] @@ -1171,6 +1107,30 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, ] +[[package]] +name = "ty" +version = "0.0.26" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/94/4879b81f8681117ccaf31544579304f6dc2ddcc0c67f872afb35869643a2/ty-0.0.26.tar.gz", hash = "sha256:0496b62405d62de7b954d6d677dc1cc5d3046197215d7a0a7fef37745d7b6d29", size = 5393643, upload-time = "2026-03-26T16:27:11.067Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/24/99fe33ecd7e16d23c53b0d4244778c6d1b6eb1663b091236dcba22882d67/ty-0.0.26-py3-none-linux_armv6l.whl", hash = "sha256:35beaa56cf59725fd59ab35d8445bbd40b97fe76db39b052b1fcb31f9bf8adf7", size = 10521856, upload-time = "2026-03-26T16:27:06.335Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/97/1b5e939e2ff69b9bb279ab680bfa8f677d886309a1ac8d9588fd6ce58146/ty-0.0.26-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:487a0be58ab0eb02e31ba71eb6953812a0f88e50633469b0c0ce3fb795fe0fa1", size = 10320958, upload-time = "2026-03-26T16:27:13.849Z" }, + { url = "https://files.pythonhosted.org/packages/71/25/37081461e13d38a190e5646948d7bc42084f7bd1c6b44f12550be3923e7e/ty-0.0.26-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a01b7de5693379646d423b68f119719a1338a20017ba48a93eefaff1ee56f97b", size = 9799905, upload-time = "2026-03-26T16:26:55.805Z" }, + { url = "https://files.pythonhosted.org/packages/a1/1c/295d8f55a7b0e037dfc3a5ec4bdda3ab3cbca6f492f725bf269f96a4d841/ty-0.0.26-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:628c3ee869d113dd2bd249925662fd39d9d0305a6cb38f640ddaa7436b74a1ef", size = 10317507, upload-time = "2026-03-26T16:27:31.887Z" }, + { url = "https://files.pythonhosted.org/packages/1d/62/48b3875c5d2f48fe017468d4bbdde1164c76a8184374f1d5e6162cf7d9b8/ty-0.0.26-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:63d04f35f5370cbc91c0b9675dc83e0c53678125a7b629c9c95769e86f123e65", size = 10319821, upload-time = "2026-03-26T16:27:29.647Z" }, + { url = "https://files.pythonhosted.org/packages/ff/28/cfb2d495046d5bf42d532325cea7412fa1189912d549dbfae417a24fd794/ty-0.0.26-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a53c4e6f6a91927f8b90e584a4b12bcde05b0c1870ddff8d17462168ad7947a", size = 10831757, upload-time = "2026-03-26T16:27:37.441Z" }, + { url = "https://files.pythonhosted.org/packages/26/bf/dbc3e42f448a2d862651de070b4108028c543ca18cab096b38d7de449915/ty-0.0.26-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:caf2ced0e58d898d5e3ba5cb843e0ebd377c8a461464748586049afbd9321f51", size = 11369556, upload-time = "2026-03-26T16:26:58.655Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/4c/6d2f8f34bc6d502ab778c9345a4a936a72ae113de11329c1764bb1f204f6/ty-0.0.26-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:384807bbcb7d7ce9b97ee5aaa6417a8ae03ccfb426c52b08018ca62cf60f5430", size = 11085679, upload-time = "2026-03-26T16:27:21.746Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f4/f3f61c203bc980dd9bba0ba7ed3c6e81ddfd36b286330f9487c2c7d041aa/ty-0.0.26-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2c766a94d79b4f82995d41229702caf2d76e5c440ec7e543d05c70e98bf8ab", size = 10900581, upload-time = "2026-03-26T16:27:24.39Z" }, + { url = "https://files.pythonhosted.org/packages/3d/fd/3ca1b4e4bdd129829e9ce78677e0f8e0f1038a7702dccecfa52f037c6046/ty-0.0.26-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f41ac45a0f8e3e8e181508d863a0a62156341db0f624ffd004b97ee550a9de80", size = 10294401, upload-time = "2026-03-26T16:27:03.999Z" }, + { url = "https://files.pythonhosted.org/packages/de/20/4ee3d8c3f90e008843795c765cb8bb245f188c23e5e5cc612c7697406fba/ty-0.0.26-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:73eb8327a34d529438dfe4db46796946c4e825167cbee434dc148569892e435f", size = 10351469, upload-time = "2026-03-26T16:27:19.003Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b1/9fb154ade65906d4148f0b999c4a8257c2a34253cb72e15d84c1f04a064e/ty-0.0.26-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4bb53a79259516535a1b55f613ba1619e9c666854946474ca8418c35a5c4fd60", size = 10529488, upload-time = "2026-03-26T16:27:01.378Z" }, + { url = "https://files.pythonhosted.org/packages/a5/70/9b02b03b1862e27b64143db65946d68b138160a5b6bfea193bee0b8bbc34/ty-0.0.26-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2f0e75edc1aeb1b4b84af516c7891f631254a4ca3dcd15e848fa1e061e1fe9da", size = 10999015, upload-time = "2026-03-26T16:27:34.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/16/0a56b8667296e2989b9d48095472d98ebf57a0006c71f2a101bbc62a142d/ty-0.0.26-py3-none-win32.whl", hash = "sha256:943c998c5523ed6b519c899c0c39b26b4c751a9759e460fb964765a44cde226f", size = 9912378, upload-time = "2026-03-26T16:27:08.999Z" }, + { url = "https://files.pythonhosted.org/packages/60/c2/fef0d4bba9cd89a82d725b3b1a66efb1b36629ecf0fb1d8e916cb75b8829/ty-0.0.26-py3-none-win_amd64.whl", hash = "sha256:19c856d343efeb1ecad8ee220848f5d2c424daf7b2feda357763ad3036e2172f", size = 10863737, upload-time = "2026-03-26T16:27:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/4d/05/888ebcb3c4d3b6b72d5d3241fddd299142caa3c516e6d26a9cd887dfed3b/ty-0.0.26-py3-none-win_arm64.whl", hash = "sha256:2cde58ccffa046db1223dc28f3e7d4f2c7da8267e97cc5cd186af6fe85f1758a", size = 10285408, upload-time = "2026-03-26T16:27:16.432Z" }, +] + [[package]] name = "typing-extensions" version = "4.14.1" From eee2ef429c4f04e55e96881b9e40c7f1aeeb39e5 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sun, 29 Mar 2026 13:56:14 +0530 Subject: [PATCH 06/25] feat: Add sqlalchemy backed result backend and tests --- pyproject.toml | 8 +- taskiq_postgresql/__init__.py | 7 - taskiq_postgresql/abc/__init__.py | 0 taskiq_postgresql/abc/driver.py | 182 -------- taskiq_postgresql/abc/query.py | 258 ----------- taskiq_postgresql/broker.py | 267 ------------ taskiq_postgresql/drivers/__init__.py | 14 - taskiq_postgresql/drivers/_asyncpg.py | 301 ------------- taskiq_postgresql/drivers/_psqlpy.py | 313 -------------- taskiq_postgresql/drivers/_psycopg.py | 353 --------------- taskiq_postgresql/exceptions.py | 10 - taskiq_postgresql/py.typed | 0 taskiq_postgresql/result_backend.py | 206 --------- taskiq_postgresql/scheduler_source.py | 213 --------- taskiq_postgresql/utils/__init__.py | 5 - taskiq_postgresql/utils/get_db_driver.py | 35 -- .../utils/get_db_listen_driver.py | 40 -- taskiq_postgresql/utils/libs_available.py | 16 - taskiq_sqlalchemy/__init__.py | 8 + 
taskiq_sqlalchemy/manager.py | 55 +++ taskiq_sqlalchemy/models.py | 54 +++ taskiq_sqlalchemy/result_backend.py | 119 ++++++ tests/conftest.py | 115 +---- tests/result_backend/__init__.py | 1 + tests/result_backend/conftest.py | 180 ++++++++ tests/result_backend/test_get_result.py | 125 ++++++ tests/result_backend/test_is_result_ready.py | 78 ++++ tests/result_backend/test_set_result.py | 123 ++++++ tests/test_broker.py | 200 --------- tests/test_result_backend.py | 164 ------- tests/test_scheduler_source.py | 403 ------------------ 31 files changed, 755 insertions(+), 3098 deletions(-) delete mode 100644 taskiq_postgresql/__init__.py delete mode 100644 taskiq_postgresql/abc/__init__.py delete mode 100644 taskiq_postgresql/abc/driver.py delete mode 100644 taskiq_postgresql/abc/query.py delete mode 100644 taskiq_postgresql/broker.py delete mode 100644 taskiq_postgresql/drivers/__init__.py delete mode 100644 taskiq_postgresql/drivers/_asyncpg.py delete mode 100644 taskiq_postgresql/drivers/_psqlpy.py delete mode 100644 taskiq_postgresql/drivers/_psycopg.py delete mode 100644 taskiq_postgresql/exceptions.py delete mode 100644 taskiq_postgresql/py.typed delete mode 100644 taskiq_postgresql/result_backend.py delete mode 100644 taskiq_postgresql/scheduler_source.py delete mode 100644 taskiq_postgresql/utils/__init__.py delete mode 100644 taskiq_postgresql/utils/get_db_driver.py delete mode 100644 taskiq_postgresql/utils/get_db_listen_driver.py delete mode 100644 taskiq_postgresql/utils/libs_available.py create mode 100644 taskiq_sqlalchemy/manager.py create mode 100644 taskiq_sqlalchemy/models.py create mode 100644 taskiq_sqlalchemy/result_backend.py create mode 100644 tests/result_backend/__init__.py create mode 100644 tests/result_backend/conftest.py create mode 100644 tests/result_backend/test_get_result.py create mode 100644 tests/result_backend/test_is_result_ready.py create mode 100644 tests/result_backend/test_set_result.py delete mode 100644 tests/test_broker.py 
delete mode 100644 tests/test_result_backend.py delete mode 100644 tests/test_scheduler_source.py diff --git a/pyproject.toml b/pyproject.toml index c572f25..87027d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ keywords = [ "async", "postgresql", "asyncpg", - "psycopg3", + "psycopg3", "sqlalchemy", ] @@ -89,6 +89,11 @@ required-environments = [ [tool.uv.pip] universal = true +[tool.pytest.ini_options] +markers = [ + "postgresql: tests that require a running PostgreSQL server (skip if unavailable)", +] + [tool.ruff.lint] select = [ "E", # Error @@ -97,7 +102,6 @@ select = [ "C90", # McCabe complexity "I", # Isort "N", # pep8-naming - "D", # Pydocstyle "ANN", # Pytype annotations "S", # Bandit "B", # Bugbear diff --git a/taskiq_postgresql/__init__.py b/taskiq_postgresql/__init__.py deleted file mode 100644 index b37599f..0000000 --- a/taskiq_postgresql/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from taskiq_postgresql.broker import PostgresqlBroker -from taskiq_postgresql.result_backend import PostgresqlResultBackend - -__all__ = [ - "PostgresqlBroker", - "PostgresqlResultBackend", -] diff --git a/taskiq_postgresql/abc/__init__.py b/taskiq_postgresql/abc/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/taskiq_postgresql/abc/driver.py b/taskiq_postgresql/abc/driver.py deleted file mode 100644 index 69d47c8..0000000 --- a/taskiq_postgresql/abc/driver.py +++ /dev/null @@ -1,182 +0,0 @@ -from abc import ABC, abstractmethod -from datetime import date, datetime -from types import TracebackType -from typing import Any, AsyncIterator, Optional, Sequence, Union - -from typing_extensions import Self - -from taskiq_postgresql.abc.query import ( - Column, - CreatedAtColumn, - CreateIndexQuery, - CreateTableQuery, - DeleteByDateQuery, - DeleteQuery, - DeleteReturningQuery, - InsertOrUpdateQuery, - InsertQuery, - SelectQuery, -) - - -class QueryDriver(ABC): - """Base class for all PostgreSQL backends.""" - - def __init__( - self, - 
connection_string: str, - table_name: str, - columns: Sequence[Column], - primary_key: Column, - created_at: Optional[Column] = None, - index_columns: Optional[Sequence[Column]] = None, - run_migrations: bool = False, - **connection_kwargs: Any, - ) -> None: - """Initialize the backend.""" - self.connection_string = connection_string - self.table_name = table_name - self.columns = columns - self.primary_key = primary_key - self.created_at = created_at or CreatedAtColumn() - self.index_columns = index_columns - self.connection_kwargs = connection_kwargs - - self.create_table_query = CreateTableQuery( - self.table_name, - [self.primary_key, *self.columns, self.created_at], - ) - self.create_index_query = CreateIndexQuery(self.table_name) - self.insert_query = InsertQuery( - self.table_name, - ) - self.delete_query = DeleteQuery(self.table_name) - self.delete_returning_query = DeleteReturningQuery(self.table_name) - self.delete_by_date_query = DeleteByDateQuery(self.table_name) - self.select_query = SelectQuery( - self.table_name, - ) - self.insert_or_update_query = InsertOrUpdateQuery(self.table_name) - self.run_migrations = run_migrations - - @abstractmethod - async def __aenter__(self) -> Self: - """Enter the context manager.""" - return self - - @abstractmethod - async def __aexit__( - self, - exc_type: Optional[type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - """Exit the context manager.""" - - @abstractmethod - async def create_table(self) -> Any: - """Create a table.""" - - @abstractmethod - async def create_index(self) -> Any: - """Create an index.""" - - @abstractmethod - async def insert( - self, - columns: Sequence[Column], - values: Sequence[Any], - returning: Optional[Sequence[Column]], - ) -> Any: - """Insert a row into a table.""" - - @abstractmethod - async def insert_or_update( - self, - columns: Sequence[Column], - values: Sequence[Any], - on_conflict_columns: Sequence[Column], - 
on_conflict_update_columns: Sequence[Column], - returning: Optional[Sequence[Column]], - ) -> Any: - """Insert or update a row into a table.""" - - @abstractmethod - async def delete(self, column: Column, value: Any) -> Any: - """Delete a row from a table.""" - - @abstractmethod - async def delete_returning( - self, - where_column: Column, - value: Any, - returning: Sequence[Column], - ) -> Optional[dict[str, Any]]: - """Atomically delete a row and return requested columns. - - Returns a dictionary mapping column names to values for the deleted row, - or None if no row matched (e.g., already claimed by another worker). - """ - - @abstractmethod - async def delete_by_date( - self, - from_date: Union[datetime, date], - to_date: Optional[Union[datetime, date]] = None, - ) -> None: - """Delete results by date.""" - - @abstractmethod - async def select( - self, - columns: Sequence[Column], - where_columns: Optional[Sequence[Column]] = None, - where_values: Optional[Sequence[Any]] = None, - ) -> list[dict[str, Any]]: - """Select a row from a table.""" - - @abstractmethod - async def exists(self, id: Any) -> bool: - """Check if a row exists in a table.""" - - @abstractmethod - async def on_startup(self) -> None: - """On startup.""" - async with self: - await self.create_table() - await self.create_index() - - @abstractmethod - async def on_shutdown(self) -> None: - """On shutdown.""" - - @abstractmethod - async def execute(self, query: str, *values: Any) -> Any: - """Execute a query.""" - - -class ListenDriver(ABC): - """Base class for all PostgreSQL broker drivers.""" - - def __init__( - self, - connection_string: str, - channel_name: str, - **connection_kwargs: Any, - ) -> None: - """Initialize the broker driver.""" - self.channel_name = channel_name - self.connection_string = connection_string - self.connection_kwargs = connection_kwargs - - @abstractmethod - async def on_startup(self) -> None: - """On startup.""" - - @abstractmethod - async def on_shutdown(self) -> 
None: - """On shutdown.""" - - @abstractmethod - async def __aiter__(self) -> AsyncIterator[bytes]: - """Iterate over the queue.""" diff --git a/taskiq_postgresql/abc/query.py b/taskiq_postgresql/abc/query.py deleted file mode 100644 index d5ea680..0000000 --- a/taskiq_postgresql/abc/query.py +++ /dev/null @@ -1,258 +0,0 @@ -from abc import ABC, abstractmethod -from typing import Any, Literal, Optional, Sequence - - -class QueryBase(ABC): - """Base class for all queries.""" - - def __init__(self, table_name: str) -> None: - """Initialize the query.""" - self.table_name = table_name - - @abstractmethod - def make_query(self, *values: Any) -> str: - """Return the query as a string.""" - - -class Column(QueryBase): - """Base class for all columns.""" - - def __init__( - self, - name: str, - type_: str, - nullable: bool = False, - default: Any = None, - primary_key: bool = False, - ) -> None: - """Initialize the column.""" - self.name = name - self.type = type_ - self.nullable = nullable - self.default = default - self.primary_key = primary_key - - def make_query(self) -> str: - """Return the column definition as a SQL string.""" - parts = [self.name, self.type] - - if not self.nullable: - parts.append("NOT NULL") - - if self.default is not None: - parts.append(f"DEFAULT {self.default}") - - if self.primary_key: - parts.append("PRIMARY KEY") - - return " ".join(parts) - - -class CreateTableQuery(QueryBase): - """Query to create a table.""" - - def __init__(self, table_name: str, columns: Sequence[Column]) -> None: - """Initialize the query.""" - super().__init__(table_name) - self.columns = columns - - def make_query(self) -> str: - """Return the query as a string.""" - return ( - f"CREATE TABLE IF NOT EXISTS {self.table_name} " - f"({', '.join(column.make_query() for column in self.columns)})" - ) - - -class CreateIndexQuery(QueryBase): - """Query to create an index.""" - - def __init__(self, table_name: str) -> None: - """Initialize the query.""" - 
super().__init__(table_name) - - def make_query(self, columns: Sequence[Column]) -> str: - """Return the query as a string.""" - return "\n".join( - "CREATE INDEX IF NOT EXISTS " - f"{self.table_name}_{column.name}_idx " - f"ON {self.table_name} USING HASH ({column.name});" - for column in columns - ) - - -class InsertQuery(QueryBase): - """Query to insert a row into a table.""" - - def __init__( - self, - table_name: str, - ) -> None: - """Initialize the query.""" - super().__init__(table_name) - - def make_query( - self, - columns: Sequence[Column], - returning: Optional[Sequence[Column]] = None, - ) -> str: - """Return the query as a string.""" - return ( - f"INSERT INTO {self.table_name} " # noqa: S608 - f"({', '.join(column.name for column in columns)}) " - f"VALUES ({', '.join(f'${i}' for i in range(1, len(columns) + 1))})" - + ( - f" RETURNING {', '.join(column.name for column in returning)}" - if returning is not None - else "" - ) - ) - - -class InsertOrUpdateQuery(InsertQuery): - """Query to insert or update a row into a table.""" - - def __init__(self, table_name: str) -> None: - """Initialize the query.""" - super().__init__(table_name) - - def make_query( - self, - columns: Sequence[Column], - returning: Sequence[Column], - on_conflict_columns: Sequence[Column], - on_conflict_action: Literal["UPDATE", "NOTHING"] = "UPDATE", - on_conflict_update_columns: Optional[Sequence[Column]] = None, - ) -> str: - """Return the query as a string.""" - insert_query = super().make_query(columns) - returning_query = ( - f" RETURNING {', '.join(column.name for column in returning)}" - if returning is not None - else "" - ) - if on_conflict_action == "UPDATE": - if on_conflict_update_columns is None: - raise ValueError( - "on_conflict_update_columns is required when " - "on_conflict_action is UPDATE", - ) - - set_query = ", ".join( - f"{column.name} = EXCLUDED.{column.name}" - for column in on_conflict_update_columns - ) - conflict_query = ", ".join(column.name for 
column in on_conflict_columns) - update_query = f"ON CONFLICT ({conflict_query}) DO UPDATE SET {set_query}" - return f"{insert_query} {update_query} {returning_query}" - - return ( - f"{insert_query} ON CONFLICT ({', '.join(on_conflict_columns)}) DO NOTHING" - + returning_query - ) - - -class DeleteQuery(QueryBase): - """Query to delete a row from a table.""" - - def __init__(self, table_name: str) -> None: - """Initialize the query.""" - super().__init__(table_name) - - def make_query(self, column: Column) -> str: - """Return the query as a string.""" - return f"DELETE FROM {self.table_name} WHERE {column.name} = $1" # noqa: S608 - - -class DeleteReturningQuery(QueryBase): - """Query to delete a row from a table and return specified columns.""" - - def __init__(self, table_name: str) -> None: - """Initialize the query.""" - super().__init__(table_name) - - def make_query(self, where_column: Column, returning: Sequence[Column]) -> str: - """Return the query as a string.""" - return ( - f"DELETE FROM {self.table_name} " # noqa: S608 - f"WHERE {where_column.name} = $1 " - f"RETURNING {', '.join(column.name for column in returning)}" - ) - - -class DeleteByDateQuery(QueryBase): - """Query to delete a row from a table by date.""" - - def __init__(self, table_name: str) -> None: - """Initialize the query.""" - super().__init__(table_name) - - def make_query(self, column: Column) -> str: - """Return the query as a string.""" - return f"DELETE FROM {self.table_name} WHERE {column.name} BETWEEN $1 AND $2" # noqa: S608 - - -class SelectQuery(QueryBase): - """Query to select a row from a table.""" - - def __init__( - self, - table_name: str, - ) -> None: - """Initialize the query.""" - super().__init__(table_name) - - def make_query( - self, - columns: Sequence[Column], - where_columns: Optional[Sequence[Column]] = None, - ) -> str: - """Return the query as a string.""" - return ( - f"SELECT {', '.join(column.name for column in columns)} " # noqa: S608 - f"FROM 
{self.table_name} " - + ( - f"WHERE {' AND '.join(f'{column.name} = ${i}' for i, column in enumerate(where_columns, start=1))}" # noqa: E501 - if where_columns is not None - else "" - ) - ) - - -class CreatedAtColumn(Column): - """Column for the created at timestamp.""" - - def __init__(self) -> None: - """Initialize the column.""" - super().__init__( - "created_at", - "TIMESTAMP WITH TIME ZONE", - nullable=False, - default="NOW()", - ) - - -class UpdatedAtColumn(Column): - """Column for the updated at timestamp.""" - - def __init__(self) -> None: - """Initialize the column.""" - super().__init__( - "updated_at", - "TIMESTAMP WITH TIME ZONE", - nullable=False, - default="NOW()", - ) - - -class PrimaryKeyColumn(Column): - """Column for the primary key.""" - - def __init__( - self, - name: str = "id", - type_: str = "UUID", - default: Any = None, - ) -> None: - """Initialize the column.""" - super().__init__(name, type_, primary_key=True, default=default) diff --git a/taskiq_postgresql/broker.py b/taskiq_postgresql/broker.py deleted file mode 100644 index f69bbe2..0000000 --- a/taskiq_postgresql/broker.py +++ /dev/null @@ -1,267 +0,0 @@ -from __future__ import annotations - -from asyncio import Queue as AsyncQueue -from asyncio import Task, get_running_loop -from dataclasses import dataclass -from logging import getLogger -from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, TypeVar, Union - -from taskiq import AckableMessage, AsyncBroker, AsyncResultBackend, BrokerMessage - -from taskiq_postgresql.abc.driver import ListenDriver -from taskiq_postgresql.abc.query import Column, CreatedAtColumn, PrimaryKeyColumn -from taskiq_postgresql.utils import get_db_driver, get_db_listen_driver - -if TYPE_CHECKING: - from collections.abc import AsyncGenerator - - -_T = TypeVar("_T") - -logger = getLogger("taskiq.asyncpg_broker") - - -@dataclass -class Table: - """Columns for the result backend.""" - - primary_key = PrimaryKeyColumn( - name="id", - type_="SERIAL", - 
) - task_id: Column - task_name = Column( - name="task_name", - type_="VARCHAR", - ) - message = Column( - name="message", - type_="BYTEA", - ) - labels = Column( - name="labels", - type_="JSONB", - ) - created_at = CreatedAtColumn() - - -class PostgresqlBroker(AsyncBroker): - """Broker that uses PostgreSQL and asyncpg with LISTEN/NOTIFY.""" - - def __init__( - self, - dsn: Union[ - str, - Callable[ - [], - str, - ], - ] = "postgresql://postgres:postgres@localhost:5432/postgres", - result_backend: AsyncResultBackend[_T] | None = None, - task_id_generator: Callable[[], str] | None = None, - field_for_task_id: Literal["VarChar", "Text", "Uuid"] = "Uuid", - channel_name: str = "taskiq", - table_name: str = "taskiq_messages", - driver: Literal["asyncpg", "psqlpy", "psycopg", "pg8000"] = "asyncpg", - max_retry_attempts: int = 5, - connection_kwargs: dict[str, Any] | None = None, - pool_kwargs: dict[str, Any] | None = None, - run_migrations: bool = False, - ) -> None: - """ - Construct a new broker. - - Args: - dsn (Union[str, Callable[[], str]], optional): \ - connection string to PostgreSQL, or callable returning one. - result_backend (AsyncResultBackend[_T] | None, optional): \ - Custom result backend. - task_id_generator (Callable[[], str] | None, optional): \ - Custom task_id generator. - field_for_task_id (Literal["VarChar", "Text", "Uuid"], optional): \ - Field for task_id. Defaults to "Uuid". - channel_name (str, optional): \ - Name of the channel to listen on. - table_name (str, optional): \ - Name of the table to store messages. - driver (Literal["asyncpg"], optional): \ - Driver to use. Defaults to "asyncpg". - max_retry_attempts (int, optional): \ - Maximum number of message processing attempts. - connection_kwargs (dict[str, Any] | None, optional): \ - Additional arguments for asyncpg connection. - pool_kwargs (dict[str, Any] | None, optional): \ - Additional arguments for asyncpg pool creation. 
- """ - super().__init__( - result_backend=result_backend, - task_id_generator=task_id_generator, - ) - self._dsn: str | Callable[[], str] = dsn - self.channel_name: str = channel_name - self.table_name: str = table_name - self.connection_kwargs: dict[str, Any] = ( - connection_kwargs if connection_kwargs else {} - ) - self.pool_kwargs: dict[str, Any] = pool_kwargs if pool_kwargs else {} - self.max_retry_attempts: int = max_retry_attempts - self._queue: AsyncQueue[str] | None = None - - self.columns = Table(task_id=Column(name="task_id", type_=field_for_task_id)) - - self.driver = get_db_driver(driver)( - connection_string=self.dsn, - table_name=self.table_name, - columns=[ - self.columns.task_id, - self.columns.task_name, - self.columns.message, - self.columns.labels, - ], - primary_key=self.columns.primary_key, - created_at=self.columns.created_at, - index_columns=[self.columns.primary_key], - run_migrations=run_migrations, - **self.connection_kwargs, - ) - self.listen_driver: ListenDriver = get_db_listen_driver(driver)( - connection_string=self.dsn, - channel_name=self.channel_name, - **self.connection_kwargs, - ) - - @property - def dsn(self) -> str: - """ - Get the DSN string. - - Returns the DSN string or None if not set. - """ - if callable(self._dsn): - return self._dsn() - return self._dsn - - async def startup(self) -> None: - """Initialize the broker.""" - await super().startup() - - await self.driver.on_startup() - await self.listen_driver.on_startup() - - async def shutdown(self) -> None: - """Close all connections on shutdown.""" - await super().shutdown() - await self.driver.on_shutdown() - await self.listen_driver.on_shutdown() - - async def kick(self, message: BrokerMessage) -> None: - """ - Send message to the channel. - - Inserts the message into the database and sends a NOTIFY. - - :param message: Message to send. 
- """ - message_inserted_id = await self.driver.insert( - [ - self.columns.task_id, - self.columns.task_name, - self.columns.message, - self.columns.labels, - ], - [ - message.task_id, - message.task_name, - message.message, - message.labels, - ], - [ - self.columns.primary_key, - ], - ) - - delay_value = message.labels.get("delay") - if delay_value is not None: - delay_seconds = int(delay_value) - return await self._schedule_notification(message_inserted_id, delay_seconds) - - return await self._send_notification(message_inserted_id) - - async def _send_notification(self, message_id: int) -> None: - """Send a notification with the message ID as payload.""" - try: - await self.driver.execute(f"NOTIFY {self.channel_name}, '{message_id}'") - except Exception as error: - logger.exception("Error sending notification: %s", error) - raise - - async def _schedule_notification(self, message_id: int, delay_seconds: int) -> None: - """Schedules the next task based on the schedule object.""" - loop = get_running_loop() - - loop_now = loop.time() - when = loop_now + delay_seconds - - loop.call_at( - when, - lambda: Task( - self._send_notification(message_id), - loop=loop, - ), - ) - - async def listen(self) -> AsyncGenerator[AckableMessage, None]: - """ - Listen to the channel. - - Yields messages as they are received. - - This method atomically claims messages using DELETE ... RETURNING, ensuring - that only a single worker processes each message even though NOTIFY is - broadcast to all listeners. - - :yields: AckableMessage instances. - """ - while True: - try: - async for message_id in self.listen_driver: - # Normalize payload to integer ID (psycopg may yield string payloads). - try: - normalized_id = int(message_id) # type: ignore[arg-type] - except (TypeError, ValueError): - logger.warning( - "Invalid NOTIFY payload %r on channel %s", - message_id, - self.channel_name, - ) - continue - - # Atomically claim the message row. 
If None is returned, another - # worker has already claimed it. - row = await self.driver.delete_returning( - self.columns.primary_key, - normalized_id, - [self.columns.message], - ) - - if row is None: - # Claimed elsewhere or missing; skip. - continue - - message: Optional[bytes] = row.get(self.columns.message.name) - - if message is None: - logger.warning( - "Message with id %s has no payload.", - message_id, - ) - continue - - async def ack(*, _message_id: int = message_id) -> None: # noqa: ARG001 - # No-op: the row was already deleted when claimed. - return None - - yield AckableMessage(data=message, ack=ack) - except Exception as error: - logger.exception("Error processing message: %s", error) - continue diff --git a/taskiq_postgresql/drivers/__init__.py b/taskiq_postgresql/drivers/__init__.py deleted file mode 100644 index 2b1ac9e..0000000 --- a/taskiq_postgresql/drivers/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -from taskiq_postgresql.utils.libs_available import ( - is_asyncpg_available, - is_psqlpy_available, - is_psycopg_available, -) - -if is_asyncpg_available(): - from ._asyncpg import AsyncpgDriver, AsyncpgListenDriver # noqa: F401 - -if is_psqlpy_available(): - from ._psqlpy import PsqlpyDriver, PsqlpyListenDriver # noqa: F401 - -if is_psycopg_available(): - from ._psycopg import PsycopgDriver, PsycopgListenDriver # noqa: F401 diff --git a/taskiq_postgresql/drivers/_asyncpg.py b/taskiq_postgresql/drivers/_asyncpg.py deleted file mode 100644 index 1ca242d..0000000 --- a/taskiq_postgresql/drivers/_asyncpg.py +++ /dev/null @@ -1,301 +0,0 @@ -from asyncio import Queue as AsyncQueue -from contextlib import asynccontextmanager -from datetime import date, datetime -from types import TracebackType -from typing import Any, AsyncIterator, Optional, Sequence, Union -from uuid import UUID - -from asyncpg import Connection, Pool, connect, create_pool -from asyncpg.transaction import Transaction -from taskiq.compat import IS_PYDANTIC2 - -from 
taskiq_postgresql.abc.driver import ListenDriver, QueryDriver -from taskiq_postgresql.abc.query import Column -from taskiq_postgresql.exceptions import DatabaseConnectionError - -if IS_PYDANTIC2: - from pydantic_core import to_json - - def dumps(value: dict) -> str: - return to_json(value).decode() -else: - from json import dumps as to_json - - def dumps(value: dict) -> str: - return to_json(value) - - -class AsyncpgDriver(QueryDriver): - """Asyncpg backend.""" - - pool: Pool = None - transaction: Transaction = None - - def __init__( - self, - connection_string: str, - table_name: str, - columns: Sequence[Column], - primary_key: Column, - created_at: Optional[Column] = None, - index_columns: Optional[Sequence[Column]] = None, - run_migrations: bool = False, - **connection_kwargs: Any, - ) -> None: - """Initialize the backend.""" - super().__init__( - connection_string, - table_name, - columns, - primary_key, - created_at, - index_columns, - run_migrations, - **connection_kwargs, - ) - - def __parser_query( - self, - columns: Sequence[Column], - values: Sequence[Any], - ) -> list[Any]: - """Parser query. - - Args: - columns (Sequence[Column]): Columns to parse. - values (Sequence[Any]): Values to parse. - - Returns: - list[Any]: Parsed values. 
- """ - if not values: - return [] - - if not columns: - return values - - new_values = [*values] - - for index, (column, value) in enumerate(zip(columns, values)): - if column.type.upper() == "JSONB" and isinstance(value, dict): - new_values[index] = dumps(value) - - if column.type.upper() == "UUID" and isinstance(value, UUID): - new_values[index] = value.hex - - return new_values - - @asynccontextmanager - async def connection(self) -> AsyncIterator[Connection]: - async with self.pool.acquire() as connection: - yield connection - - async def __aenter__(self) -> Connection: - """Enter the context manager.""" - try: - if self.pool is None: - self.pool = await create_pool( - self.connection_string, - **self.connection_kwargs, - ) - return self - except Exception as error: - raise DatabaseConnectionError(str(error)) from error - - async def __aexit__( - self, - exc_type: Optional[type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - """Exit the context manager.""" - - async def create_table(self) -> str: - """Create a table.""" - async with self, self.connection() as connection: - return await connection.execute(self.create_table_query.make_query()) - - async def create_index(self) -> None: - """Create an index.""" - if self.index_columns is not None: - async with self, self.connection() as connection: - await connection.execute( - self.create_index_query.make_query(self.index_columns), - ) - - async def insert( - self, - columns: Sequence[Column], - values: Sequence[Any], - returning: Optional[Sequence[Column]] = None, - ) -> Any: - """Insert a row into a table.""" - async with self, self.connection() as connection: - return await connection.fetchval( - self.insert_query.make_query(columns, returning), - *self.__parser_query(columns, values), - ) - - async def insert_or_update( - self, - columns: Sequence[Column], - values: Sequence[Any], - on_conflict_columns: Sequence[Column], - on_conflict_update_columns: 
Sequence[Column], - returning: Optional[Sequence[Column]] = None, - ) -> Any: - """Insert or update a row into a table.""" - async with self, self.connection() as connection: - return ( - await connection.fetchval( - self.insert_or_update_query.make_query( - columns, - returning, - on_conflict_columns, - on_conflict_update_columns=on_conflict_update_columns, - ), - *self.__parser_query(columns, values), - ), - ) - - async def delete(self, column: Column, value: Any) -> str: - """Delete a row from a table.""" - async with self, self.connection() as connection: - return await connection.execute( - self.delete_query.make_query(column), - value, - ) - - async def delete_returning( - self, - where_column: Column, - value: Any, - returning: Sequence[Column], - ) -> Optional[dict[str, Any]]: - """Atomically delete a row and return requested columns.""" - async with self, self.connection() as connection: - row = await connection.fetchrow( - self.delete_returning_query.make_query(where_column, returning), - value, - ) - if row is None: - return None - return {column.name: row[column.name] for column in returning} - - async def select( - self, - columns: Sequence[Column], - where_columns: Optional[Sequence[Column]] = None, - where_values: Optional[Sequence[Any]] = None, - ) -> list[dict[str, Any]]: - """Select a row from a table.""" - async with self, self.connection() as connection: - rows = await connection.fetch( - self.select_query.make_query(columns, where_columns), - *self.__parser_query(where_columns, where_values or ()), - ) - - return [ - {column.name: row[column.name] for column in columns} for row in rows - ] - - async def exists(self, id: Any) -> bool: - """Check if a row exists in a table.""" - async with self, self.connection() as connection: - return await connection.fetchval( - self.select_query.make_query([Column("1", "")], [self.primary_key]), - id, - ) - - async def delete_by_date( - self, - from_date: Union[datetime, date], - to_date: 
Optional[Union[datetime, date]] = None, - ) -> str: - """Delete a row from a table by date.""" - async with self, self.connection() as connection: - return await connection.execute( - self.delete_by_date_query.make_query(self.created_at), - from_date, - to_date, - ) - - async def on_startup(self) -> None: - """On startup.""" - if self.run_migrations: - async with self, self.connection() as connection: - transaction = connection.transaction() - await transaction.start() - await self.create_table() - await self.create_index() - await transaction.commit() - - async def on_shutdown(self) -> None: - """On shutdown.""" - await self.pool.close() - self.pool = None - - async def execute(self, query: str, *values: Any) -> str: - """Execute a query.""" - async with self, self.connection() as connection: - return await connection.fetch(query, *values) - - -class AsyncpgListenDriver(ListenDriver): - """Asyncpg listen driver.""" - - def __init__( - self, - connection_string: str, - channel_name: str, - **connection_kwargs: Any, - ) -> None: - """Initialize the listen driver.""" - super().__init__(connection_string, channel_name, **connection_kwargs) - self._queue: AsyncQueue[int] = AsyncQueue() - - async def on_startup(self) -> None: - """On startup.""" - self.connection = await connect( - self.connection_string, - **self.connection_kwargs, - ) - await self.connection.add_listener( - self.channel_name, - self._notification_handler, - ) - - async def on_shutdown(self) -> None: - """On shutdown.""" - await self.connection.remove_listener( - self.channel_name, - self._notification_handler, - ) - await self.connection.close() - - def _notification_handler( - self, - con_ref: object, - pid: int, - channel: str, - payload: object, - /, - ) -> None: - """ - Handle NOTIFY messages. 
- - From asyncpg.connection.add_listener docstring: - A callable or a coroutine function receiving the following arguments: - **con_ref**: a Connection the callback is registered with; - **pid**: PID of the Postgres server that sent the notification; - **channel**: name of the channel the notification was sent to; - **payload**: the payload. - """ - if self._queue is not None: - self._queue.put_nowait(int(payload)) - - async def __aiter__(self) -> AsyncIterator[Any]: - """Iterate over the queue.""" - while not self.connection.is_closed(): - message_id = await self._queue.get() - yield message_id diff --git a/taskiq_postgresql/drivers/_psqlpy.py b/taskiq_postgresql/drivers/_psqlpy.py deleted file mode 100644 index 79bd236..0000000 --- a/taskiq_postgresql/drivers/_psqlpy.py +++ /dev/null @@ -1,313 +0,0 @@ -import json -from asyncio import Queue as AsyncQueue -from contextlib import asynccontextmanager -from datetime import date, datetime -from types import TracebackType -from typing import Any, AsyncIterator, Final, Optional, Sequence, Union -from uuid import UUID - -from psqlpy import Connection, ConnectionPool, QueryResult -from psqlpy.extra_types import JSONB - -from taskiq_postgresql.abc.driver import ListenDriver, QueryDriver -from taskiq_postgresql.abc.query import Column -from taskiq_postgresql.exceptions import DatabaseConnectionError - - -class PsqlpyDriver(QueryDriver): - """Asyncpg backend.""" - - pool: ConnectionPool = None - - def __init__( - self, - connection_string: str, - table_name: str, - columns: Sequence[Column], - primary_key: Column, - created_at: Optional[Column] = None, - index_columns: Optional[Sequence[Column]] = None, - run_migrations: bool = False, - **connection_kwargs: Any, - ) -> None: - """Initialize the backend.""" - super().__init__( - connection_string, - table_name, - columns, - primary_key, - created_at, - index_columns, - run_migrations, - **connection_kwargs, - ) - - def __parser_query( - self, - columns: Sequence[Column], - 
values: Sequence[Any], - ) -> list[Any]: - """Parser query.""" - if values is None: - return [] - - new_values = [*values] - - for index, (column, value) in enumerate(zip(columns, values)): - if column.type.upper() == "JSONB": - if isinstance(value, str): - value = json.loads(value) # noqa: PLW2901 - new_values[index] = JSONB(value) - - if column.type.upper() == "UUID": - new_values[index] = UUID(value) - - return new_values - - @asynccontextmanager - async def connection(self) -> AsyncIterator[Connection]: - try: - async with self.pool.acquire() as connection: - yield connection - except Exception as error: - raise DatabaseConnectionError(str(error)) from error - - async def __aenter__(self) -> Connection: - """Enter the context manager.""" - if self.pool is None: - self.pool = ConnectionPool( - self.connection_string, - **self.connection_kwargs, - ) - return self - - async def __aexit__( - self, - exc_type: Optional[type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - """Exit the context manager.""" - - async def create_table(self) -> list: - """Create a table.""" - async with self, self.connection() as connection: - results: Final[QueryResult] = await connection.fetch( - querystring=self.create_table_query.make_query(), - ) - return results.result() - - async def create_index(self) -> None: - """Create an index.""" - if self.index_columns is not None: - async with self, self.connection() as connection: - await connection.execute_batch( - self.create_index_query.make_query(self.index_columns), - ) - - async def insert( - self, - columns: Sequence[Column], - values: Sequence[Any], - returning: Optional[Sequence[Column]] = None, - ) -> Any: - """Insert a row into a table.""" - async with self, self.connection() as connection: - if returning: - return await connection.fetch_val( - self.insert_query.make_query(columns, returning), - self.__parser_query(columns, values), - ) - - await connection.execute( - 
self.insert_query.make_query(columns, returning), - self.__parser_query(columns, values), - ) - - return None - - async def insert_or_update( - self, - columns: Sequence[Column], - values: Sequence[Any], - on_conflict_columns: Sequence[Column], - on_conflict_update_columns: Sequence[Column], - returning: Optional[Sequence[Column]] = None, - ) -> Any: - """Insert or update a row into a table.""" - async with self, self.connection() as connection: - if returning: - return await connection.fetch_val( - self.insert_or_update_query.make_query( - columns, - returning, - on_conflict_columns, - on_conflict_update_columns=on_conflict_update_columns, - ), - self.__parser_query(columns, values), - ) - - await connection.execute( - self.insert_or_update_query.make_query( - columns, - returning, - on_conflict_columns, - on_conflict_update_columns=on_conflict_update_columns, - ), - self.__parser_query(columns, values), - ) - - return None - - async def delete(self, column: Column, value: Any) -> str: - """Delete a row from a table.""" - async with self, self.connection() as connection: - results = await connection.execute( - self.delete_query.make_query(column), - self.__parser_query([column], [value]), - ) - return results.result(as_tuple=True) - - async def delete_returning( - self, - where_column: Column, - value: Any, - returning: Sequence[Column], - ) -> Optional[dict[str, Any]]: - """Atomically delete a row and return requested columns.""" - async with self, self.connection() as connection: - results = await connection.fetch( - self.delete_returning_query.make_query(where_column, returning), - self.__parser_query([where_column], [value]), - ) - rows = results.result() - if not rows: - return None - row = rows[0] - return {column.name: row[column.name] for column in returning} - - async def select( - self, - columns: Sequence[Column], - where_columns: Optional[Sequence[Column]] = None, - where_values: Optional[Sequence[Any]] = None, - ) -> list[dict[str, Any]]: - """Select 
a row from a table.""" - parameters = self.__parser_query(where_columns, where_values) - async with self, self.connection() as connection: - rows = await connection.execute( - querystring=self.select_query.make_query(columns, where_columns), - parameters=parameters, - ) - - return rows.result() - - async def exists(self, id: Any) -> bool: - """Check if a row exists in a table.""" - async with self, self.connection() as connection: - results = await connection.fetch( - self.select_query.make_query([Column("1", "")], [self.primary_key]), - self.__parser_query([self.primary_key], [id]), - ) - v = results.result() - return len(v) == 1 - - async def delete_by_date( - self, - from_date: Union[datetime, date], - to_date: Optional[Union[datetime, date]] = None, - ) -> str: - """Delete a row from a table by date.""" - async with self, self.connection() as connection: - results = await connection.execute( - self.delete_by_date_query.make_query(self.created_at), - (from_date, to_date), - ) - return results.result(as_tuple=True) - - async def on_startup(self) -> None: - """On startup.""" - if self.run_migrations: - async with self, self.connection() as connection: - transaction = connection.transaction() - await transaction.begin() - await self.create_table() - await self.create_index() - await transaction.commit() - - async def on_shutdown(self) -> None: - """On shutdown.""" - self.pool.close() - self.pool = None - - async def execute(self, query: str, *values: Any) -> str: - """Execute a query.""" - async with self, self.connection() as connection: - results = await connection.fetch(query, parameters=values) - return results.result() - - -class PsqlpyListenDriver(ListenDriver): - """Asyncpg listen driver.""" - - def __init__( - self, - connection_string: str, - channel_name: str, - **connection_kwargs: Any, - ) -> None: - """Initialize the listen driver.""" - super().__init__(connection_string, channel_name, **connection_kwargs) - self._queue: AsyncQueue[int] = AsyncQueue() 
- - async def on_startup(self) -> None: - """On startup.""" - self.connection = ConnectionPool( - self.connection_string, - **self.connection_kwargs, - ) - self.listener = self.connection.listener() - - await self.listener.add_callback( - self.channel_name, - self._notification_handler, - ) - - await self.listener.startup() - - self.listener.listen() - - async def on_shutdown(self) -> None: - """On shutdown.""" - await self.listener.clear_channel_callbacks( - self.channel_name, - ) - await self.listener.shutdown() - - self.connection.close() - - async def _notification_handler( - self, - connection: object, - payload: str, - channel: str, - process_id: int, - ) -> None: - """ - Handle NOTIFY messages. - - From asyncpg.connection.add_listener docstring: - A callable or a coroutine function receiving the following arguments: - **con_ref**: a Connection the callback is registered with; - **pid**: PID of the Postgres server that sent the notification; - **channel**: name of the channel the notification was sent to; - **payload**: the payload. 
- """ - if self._queue is not None: - self._queue.put_nowait(int(payload)) - - async def __aiter__(self) -> AsyncIterator[Any]: - """Iterate over the queue.""" - while True: - message_id = await self._queue.get() - yield message_id diff --git a/taskiq_postgresql/drivers/_psycopg.py b/taskiq_postgresql/drivers/_psycopg.py deleted file mode 100644 index 209cdab..0000000 --- a/taskiq_postgresql/drivers/_psycopg.py +++ /dev/null @@ -1,353 +0,0 @@ -import logging -from asyncio import Queue as AsyncQueue -from contextlib import asynccontextmanager -from datetime import date, datetime -from types import TracebackType -from typing import Any, AsyncIterator, Optional, Sequence, Union -from uuid import UUID - -from psycopg import AsyncConnection, AsyncCursor, AsyncRawCursor, Notify -from psycopg.rows import DictRow, TupleRow, dict_row -from psycopg_pool import AsyncConnectionPool -from taskiq.compat import IS_PYDANTIC2 -from typing_extensions import Self - -from taskiq_postgresql.abc.driver import ListenDriver, QueryDriver -from taskiq_postgresql.abc.query import Column -from taskiq_postgresql.exceptions import DatabaseConnectionError - -if IS_PYDANTIC2: - from pydantic_core import to_json - - def dumps(value: dict) -> str: - return to_json(value).decode() -else: - from json import dumps as to_json - - def dumps(value: dict) -> str: - return to_json(value) - - -logger = logging.getLogger(__name__) - - -class PsycopgDriver(QueryDriver): - """Asyncpg backend.""" - - pool: AsyncConnectionPool = None - - def __init__( - self, - connection_string: str, - table_name: str, - columns: Sequence[Column], - primary_key: Column, - created_at: Optional[Column] = None, - index_columns: Optional[Sequence[Column]] = None, - run_migrations: bool = False, - **connection_kwargs: Any, - ) -> None: - """Initialize the backend.""" - super().__init__( - connection_string, - table_name, - columns, - primary_key, - created_at, - index_columns, - run_migrations, - **connection_kwargs, - ) - - def 
__parser_params( - self, - columns: Sequence[Column], - values: Sequence[Any], - ) -> list[Any]: - """Parser query. - - Args: - columns (Sequence[Column]): Columns to parse. - values (Sequence[Any]): Values to parse. - - Returns: - list[Any]: Parsed values. - """ - if not values: - return [] - - if not columns: - return values - - new_values = [*values] - - for index, (column, value) in enumerate(zip(columns, values)): - if column.type.upper() == "JSONB" and isinstance(value, dict): - new_values[index] = dumps(value) - - if column.type.upper() == "UUID" and isinstance(value, UUID): - new_values[index] = value.hex - - return new_values - - @asynccontextmanager - async def connection(self) -> AsyncIterator[AsyncConnection[TupleRow]]: - try: - await self.pool.open() - async with self.pool.connection() as connection: - yield connection - - except Exception as error: - raise DatabaseConnectionError(str(error)) from error - - async def __aenter__(self) -> Self: - """Enter the context manager.""" - if self.pool is None: - self.pool = AsyncConnectionPool( - self.connection_string, - kwargs={ - "cursor_factory": AsyncRawCursor, - }, - open=False, - **self.connection_kwargs, - ) - return self - - async def __aexit__( - self, - exc_type: Optional[type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - """Exit the context manager.""" - - async def create_table(self) -> str: - """Create a table.""" - async with self, self.connection() as connection: - cursor: AsyncCursor[TupleRow] = await connection.execute( - self.create_table_query.make_query(), - ) - - return cursor.statusmessage - - async def create_index(self) -> None: - """Create an index.""" - if self.index_columns is not None: - async with self, self.connection() as connection: - await connection.execute( - self.create_index_query.make_query(self.index_columns), - ) - - async def insert( - self, - columns: Sequence[Column], - values: Sequence[Any], - returning: 
Optional[Sequence[Column]] = None, - ) -> Any: - """Insert a row into a table.""" - async with self, self.connection() as connection: - cursor: AsyncCursor[TupleRow] = await connection.execute( - self.insert_query.make_query(columns, returning), - params=self.__parser_params(columns, values), - ) - - if cursor.rownumber is not None: - value = await cursor.fetchone() - if value is not None: - return next(iter(value)) - return None - - async def insert_or_update( - self, - columns: Sequence[Column], - values: Sequence[Any], - on_conflict_columns: Sequence[Column], - on_conflict_update_columns: Sequence[Column], - returning: Optional[Sequence[Column]] = None, - ) -> Any: - """Insert or update a row into a table.""" - async with self, self.connection() as connection: - cursor: AsyncCursor[TupleRow] = await connection.execute( - self.insert_or_update_query.make_query( - columns, - returning, - on_conflict_columns, - on_conflict_update_columns=on_conflict_update_columns, - ), - params=self.__parser_params(columns, values), - ) - - if cursor.rownumber is not None: - value = await cursor.fetchone() - if value is not None: - return next(iter(value)) - return None - - async def delete(self, column: Column, value: Any) -> str: - """Delete a row from a table.""" - async with self, self.connection() as connection: - cursor: AsyncCursor[TupleRow] = await connection.execute( - self.delete_query.make_query(column), - params=self.__parser_params([column], [value]), - ) - - if cursor.rownumber is not None: - data = await cursor.fetchone() - if data is not None: - return next(iter(value)) - return None - - async def delete_returning( - self, - where_column: Column, - value: Any, - returning: Sequence[Column], - ) -> Optional[dict[str, Any]]: - """Atomically delete a row and return requested columns.""" - async with self, self.connection() as connection: - cursor = connection.cursor(row_factory=dict_row) - cursor = await cursor.execute( - 
self.delete_returning_query.make_query(where_column, returning), - params=self.__parser_params([where_column], [value]), - ) - if cursor.rownumber is not None: - row = await cursor.fetchone() - if row is not None: - # row is already a dict thanks to dict_row - return {column.name: row[column.name] for column in returning} - return None - - async def select( - self, - columns: Sequence[Column], - where_columns: Optional[Sequence[Column]] = None, - where_values: Optional[Sequence[Any]] = None, - ) -> list[dict[str, Any]]: - """Select a row from a table.""" - async with self, self.connection() as connection: - cursor = connection.cursor(row_factory=dict_row) - cursor: AsyncCursor[DictRow] = await cursor.execute( - self.select_query.make_query(columns, where_columns), - params=self.__parser_params(where_columns, where_values), - ) - - if cursor.rownumber is not None: - return await cursor.fetchall() - return [] - - async def exists(self, id: Any) -> bool: - """Check if a row exists in a table.""" - async with self, self.connection() as connection: - cursor: AsyncCursor[TupleRow] = await connection.execute( - self.select_query.make_query([Column("1", "")], [self.primary_key]), - params=self.__parser_params([self.primary_key], [id]), - ) - - if cursor.rownumber is not None: - value = await cursor.fetchone() - if value is not None: - return bool(next(iter(value))) - return False - - async def delete_by_date( - self, - from_date: Union[datetime, date], - to_date: Optional[Union[datetime, date]] = None, - ) -> str: - """Delete a row from a table by date.""" - async with self, self.connection() as connection: - cursor: AsyncCursor[TupleRow] = await connection.execute( - self.delete_by_date_query.make_query(self.created_at), - params=self.__parser_params( - [self.created_at, self.created_at], - [from_date, to_date], - ), - ) - - if cursor.rownumber is not None: - value = await cursor.fetchone() - if value is not None: - return next(iter(value)) - return None - - async def 
on_startup(self) -> None: - """On startup.""" - if self.run_migrations: - async with self, self.connection() as connection: # noqa: SIM117 - async with connection.transaction(): - await self.create_table() - await self.create_index() - - async def on_shutdown(self) -> None: - """On shutdown.""" - await self.pool.close() - self.pool = None - - async def execute(self, query: str, *values: Any) -> list[Any]: - """Execute a query.""" - async with self, self.connection() as connection: - cursor: AsyncRawCursor[TupleRow] = await connection.execute( - query, - params=values, - ) - - if cursor.rownumber is not None: - return await cursor.fetchall() - return [] - - -class PsycopgListenDriver(ListenDriver): - """Asyncpg listen driver.""" - - def __init__( - self, - connection_string: str, - channel_name: str, - **connection_kwargs: Any, - ) -> None: - """Initialize the listen driver.""" - super().__init__(connection_string, channel_name, **connection_kwargs) - self._queue: AsyncQueue[int] = AsyncQueue() - - async def on_startup(self) -> None: - """On startup.""" - self.connection: AsyncConnection[DictRow] = await AsyncConnection.connect( - self.connection_string, - **self.connection_kwargs, - autocommit=True, - cursor_factory=AsyncRawCursor, - ) - async with self.connection.cursor() as cursor: - await cursor.execute(f"LISTEN {self.channel_name}") - - self.connection.add_notify_handler(self._notification_handler) - - async def on_shutdown(self) -> None: - """On shutdown.""" - await self.connection.close() - - def _notification_handler( - self, - notify: Notify, - ) -> None: - """ - Handle NOTIFY messages. - - From asyncpg.connection.add_listener docstring: - A callable or a coroutine function receiving the following arguments: - **con_ref**: a Connection the callback is registered with; - **pid**: PID of the Postgres server that sent the notification; - **channel**: name of the channel the notification was sent to; - **payload**: the payload. 
- """ - if self._queue is not None: - self._queue.put_nowait(int(notify.payload)) - - async def __aiter__(self) -> AsyncIterator[Any]: - """Iterate over the queue.""" - async for notify in self.connection.notifies(): - yield notify.payload - - while not self.connection.closed: - message_id = await self._queue.get() - yield message_id diff --git a/taskiq_postgresql/exceptions.py b/taskiq_postgresql/exceptions.py deleted file mode 100644 index 43b7ab8..0000000 --- a/taskiq_postgresql/exceptions.py +++ /dev/null @@ -1,10 +0,0 @@ -class BaseTaskiqAsyncpgError(Exception): - """Base error for all possible exception in the lib.""" - - -class DatabaseConnectionError(BaseTaskiqAsyncpgError): - """Error if cannot connect to PostgreSQL.""" - - -class ResultIsMissingError(BaseTaskiqAsyncpgError): - """Error if cannot retrieve result from PostgreSQL.""" diff --git a/taskiq_postgresql/py.typed b/taskiq_postgresql/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/taskiq_postgresql/result_backend.py b/taskiq_postgresql/result_backend.py deleted file mode 100644 index aaa5e27..0000000 --- a/taskiq_postgresql/result_backend.py +++ /dev/null @@ -1,206 +0,0 @@ -from dataclasses import dataclass -from datetime import date, datetime -from typing import Any, Callable, Final, Literal, Optional, TypeVar, Union - -from taskiq import AsyncResultBackend, TaskiqResult -from taskiq.abc.serializer import TaskiqSerializer -from taskiq.serializers.pickle import PickleSerializer - -from taskiq_postgresql.abc.driver import QueryDriver -from taskiq_postgresql.abc.query import Column, PrimaryKeyColumn -from taskiq_postgresql.exceptions import ResultIsMissingError -from taskiq_postgresql.utils import get_db_driver - -_ReturnType = TypeVar("_ReturnType") - - -@dataclass -class Table: - """Columns for the result backend.""" - - primary_key: PrimaryKeyColumn - result: Column = Column( # noqa: RUF009 - name="result", - type_="BYTEA", - ) - - -class 
PostgresqlResultBackend(AsyncResultBackend[_ReturnType]): - """Result backend for TaskIQ based on Asyncpg.""" - - pg_backend: QueryDriver - - def __init__( - self, - dsn: Union[ - str, - Callable[ - [], - str, - ], - ] = "postgresql://postgres:postgres@localhost:5432/postgres", - keep_results: bool = True, - table_name: str = "taskiq_results", - field_for_task_id: Literal["VarChar", "Text", "Uuid"] = "Uuid", - serializer: Optional[TaskiqSerializer] = None, - driver: Literal["asyncpg", "psqlpy", "psycopg", "pg8000"] = "asyncpg", - run_migrations: bool = False, - **connect_kwargs: Any, - ) -> None: - """ - Construct new result backend. - - :param dsn: connection string to PostgreSQL. - :param keep_results: flag to not remove results from Redis after reading. - :param connect_kwargs: additional arguments for nats `ConnectionPool` class. - """ - self._dsn: Final = dsn - self.keep_results: Final = keep_results - self.table_name: Final = table_name - self.field_for_task_id: Final = field_for_task_id - self.serializer: Final = serializer or PickleSerializer() - self.connect_kwargs: Final = connect_kwargs - - self.columns = Table( - primary_key=PrimaryKeyColumn( - name="task_id", - type_=field_for_task_id, - ), - ) - - self.driver = get_db_driver(driver)( - dsn, - table_name, - columns=[ - self.columns.result, - ], - primary_key=self.columns.primary_key, - index_columns=[self.columns.primary_key], - run_migrations=run_migrations, - **connect_kwargs, - ) - - @property - def dsn(self) -> str: - """ - Get the DSN string. - - Returns the DSN string or None if not set. - """ - if callable(self._dsn): - return self._dsn() - return self._dsn - - async def startup(self) -> None: - """ - Initialize the result backend. - - Construct new connection pool - and create new table for results if not exists. 
- """ - await self.driver.on_startup() - - async def shutdown(self) -> None: - """Close the connection pool.""" - - async def set_result( - self, - task_id: Any, - result: TaskiqResult[_ReturnType], - ) -> None: - """ - Set result to the PostgreSQL table. - - Args: - task_id (Any): ID of the task. - result (TaskiqResult[_ReturnType]): result of the task. - - """ - await self.driver.insert_or_update( - [ - self.columns.primary_key, - self.columns.result, - ], - [ - task_id, - self.serializer.dumpb(result), - ], - [ - self.columns.primary_key, - ], - [ - self.columns.result, - ], - ) - - async def is_result_ready(self, task_id: Any) -> bool: - """ - Returns whether the result is ready. - - Args: - task_id (Any): ID of the task. - - Returns: - bool: True if the result is ready else False. - - """ - return await self.driver.exists(task_id) - - async def get_result( - self, - task_id: Any, - with_logs: bool = False, - ) -> TaskiqResult[_ReturnType]: - """ - Retrieve result from the task. - - :param task_id: task's id. - :param with_logs: if True it will download task's logs. - :raises ResultIsMissingError: if there is no result when trying to get it. - :return: TaskiqResult. - """ - data = await self.driver.select( - [ - self.columns.result, - ], - [ - self.columns.primary_key, - ], - [task_id], - ) - - if len(data) == 0: - raise ResultIsMissingError( - f"Cannot find record with task_id = {task_id} in PostgreSQL", - ) - - result_in_bytes = next(iter(data))["result"] - - if not self.keep_results: - await self.driver.delete( - self.columns.primary_key, - task_id, - ) - - taskiq_result: TaskiqResult[_ReturnType] = self.serializer.loadb( - result_in_bytes, - ) - - if not with_logs: - taskiq_result.log = None - - return taskiq_result - - async def delete_by_date( - self, - from_date: Union[datetime, date], - to_date: Optional[Union[datetime, date]] = None, - ) -> None: - """ - Delete results by date. - - Args: - from_date (datetime | date): Date from which to delete results. 
- to_date (datetime | date | None): Date to which to delete results. - """ - await self.driver.delete_by_date(from_date, to_date) diff --git a/taskiq_postgresql/scheduler_source.py b/taskiq_postgresql/scheduler_source.py deleted file mode 100644 index c73f779..0000000 --- a/taskiq_postgresql/scheduler_source.py +++ /dev/null @@ -1,213 +0,0 @@ -from dataclasses import dataclass -from typing import Any, Callable, Final, Literal, Optional, Union -from uuid import uuid4 - -from taskiq import ScheduledTask, ScheduleSource -from taskiq.abc.broker import AsyncBroker -from taskiq.compat import model_dump_json, model_validate_json - -from taskiq_postgresql.abc.query import ( - Column, - CreatedAtColumn, - PrimaryKeyColumn, - UpdatedAtColumn, -) -from taskiq_postgresql.utils import get_db_driver - -__all__ = ["PostgresqlSchedulerSource"] - - -@dataclass -class Table: - """Columns for the result backend.""" - - primary_key = PrimaryKeyColumn( - name="id", - type_="UUID", - ) - task_name = Column( - name="task_name", - type_="VARCHAR(100)", - ) - schedule = Column( - name="schedule", - type_="JSONB", - ) - created_at = CreatedAtColumn() - updated_at = UpdatedAtColumn() - - -class PostgresqlSchedulerSource(ScheduleSource): - """Schedule source for PostgreSQL.""" - - def __init__( - self, - dsn: Union[ - str, - Callable[ - [], - str, - ], - ] = "postgresql://postgres:postgres@localhost:5432/postgres", - table_name: str = "taskiq_schedulers", - driver: Literal["asyncpg", "psqlpy", "psycopg", "pg8000"] = "asyncpg", - startup_schedule: Optional[dict[str, list[dict[str, Any]]]] = None, - broker: Optional[AsyncBroker] = None, - run_migrations: bool = False, - **connect_kwargs: Any, - ) -> None: - """Initialize the PostgreSQL scheduler source. - - Sets up a scheduler source that stores scheduled tasks in a PostgreSQL database. - This scheduler source manages task schedules, allowing for persistent storage \ - and - retrieval of scheduled tasks across application restarts. 
- - Args: - dsn: PostgreSQL connection string. Defaults to a local PostgreSQL instance. - Format: "postgres://user:password@host:port/database" - table_name: Name of the table to store scheduled tasks. Will be created - automatically if it doesn't exist. Defaults to "taskiq_schedulers". - driver: Database driver to use for connections. Currently only "asyncpg" - is supported. - startup_schedule: Dictionary of task schedules to automatically add when - the scheduler starts up. Format: {task_name: [schedule_configs]}. - Each schedule_config should contain 'cron' or 'time' keys along with - optional 'args', 'kwargs', and 'cron_offset' keys. - broker: The TaskIQ broker instance to use for finding and managing tasks. - Required if startup_schedule is provided. - **connect_kwargs: Additional keyword arguments passed to the database - connection pool. These are driver-specific connection parameters. - - Example: - ```python - scheduler = AsyncPgSchedulerSource( - dsn="postgres://user:pass@localhost:5432/mydb", - table_name="my_schedules", - startup_schedule={ - "my_task": [ - {"cron": "0 */6 * * *", "cron_offset": "UTC",}} - ] - }, - broker=my_broker, - max_connections=20, # connect_kwargs example - ) - ``` - - Note: - The database table and required indexes will be created automatically - during the startup() method call. - """ - self._dsn: Final = dsn - self.table_name: Final = table_name - self.connect_kwargs: Final = connect_kwargs - self.columns = Table() - self.startup_schedule = startup_schedule - self.broker = broker - self.driver = get_db_driver(driver)( - dsn, - table_name, - columns=[ - self.columns.task_name, - self.columns.schedule, - self.columns.updated_at, - ], - primary_key=self.columns.primary_key, - created_at=self.columns.created_at, - index_columns=[self.columns.primary_key, self.columns.task_name], - run_migrations=run_migrations, - **connect_kwargs, - ) - - @property - def dsn(self) -> str: - """ - Get the DSN string. 
- - Returns the DSN string or None if not set. - """ - if callable(self._dsn): - return self._dsn() - return self._dsn - - async def startup(self) -> None: - """ - Create the table and index for the schedules. - - Raises: - DatabaseConnectionError: if the connection to the database fails. - """ - await self.driver.on_startup() - - if self.startup_schedule is not None and self.broker is not None: - existing_schedules = { - schedule.task_name for schedule in await self.get_schedules() - } - - for task_name, schedules in self.startup_schedule.items(): - if task_name in existing_schedules: - continue - - task = self.broker.find_task(task_name) - - if task is None: - continue - - for schedule in schedules: - if "cron" not in schedule and "time" not in schedule: - continue - - await self.add_schedule( - ScheduledTask( - task_name=task_name, - labels=task.labels, - schedule_id=uuid4().hex, - args=schedule.get("args", []), - kwargs=schedule.get("kwargs", {}), - cron=schedule.get("cron"), - time=schedule.get("time"), - cron_offset=schedule.get("cron_offset"), - ), - ) - - async def shutdown(self) -> None: - """Close the connection pool.""" - await self.driver.on_shutdown() - - async def get_schedules(self) -> list[ScheduledTask]: - """Get the schedules from the database.""" - schedule_rows = await self.driver.select( - [self.columns.schedule], - ) - - return [ - model_validate_json(ScheduledTask, schedule_row["schedule"]) - if isinstance(schedule_row["schedule"], str) - else ScheduledTask(**schedule_row["schedule"]) - for schedule_row in schedule_rows - ] - - async def add_schedule(self, schedule: ScheduledTask) -> None: - """Add a schedule to the database.""" - await self.driver.insert( - [self.columns.primary_key, self.columns.task_name, self.columns.schedule], - [ - schedule.schedule_id, - schedule.task_name, - model_dump_json( - schedule, - ), - ], - ) - - async def delete_schedule(self, schedule_id: str) -> None: - """Delete a schedule from the database.""" - await 
self.driver.delete( - self.columns.primary_key, - schedule_id, - ) - - async def post_send(self, task: ScheduledTask) -> None: - """Delete a task after it's completed.""" - if task.time is not None: - await self.delete_schedule(task.schedule_id) diff --git a/taskiq_postgresql/utils/__init__.py b/taskiq_postgresql/utils/__init__.py deleted file mode 100644 index df03cc5..0000000 --- a/taskiq_postgresql/utils/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from taskiq_postgresql.utils.get_db_driver import get_db_driver -from taskiq_postgresql.utils.get_db_listen_driver import get_db_listen_driver -from taskiq_postgresql.utils.libs_available import is_asyncpg_available - -__all__ = ["get_db_driver", "get_db_listen_driver", "is_asyncpg_available"] diff --git a/taskiq_postgresql/utils/get_db_driver.py b/taskiq_postgresql/utils/get_db_driver.py deleted file mode 100644 index 0930517..0000000 --- a/taskiq_postgresql/utils/get_db_driver.py +++ /dev/null @@ -1,35 +0,0 @@ -from typing import TYPE_CHECKING, Literal, Type, Union - -if TYPE_CHECKING: - from taskiq_postgresql.drivers import ( - AsyncpgDriver, - PsqlpyDriver, - PsycopgDriver, - ) - - -def get_db_driver( - driver: Literal["asyncpg", "psqlpy", "psycopg"], -) -> Type[Union["AsyncpgDriver", "PsqlpyDriver", "PsycopgDriver"]]: # type: ignore - """Get the database driver.""" - try: - if driver == "asyncpg": - from taskiq_postgresql.drivers import AsyncpgDriver # noqa: PLC0415 - - return AsyncpgDriver - if driver == "psqlpy": - from taskiq_postgresql.drivers import PsqlpyDriver # noqa: PLC0415 - - return PsqlpyDriver - if driver == "psycopg": - from taskiq_postgresql.drivers import PsycopgDriver # noqa: PLC0415 - - return PsycopgDriver - - except ImportError: - raise ImportError( - f"{driver} is not installed. 
\ - Please install it with `pip install taskiq-postgresql[{driver}]`.", - ) from None - - raise ValueError(f"Driver {driver} is not supported.") diff --git a/taskiq_postgresql/utils/get_db_listen_driver.py b/taskiq_postgresql/utils/get_db_listen_driver.py deleted file mode 100644 index f04f4a0..0000000 --- a/taskiq_postgresql/utils/get_db_listen_driver.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import TYPE_CHECKING, Literal, Type, Union - -if TYPE_CHECKING: - from taskiq_postgresql.drivers import ( - AsyncpgListenDriver, - PsqlpyListenDriver, - PsycopgListenDriver, - ) - - -def get_db_listen_driver( - driver: Literal["asyncpg", "psqlpy", "psycopg"], -) -> Type[ - Union[ - "AsyncpgListenDriver", - "PsqlpyListenDriver", - "PsycopgListenDriver", - ] -]: # type: ignore - """Get the database driver.""" - try: - if driver == "asyncpg": - from taskiq_postgresql.drivers import AsyncpgListenDriver # noqa: PLC0415 - - return AsyncpgListenDriver - if driver == "psqlpy": - from taskiq_postgresql.drivers import PsqlpyListenDriver # noqa: PLC0415 - - return PsqlpyListenDriver - if driver == "psycopg": - from taskiq_postgresql.drivers import PsycopgListenDriver # noqa: PLC0415 - - return PsycopgListenDriver - except ImportError: - raise ImportError( - f"{driver} is not installed. 
\ - Please install it with `pip install taskiq-postgresql[{driver}]`.", - ) from None - - raise ValueError(f"Driver {driver} is not supported.") diff --git a/taskiq_postgresql/utils/libs_available.py b/taskiq_postgresql/utils/libs_available.py deleted file mode 100644 index cda2693..0000000 --- a/taskiq_postgresql/utils/libs_available.py +++ /dev/null @@ -1,16 +0,0 @@ -from importlib.util import find_spec - - -def is_asyncpg_available() -> bool: - """Check if asyncpg is available.""" - return find_spec("asyncpg") is not None - - -def is_psqlpy_available() -> bool: - """Check if psqlpy is available.""" - return find_spec("psqlpy") is not None - - -def is_psycopg_available() -> bool: - """Check if psycopg is available.""" - return find_spec("psycopg") is not None diff --git a/taskiq_sqlalchemy/__init__.py b/taskiq_sqlalchemy/__init__.py index e69de29..a4be39a 100644 --- a/taskiq_sqlalchemy/__init__.py +++ b/taskiq_sqlalchemy/__init__.py @@ -0,0 +1,8 @@ +""" +taskiq-sqlalchemy - SQLAlchemy-backed broker, result backend, and scheduler source for + TaskIQ. 
+""" + +from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + +__all__ = ["SQLAlchemyResultBackend"] diff --git a/taskiq_sqlalchemy/manager.py b/taskiq_sqlalchemy/manager.py new file mode 100644 index 0000000..7304ef7 --- /dev/null +++ b/taskiq_sqlalchemy/manager.py @@ -0,0 +1,55 @@ +import typing as t + +from sqlalchemy.ext.asyncio import AsyncEngine + +from taskiq_sqlalchemy.models import ( + TaskiqQueueMixin, + TaskiqResultMixin, + TaskiqScheduleMixin, +) + + +class SQLAlchemyManager: + engine: t.Optional[AsyncEngine] + + queue_cls: t.Optional[type[TaskiqQueueMixin]] + result_cls: t.Optional[type[TaskiqResultMixin]] + schedule_cls: t.Optional[type[TaskiqScheduleMixin]] + + def __init__( + self, + queue_cls: t.Optional[type[TaskiqQueueMixin]] = None, + result_cls: t.Optional[type[TaskiqResultMixin]] = None, + schedule_cls: t.Optional[type[TaskiqScheduleMixin]] = None, + ) -> None: + self.engine = None + + self.queue_cls = queue_cls + self.result_cls = result_cls + self.schedule_cls = schedule_cls + + def register_tables(self, base_classes: t.Sequence[t.Any]) -> None: + + if self.queue_cls is None: + + class TaskiqQueue(*base_classes, TaskiqQueueMixin): + pass + + self.queue_cls = TaskiqQueue + + if self.result_cls is None: + + class TaskiqResult(*base_classes, TaskiqResultMixin): + pass + + self.result_cls = TaskiqResult + + if self.schedule_cls is None: + + class TaskiqSchedule(*base_classes, TaskiqScheduleMixin): + pass + + self.schedule_cls = TaskiqSchedule + + def configure(self, engine: AsyncEngine) -> None: + self.engine = engine diff --git a/taskiq_sqlalchemy/models.py b/taskiq_sqlalchemy/models.py new file mode 100644 index 0000000..9a1901c --- /dev/null +++ b/taskiq_sqlalchemy/models.py @@ -0,0 +1,54 @@ +import datetime +import typing as t + +import sqlalchemy as sa +from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.sql import expression + + +class BaseMixin: + # Sqlite doesn't allow BIGINT to be used as a primary key 
with autoincrement. + # See: https://stackoverflow.com/questions/18835740 + id: Mapped[int] = mapped_column( + (sa.BigInteger().with_variant(sa.Integer, "sqlite")), + primary_key=True, + ) + + created_at: Mapped[datetime.datetime] = mapped_column( + sa.DateTime, server_default=sa.func.now() + ) + + +class TaskiqQueueMixin(BaseMixin): + __tablename__ = "taskiq_queue" + + task_id: Mapped[str] = mapped_column(sa.String(255), index=True) + task_name: Mapped[str] = mapped_column(sa.String(255)) + + channel: Mapped[str] = mapped_column(sa.String(255), index=True) + message: Mapped[bytes] = mapped_column(sa.LargeBinary) + + +class TaskiqResultMixin(BaseMixin): + __tablename__ = "taskiq_result" + + task_id: Mapped[str] = mapped_column(sa.String(255), unique=True) + + result: Mapped[t.Optional[bytes]] = mapped_column(sa.LargeBinary) + is_err: Mapped[bool] = mapped_column( + sa.Boolean(name="bool_is_err"), server_default=expression.false() + ) + + +class TaskiqScheduleMixin(BaseMixin): + __tablename__ = "taskiq_schedule" + + task_name: Mapped[str] = mapped_column(sa.String(255)) + + schedule: Mapped[t.Any] = mapped_column(sa.JSON) + + updated_at: Mapped[datetime.datetime] = mapped_column( + sa.DateTime, + server_default=sa.func.now(), + onupdate=sa.func.now(), + ) diff --git a/taskiq_sqlalchemy/result_backend.py b/taskiq_sqlalchemy/result_backend.py new file mode 100644 index 0000000..47c6d7f --- /dev/null +++ b/taskiq_sqlalchemy/result_backend.py @@ -0,0 +1,119 @@ +"""taskiq_sqlalchemy.result_backend + +Pure SQLAlchemy result backend +Works on any SQLAlchemy async engine (Postgres, Oracle, MSSQL, SQLite, ...). 
+""" + +import logging +import typing as t +from typing import TypeVar + +import sqlalchemy as sa +from taskiq import AsyncResultBackend +from taskiq.abc.serializer import TaskiqSerializer +from taskiq.result import TaskiqResult +from taskiq.serializers.pickle import PickleSerializer + +from taskiq_sqlalchemy.manager import SQLAlchemyManager + +_ReturnType = TypeVar("_ReturnType") +logger = logging.getLogger(__name__) + + +class SQLAlchemyResultBackend(AsyncResultBackend[_ReturnType]): + """ + Stores and retrieves task results in a SQL table. + """ + + def __init__( + self, + manager: SQLAlchemyManager, + *, + keep_results: bool = True, + serializer: t.Optional[TaskiqSerializer] = None, + ) -> None: + self.manager = manager + self.keep_results = keep_results + self.serializer = serializer or PickleSerializer() + + # ------------------------------------------------------------------ + # AsyncResultBackend protocol + # ------------------------------------------------------------------ + + async def set_result(self, task_id: str, result: TaskiqResult[_ReturnType]) -> None: + async_engine = self.manager.engine + serialised = self.serializer.dumpb(result) + async with async_engine.begin() as conn: + # Upsert pattern: try insert, update on conflict. + # SQLAlchemy Core provides dialect-agnostic on_conflict helpers + # only for Postgres. For other dialects we fall back + # to a delete-then-insert, which is safe because set_result is + # called at most once per task_id. 
+ dialect = async_engine.dialect.name + if dialect in ("postgresql",): + from sqlalchemy.dialects.postgresql import insert as pg_insert + + stmt = ( + pg_insert(self.manager.result_cls) + .values( + task_id=task_id, + result=serialised, + is_err=result.is_err, + ) + .on_conflict_do_update( + index_elements=["task_id"], + set_={ + "result": serialised, + "is_err": result.is_err, + }, + ) + ) + else: + # Generic fallback: delete then insert (safe, task results are + # written exactly once per task_id in normal operation) + await conn.execute( + sa.delete(self.manager.result_cls).where( + self.manager.result_cls.task_id == task_id + ) + ) + stmt = sa.insert(self.manager.result_cls).values( + task_id=task_id, + result=serialised, + is_err=result.is_err, + ) + await conn.execute(stmt) + + async def get_result( + self, task_id: str, with_logs: bool = False + ) -> TaskiqResult[_ReturnType]: + async with self.manager.engine.begin() as conn: + row = ( + await conn.execute( + sa.select(self.manager.result_cls).where( + self.manager.result_cls.task_id == task_id + ) + ) + ).fetchone() + + if row is None: + raise ValueError(f"No result found for task_id={task_id!r}") + + if not self.keep_results: + await conn.execute( + sa.delete(self.manager.result_cls).where( + self.manager.result_cls.task_id == task_id + ) + ) + + result: TaskiqResult[_ReturnType] = self.serializer.loadb(row.result) + if not with_logs: + result.log = None + return result + + async def is_result_ready(self, task_id: str) -> bool: + async with self.manager.engine.connect() as conn: + exists_stmt = sa.select( + sa.exists().where(self.manager.result_cls.task_id == task_id) + ) + + return (await conn.execute(exists_stmt)).scalar_one() diff --git a/tests/conftest.py b/tests/conftest.py index 1ee269a..b3ef23b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,116 +1,13 @@ -import os -import random -import string -from typing import AsyncGenerator, TypeVar +"""tests/conftest.py -import pytest - -from 
taskiq_postgresql.broker import PostgresqlBroker -from taskiq_postgresql.result_backend import PostgresqlResultBackend -from taskiq_postgresql.scheduler_source import PostgresqlSchedulerSource +Top-level conftest — kept intentionally minimal. +Per-component fixtures live in their own sub-package conftest files +(e.g. tests/result_backend/conftest.py). +""" -_ReturnType = TypeVar("_ReturnType") +import pytest @pytest.fixture(scope="session") def anyio_backend() -> str: - """ - Anyio backend. - - Backend for anyio pytest plugin. - :return: backend name. - """ return "asyncio" - - -@pytest.fixture -def postgres_table() -> str: - """ - Name of a postgresql table for current test. - - :return: random string. - """ - return "".join( - random.choice( - string.ascii_lowercase, - ) - for _ in range(10) - ) - - -@pytest.fixture -def postgresql_dsn() -> str: - """ - DSN to PostgreSQL. - - :return: dsn to PostgreSQL. - """ - return os.environ.get("POSTGRESQL_URL") or "postgresql://root:secret@localhost:5432" - - -@pytest.fixture(params=["asyncpg", "psqlpy", "psycopg"]) -async def result_backend( - postgresql_dsn: str, - postgres_table: str, - request: pytest.FixtureRequest, -) -> AsyncGenerator[PostgresqlResultBackend[_ReturnType], None]: - backend: PostgresqlResultBackend[_ReturnType] = PostgresqlResultBackend( - dsn=postgresql_dsn, - table_name=postgres_table, - driver=request.param, - ) - await backend.startup() - yield backend - - async with backend.driver, backend.driver.connection() as connection: - await connection.execute(f"DROP TABLE {postgres_table}") - - await backend.shutdown() - - -@pytest.fixture(params=["asyncpg", "psqlpy", "psycopg"]) -async def broker( - postgresql_dsn: str, - postgres_table: str, - request: pytest.FixtureRequest, -) -> AsyncGenerator[PostgresqlBroker, None]: - """ - Fixture to set up and tear down the broker. - - Initializes the broker with test parameters. 
- """ - broker = PostgresqlBroker( - dsn=postgresql_dsn, - channel_name=f"{postgres_table}_channel", - table_name=postgres_table, - driver=request.param, - ) - await broker.startup() - yield broker - - async with broker.driver, broker.driver.connection() as connection: - await connection.execute(f"DROP TABLE {postgres_table}") - - await broker.shutdown() - - -@pytest.fixture(params=["asyncpg", "psqlpy", "psycopg"]) -async def scheduler_source( - postgresql_dsn: str, - postgres_table: str, - request: pytest.FixtureRequest, -) -> AsyncGenerator[PostgresqlSchedulerSource, None]: - """Fixture to set up and tear down the scheduler source.""" - scheduler = PostgresqlSchedulerSource( - dsn=postgresql_dsn, - table_name=postgres_table, - driver=request.param, - ) - await scheduler.startup() - yield scheduler - - # Clean up: drop the table - async with scheduler.driver, scheduler.driver.connection() as connection: - await connection.execute(f"DROP TABLE {postgres_table}") - - await scheduler.shutdown() diff --git a/tests/result_backend/__init__.py b/tests/result_backend/__init__.py new file mode 100644 index 0000000..5bb28a1 --- /dev/null +++ b/tests/result_backend/__init__.py @@ -0,0 +1 @@ +# tests/result_backend diff --git a/tests/result_backend/conftest.py b/tests/result_backend/conftest.py new file mode 100644 index 0000000..2034521 --- /dev/null +++ b/tests/result_backend/conftest.py @@ -0,0 +1,180 @@ +import typing as t +import uuid +from contextlib import asynccontextmanager + +import pytest +import sqlalchemy as sa +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine +from sqlalchemy.orm import DeclarativeBase +from taskiq import TaskiqResult + +from taskiq_sqlalchemy.manager import SQLAlchemyManager +from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + +# Map of (pytest param id) → (SQLAlchemy async URL) +_ENGINE_PARAMS: list = [ + pytest.param( + "sqlite+aiosqlite:///:memory:", + id="sqlite+aiosqlite", + ), + pytest.param( + 
"postgresql+asyncpg://postgres:postgres@localhost:5432/postgres", + id="postgresql+asyncpg", + marks=pytest.mark.postgresql, + ), + pytest.param( + "postgresql+psycopg://postgres:postgres@localhost:5432/postgres", + id="postgresql+psycopg", + marks=pytest.mark.postgresql, + ), +] + + +@asynccontextmanager +async def _try_connect(engine: AsyncEngine) -> t.AsyncGenerator[AsyncEngine, None]: + """Yield the engine; skip the test if the DB is unreachable.""" + try: + async with engine.connect() as conn: + await conn.execute(sa.text("SELECT 1")) + yield engine + except Exception as exc: + await engine.dispose() + pytest.skip(f"Database not reachable ({engine.url.drivername}): {exc}") + + +@pytest.fixture(params=_ENGINE_PARAMS) +async def async_engine( + request: pytest.FixtureRequest, +) -> t.AsyncGenerator[AsyncEngine, None]: + """ + Yields one ``AsyncEngine`` per dialect/driver combination. + + Tests are **skipped** (not failed) when the backing service is unreachable + or the required driver package is not installed. This keeps the local dev + loop fast (SQLite always works) while giving full coverage in CI. + """ + url: str = request.param + engine = create_async_engine(url) + try: + async with _try_connect(engine): + yield engine + finally: + await engine.dispose() + + +@pytest.fixture +async def manager_and_backend( + async_engine: AsyncEngine, +) -> t.AsyncGenerator[tuple[SQLAlchemyManager, SQLAlchemyResultBackend[t.Any]], None]: + """ + Creates the taskiq_result table, yields a ready (manager, backend) pair, + then drops the table — giving every test a clean slate. + + The ``DeclarativeBase`` is created fresh per test so that the ORM metadata + is not shared between parametrized runs (avoids SQLAlchemy mapper conflicts + when the same engine is reused across tests with different table names). 
+ """ + + class _Base(DeclarativeBase): + pass + + manager = SQLAlchemyManager() + manager.register_tables(base_classes=(_Base,)) + manager.configure(engine=async_engine) + + # Create schema + async with async_engine.begin() as conn: + await conn.run_sync(_Base.metadata.create_all) + + backend: SQLAlchemyResultBackend[t.Any] = SQLAlchemyResultBackend(manager) + + try: + yield manager, backend + finally: + # Drop schema — even if the test raised + async with async_engine.begin() as conn: + await conn.run_sync(_Base.metadata.drop_all) + + +@pytest.fixture +async def result_backend( + manager_and_backend: tuple[SQLAlchemyManager, SQLAlchemyResultBackend[t.Any]], +) -> SQLAlchemyResultBackend[t.Any]: + """Convenience fixture: just the backend (most tests don't need the manager).""" + _, backend = manager_and_backend + return backend + + +@pytest.fixture +async def keep_results_false_backend( + async_engine: AsyncEngine, +) -> t.AsyncGenerator[SQLAlchemyResultBackend[t.Any], None]: + """ + A backend with ``keep_results=False``, created against the same engine + as the parametrized ``async_engine`` fixture. 
+ """ + + class _Base(DeclarativeBase): + pass + + manager = SQLAlchemyManager() + manager.register_tables(base_classes=(_Base,)) + manager.configure(engine=async_engine) + + async with async_engine.begin() as conn: + await conn.run_sync(_Base.metadata.create_all) + + backend: SQLAlchemyResultBackend[t.Any] = SQLAlchemyResultBackend( + manager, keep_results=False + ) + + try: + yield backend + finally: + async with async_engine.begin() as conn: + await conn.run_sync(_Base.metadata.drop_all) + + +@pytest.fixture +def task_id() -> str: + """A fresh UUID string for each test.""" + return str(uuid.uuid4()) + + +@pytest.fixture +def another_task_id() -> str: + """A second, distinct UUID string.""" + return str(uuid.uuid4()) + + +@pytest.fixture +def simple_result() -> TaskiqResult[str]: + """Successful result with a plain string return value.""" + return TaskiqResult( + is_err=False, + log=None, + return_value="hello world", + execution_time=0.42, + ) + + +@pytest.fixture +def result_with_logs() -> TaskiqResult[str]: + """Successful result that carries log output.""" + return TaskiqResult( + is_err=False, + log="some log line", + return_value="with logs", + execution_time=0.1, + ) + + +@pytest.fixture +def error_result() -> TaskiqResult[str]: + """A result representing a failed task (``is_err=True``).""" + return TaskiqResult( + is_err=True, + log="Traceback (most recent call last): ...", + return_value="", + execution_time=0.05, + ) diff --git a/tests/result_backend/test_get_result.py b/tests/result_backend/test_get_result.py new file mode 100644 index 0000000..96417c5 --- /dev/null +++ b/tests/result_backend/test_get_result.py @@ -0,0 +1,125 @@ +import types +import typing as t +import uuid + +import pytest +from taskiq import TaskiqResult + +from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + +pytestmark = pytest.mark.anyio + + +async def test_get_result_returns_stored_value( + result_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, + 
simple_result: TaskiqResult[str], +) -> None: + """get_result returns the same TaskiqResult that was stored.""" + await result_backend.set_result(task_id=task_id, result=simple_result) + + recovered = await result_backend.get_result(task_id=task_id) + + assert recovered.return_value == simple_result.return_value + assert recovered.is_err == simple_result.is_err + assert recovered.execution_time == simple_result.execution_time + + +async def test_get_result_missing_raises( + result_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, +) -> None: + """ValueError is raised when no result exists for the given task_id.""" + with pytest.raises(ValueError, match="No result found"): + await result_backend.get_result(task_id=task_id) + + +async def test_get_result_logs_stripped_by_default( + result_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, + result_with_logs: TaskiqResult[str], +) -> None: + """``result.log`` is None when with_logs=False (the default).""" + await result_backend.set_result(task_id=task_id, result=result_with_logs) + + recovered = await result_backend.get_result(task_id=task_id, with_logs=False) + + assert recovered.log is None, "Log should be stripped when with_logs=False" + assert recovered.return_value == result_with_logs.return_value + + +async def test_get_result_logs_preserved_when_requested( + result_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, + result_with_logs: TaskiqResult[str], +) -> None: + """``result.log`` is preserved when with_logs=True.""" + await result_backend.set_result(task_id=task_id, result=result_with_logs) + + recovered = await result_backend.get_result(task_id=task_id, with_logs=True) + + assert recovered.log == result_with_logs.log, ( + "Log should be present when with_logs=True" + ) + + +async def test_get_result_keep_results_false_deletes_row( + keep_results_false_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, + simple_result: TaskiqResult[str], +) -> None: + """ + With keep_results=False 
the row is deleted after the first successful + get_result call; a subsequent call raises ValueError. + """ + backend = keep_results_false_backend + + await backend.set_result(task_id=task_id, result=simple_result) + + # First read — should succeed + recovered = await backend.get_result(task_id=task_id) + assert recovered.return_value == simple_result.return_value + + # Second read — row must have been deleted + with pytest.raises(ValueError, match="No result found"): + await backend.get_result(task_id=task_id) + + +async def test_get_result_keep_results_true_keeps_row( + result_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, + simple_result: TaskiqResult[str], +) -> None: + """ + With keep_results=True (default) repeated get_result calls always succeed. + """ + await result_backend.set_result(task_id=task_id, result=simple_result) + + first = await result_backend.get_result(task_id=task_id) + second = await result_backend.get_result(task_id=task_id) + + assert first.return_value == second.return_value == simple_result.return_value + + +async def test_get_result_complex_object_roundtrip( + result_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, +) -> None: + """ + A complex Python object (SimpleNamespace with a UUID field) survives + the pickle serialization round-trip through the database. 
+ """ + original_value = types.SimpleNamespace(job_id=uuid.uuid4(), score=3.14) + complex_result = TaskiqResult( + is_err=False, + log=None, + return_value=original_value, + execution_time=0.2, + ) + + await result_backend.set_result(task_id=task_id, result=complex_result) + recovered = await result_backend.get_result(task_id=task_id, with_logs=False) + + assert recovered.return_value.job_id == original_value.job_id + assert recovered.return_value.score == original_value.score + assert recovered.is_err is False diff --git a/tests/result_backend/test_is_result_ready.py b/tests/result_backend/test_is_result_ready.py new file mode 100644 index 0000000..cdecbc2 --- /dev/null +++ b/tests/result_backend/test_is_result_ready.py @@ -0,0 +1,78 @@ +import typing as t + +import pytest +from taskiq import TaskiqResult + +from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + +pytestmark = pytest.mark.anyio + + +async def test_is_result_ready_false_before_set( + result_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, +) -> None: + """is_result_ready returns False when no result has been stored yet.""" + ready = await result_backend.is_result_ready(task_id=task_id) + assert ready is False + + +async def test_is_result_ready_true_after_set( + result_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, + simple_result: TaskiqResult[str], +) -> None: + """is_result_ready returns True immediately after set_result.""" + assert (await result_backend.is_result_ready(task_id=task_id)) is False + + await result_backend.set_result(task_id=task_id, result=simple_result) + + assert (await result_backend.is_result_ready(task_id=task_id)) is True + + +async def test_is_result_ready_false_after_consumed( + keep_results_false_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, + simple_result: TaskiqResult[str], +) -> None: + """ + After the result is consumed via get_result (keep_results=False), + is_result_ready returns False because the row has been deleted. 
+ """ + backend = keep_results_false_backend + + await backend.set_result(task_id=task_id, result=simple_result) + assert await backend.is_result_ready(task_id=task_id) + + # Consume — deletes the row + await backend.get_result(task_id=task_id) + + assert not await backend.is_result_ready(task_id=task_id) + + +async def test_is_result_ready_independent_task_ids( + result_backend: SQLAlchemyResultBackend[t.Any], + task_id: str, + another_task_id: str, + simple_result: TaskiqResult[str], + error_result: TaskiqResult[str], +) -> None: + """ + Setting a result for task_id_a must not affect the readiness state of + task_id_b, and vice-versa. + """ + # Neither is ready yet + assert not await result_backend.is_result_ready(task_id=task_id) + assert not await result_backend.is_result_ready(task_id=another_task_id) + + # Store only the first + await result_backend.set_result(task_id=task_id, result=simple_result) + + assert await result_backend.is_result_ready(task_id=task_id) + assert not await result_backend.is_result_ready(task_id=another_task_id) + + # Store the second + await result_backend.set_result(task_id=another_task_id, result=error_result) + + assert await result_backend.is_result_ready(task_id=task_id) + assert await result_backend.is_result_ready(task_id=another_task_id) diff --git a/tests/result_backend/test_set_result.py b/tests/result_backend/test_set_result.py new file mode 100644 index 0000000..846648a --- /dev/null +++ b/tests/result_backend/test_set_result.py @@ -0,0 +1,123 @@ +import typing as t + +import pytest +import sqlalchemy as sa +from taskiq import TaskiqResult +from taskiq.serializers.pickle import PickleSerializer + +from taskiq_sqlalchemy.manager import SQLAlchemyManager +from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + +pytestmark = pytest.mark.anyio + + +async def test_set_result_stores_row( + result_backend: SQLAlchemyResultBackend[t.Any], + manager_and_backend: tuple[SQLAlchemyManager, 
SQLAlchemyResultBackend[t.Any]], + task_id: str, + simple_result: TaskiqResult[str], +) -> None: + """A row with the correct task_id is written to the database.""" + manager, _ = manager_and_backend + + await result_backend.set_result(task_id=task_id, result=simple_result) + + async with manager.engine.connect() as conn: + row = ( + await conn.execute( + sa.select(manager.result_cls).where( + manager.result_cls.task_id == task_id + ) + ) + ).fetchone() + + assert row is not None, "Expected a row in the DB after set_result" + assert row.task_id == task_id + assert row.is_err is False + assert row.result is not None # bytes, non-empty + + +async def test_set_result_error_flag( + result_backend: SQLAlchemyResultBackend[t.Any], + manager_and_backend: tuple[SQLAlchemyManager, SQLAlchemyResultBackend[t.Any]], + task_id: str, + error_result: TaskiqResult[str], +) -> None: + """``is_err=True`` is correctly persisted for a failed task.""" + manager, _ = manager_and_backend + + await result_backend.set_result(task_id=task_id, result=error_result) + + async with manager.engine.connect() as conn: + row = ( + await conn.execute( + sa.select(manager.result_cls).where( + manager.result_cls.task_id == task_id + ) + ) + ).fetchone() + + assert row is not None + assert row.is_err is True + + +async def test_set_result_idempotent_overwrite( + result_backend: SQLAlchemyResultBackend[t.Any], + manager_and_backend: tuple[SQLAlchemyManager, SQLAlchemyResultBackend[t.Any]], + task_id: str, + simple_result: TaskiqResult[str], + error_result: TaskiqResult[str], +) -> None: + """ + Calling set_result twice for the same task_id must not raise. + The second write wins (upsert for PostgreSQL; delete+insert for others). 
+ """ + manager, _ = manager_and_backend + + await result_backend.set_result(task_id=task_id, result=simple_result) + # Second call — must not raise + await result_backend.set_result(task_id=task_id, result=error_result) + + # Verify only one row exists and it reflects the second write + async with manager.engine.connect() as conn: + rows = ( + await conn.execute( + sa.select(manager.result_cls).where( + manager.result_cls.task_id == task_id + ) + ) + ).fetchall() + + assert len(rows) == 1, "Exactly one row should exist after two set_result calls" + assert rows[0].is_err is True, "Latest write (error_result) should have won" + + +async def test_set_result_serialization_roundtrip( + result_backend: SQLAlchemyResultBackend[t.Any], + manager_and_backend: tuple[SQLAlchemyManager, SQLAlchemyResultBackend[t.Any]], + task_id: str, + simple_result: TaskiqResult[str], +) -> None: + """ + Raw bytes stored in the DB can be deserialized back to the original + TaskiqResult using the default PickleSerializer. 
+ """ + manager, _ = manager_and_backend + serializer = PickleSerializer() + + await result_backend.set_result(task_id=task_id, result=simple_result) + + async with manager.engine.connect() as conn: + row = ( + await conn.execute( + sa.select(manager.result_cls).where( + manager.result_cls.task_id == task_id + ) + ) + ).fetchone() + + assert row is not None + recovered: TaskiqResult[str] = serializer.loadb(row.result) + assert recovered.return_value == simple_result.return_value + assert recovered.is_err == simple_result.is_err + assert recovered.execution_time == simple_result.execution_time diff --git a/tests/test_broker.py b/tests/test_broker.py deleted file mode 100644 index d532bf1..0000000 --- a/tests/test_broker.py +++ /dev/null @@ -1,200 +0,0 @@ -import asyncio -import json -import uuid -from typing import Literal, Union - -import pytest -from taskiq import AckableMessage, BrokerMessage -from taskiq.utils import maybe_awaitable - -from taskiq_postgresql import PostgresqlBroker -from taskiq_postgresql.exceptions import DatabaseConnectionError - -pytestmark = pytest.mark.anyio - - -async def get_first_task( - broker: PostgresqlBroker, -) -> Union[AckableMessage, bytes]: - """ - Get the first message from the broker's listen method. - - :param broker: Instance of AsyncpgBroker. - :return: The first AckableMessage received. 
- """ - async for message in broker.listen(): - return message - return b"" - - -@pytest.mark.parametrize("driver", ["asyncpg", "psqlpy", "psycopg"]) -async def test_failure_connection_database( - driver: Literal["asyncpg", "psqlpy", "psycopg"], -) -> None: - """Test exception raising in connection database.""" - with pytest.raises(expected_exception=DatabaseConnectionError): - await PostgresqlBroker( - dsn="postgresql://postgres:postgres@localhost:5432/aaaaaaaaa", - table_name="postgres_table", - driver=driver, - ).startup() - - -async def test_when_broker_deliver_message__then_worker_receive_message( - broker: PostgresqlBroker, -) -> None: - """ - Test that messages are published and read correctly. - - We kick the message, listen to the queue, and check that - the received message matches what was sent. - """ - valid_broker_message = BrokerMessage( - task_id=uuid.uuid4().hex, - task_name=uuid.uuid4().hex, - message=b"my_msg", - labels={ - "label1": "val1", - }, - ) - - worker_task = asyncio.create_task(get_first_task(broker)) - await asyncio.sleep(0.2) - - # Send the message - await broker.kick(valid_broker_message) - await asyncio.sleep(0.2) - - message = next(iter(await asyncio.gather(worker_task))) - assert message.data == valid_broker_message.message - - -@pytest.mark.parametrize( - "table_already_exists", - [ - pytest.param(True, id="table_already_exists"), - pytest.param(False, id="table_does_not_exist"), - ], -) -async def test_when_startup__then_table_should_be_created( - broker: PostgresqlBroker, - table_already_exists: bool, -) -> None: - """ - Test the startup process of the broker. - - We drop the messages table, restart the broker, and ensure - that the table is recreated. 
- """ - await broker.shutdown() - - if not table_already_exists: - await broker.driver.execute( - f"DROP TABLE IF EXISTS {broker.table_name}", - ) - - await broker.startup() - - # Verify that the table exists - table_exists = await broker.driver.execute( - f"SELECT * FROM {broker.table_name}", # noqa: S608 - ) - assert table_exists == [] # Table should be empty - - -async def test_listen( - broker: PostgresqlBroker, -) -> None: - """ - Test listening to messages. - - Test that the broker can listen to messages inserted directly into the database - and notified via the channel. - """ - # Insert a message directly into the database - message_content = b"test_message" - task_id = uuid.uuid4().hex - task_name = "test_task" - labels = {"label1": "label_val"} - message_id = await broker.driver.insert( - columns=[ - broker.columns.task_id, - broker.columns.task_name, - broker.columns.message, - broker.columns.labels, - ], - values=[task_id, task_name, message_content, json.dumps(labels)], - returning=[broker.columns.primary_key], - ) - # Send a NOTIFY with the message ID - await broker.driver.execute( - f"NOTIFY {broker.channel_name}, '{message_id}'", - ) - - # Listen for the message - message = await asyncio.wait_for(get_first_task(broker), timeout=1.0) - assert message.data == message_content - - # Acknowledge the message - await maybe_awaitable(message.ack()) - - -async def test_wrong_format( - broker: PostgresqlBroker, -) -> None: - """Test that messages with incorrect formats are still received.""" - # Insert a message with missing task_id and task_name - - message_id = await broker.driver.insert( - columns=[ - broker.columns.task_id, - broker.columns.task_name, - broker.columns.message, - broker.columns.labels, - ], - values=[ - uuid.uuid4().hex, # Missing task_id - "", # Missing task_name - b"wrong", # Message content - json.dumps({}), # Empty labels - ], - returning=[broker.columns.primary_key], - ) - # Send a NOTIFY with the message ID - await 
broker.driver.execute( - f"NOTIFY {broker.channel_name}, '{message_id}'", - ) - - # Listen for the message - message = await asyncio.wait_for(get_first_task(broker), timeout=1.0) - assert message.data == b"wrong" - - # Acknowledge the message - await maybe_awaitable(message.ack()) - - -async def test_delayed_message(broker: PostgresqlBroker) -> None: - """Test that delayed messages are delivered correctly after the specified delay.""" - # Send a message with a delay - task_id = uuid.uuid4().hex - task_name = "test_task" - sent = BrokerMessage( - task_id=task_id, - task_name=task_name, - message=b"delayed_message", - labels={ - "delay": "1", # Delay in seconds - }, - ) - await broker.kick(sent) - - # Try to get the message immediately (should not be available yet) - with pytest.raises(asyncio.TimeoutError): - await asyncio.wait_for(get_first_task(broker), timeout=1.0) - - # Wait for the delay to pass and receive the message - message = await asyncio.wait_for(get_first_task(broker), timeout=2.0) - assert message.data == sent.message - - # Acknowledge the message - await maybe_awaitable(message.ack()) diff --git a/tests/test_result_backend.py b/tests/test_result_backend.py deleted file mode 100644 index bffb7de..0000000 --- a/tests/test_result_backend.py +++ /dev/null @@ -1,164 +0,0 @@ -import uuid -from typing import Any, TypeVar - -import pytest -from taskiq import TaskiqResult - -from taskiq_postgresql.exceptions import DatabaseConnectionError, ResultIsMissingError -from taskiq_postgresql.result_backend import PostgresqlResultBackend - -_ReturnType = TypeVar("_ReturnType") - -pytestmark = pytest.mark.anyio - - -class ResultForTest: - """Just test class for testing.""" - - def __init__(self) -> None: - """Generates test class for result testing.""" - self.test_arg = uuid.uuid4() - - -@pytest.fixture -def task_id() -> str: - """ - Generates ID for taskiq result. - - :returns: uuid as string. 
- """ - return str(uuid.uuid4()) - - -@pytest.fixture -def default_taskiq_result() -> TaskiqResult[Any]: - """ - Generates default TaskiqResult. - - :returns: TaskiqResult with generic result. - """ - return TaskiqResult( - is_err=False, - log=None, - return_value="Best test ever.", - execution_time=0.1, - ) - - -@pytest.fixture -def custom_taskiq_result() -> TaskiqResult[Any]: - """ - Generates custom TaskiqResult. - - :returns: TaskiqResult with custom class result. - """ - return TaskiqResult( - is_err=False, - log=None, - return_value=ResultForTest(), - execution_time=0.1, - ) - - -async def test_failure_connection_database() -> None: - """Test exception raising in connection database.""" - with pytest.raises(expected_exception=DatabaseConnectionError): - await PostgresqlResultBackend( - dsn="postgresql://postgres:postgres@localhost:5432/aaaaaaaaa", - table_name="postgres_table", - ).startup() - - -async def test_failure_backend_result( - result_backend: PostgresqlResultBackend[_ReturnType], - task_id: str, -) -> None: - """Test exception raising in `get_result` method.""" - with pytest.raises(expected_exception=ResultIsMissingError): - await result_backend.get_result(task_id=task_id) - - -async def test_success_backend_default_result_delete_res( - postgresql_dsn: str, - postgres_table: str, - default_taskiq_result: TaskiqResult[_ReturnType], - task_id: str, -) -> None: - backend: PostgresqlResultBackend[_ReturnType] = PostgresqlResultBackend( - dsn=postgresql_dsn, - table_name=postgres_table, - keep_results=False, - ) - await backend.startup() - - await backend.set_result( - task_id=task_id, - result=default_taskiq_result, - ) - await backend.get_result(task_id=task_id) - - with pytest.raises(expected_exception=ResultIsMissingError): - await backend.get_result(task_id=task_id) - - -async def test_success_backend_default_result( - result_backend: PostgresqlResultBackend[_ReturnType], - default_taskiq_result: TaskiqResult[_ReturnType], - task_id: str, -) -> 
None: - """ - Tests normal behavior with default result in TaskiqResult. - - :param default_taskiq_result: TaskiqResult with default result. - :param task_id: ID for task. - :param nats_urls: urls to NATS. - """ - await result_backend.set_result( - task_id=task_id, - result=default_taskiq_result, - ) - result = await result_backend.get_result(task_id=task_id) - - assert result == default_taskiq_result - - -async def test_success_backend_custom_result( - result_backend: PostgresqlResultBackend[_ReturnType], - custom_taskiq_result: TaskiqResult[_ReturnType], - task_id: str, -) -> None: - """ - Tests normal behavior with custom result in TaskiqResult. - - :param custom_taskiq_result: TaskiqResult with custom result. - :param task_id: ID for task. - :param redis_url: url to redis. - """ - await result_backend.set_result( - task_id=task_id, - result=custom_taskiq_result, - ) - result = await result_backend.get_result(task_id=task_id) - - assert ( - result.return_value.test_arg # type: ignore - == custom_taskiq_result.return_value.test_arg # type: ignore - ) - assert result.is_err == custom_taskiq_result.is_err - assert result.execution_time == custom_taskiq_result.execution_time - assert result.log == custom_taskiq_result.log - - -async def test_success_backend_is_result_ready( - result_backend: PostgresqlResultBackend[_ReturnType], - custom_taskiq_result: TaskiqResult[_ReturnType], - task_id: str, -) -> None: - """Tests `is_result_ready` method.""" - assert not await result_backend.is_result_ready(task_id=task_id) - await result_backend.set_result( - task_id=task_id, - result=custom_taskiq_result, - ) - - assert await result_backend.is_result_ready(task_id=task_id) diff --git a/tests/test_scheduler_source.py b/tests/test_scheduler_source.py deleted file mode 100644 index 7079d32..0000000 --- a/tests/test_scheduler_source.py +++ /dev/null @@ -1,403 +0,0 @@ -import uuid -from datetime import datetime, timezone -from unittest.mock import Mock - -import pytest -from taskiq 
import AsyncTaskiqDecoratedTask as TaskiqTask -from taskiq import ScheduledTask - -from taskiq_postgresql.exceptions import DatabaseConnectionError -from taskiq_postgresql.scheduler_source import PostgresqlSchedulerSource - -pytestmark = pytest.mark.anyio - - -@pytest.fixture -async def mock_broker() -> Mock: - """Create a mock broker for testing startup schedules.""" - broker = Mock() - mock_task = Mock(spec=TaskiqTask) - mock_task.labels = {"test": "label"} - broker.find_task.return_value = mock_task - return broker - - -@pytest.fixture -def sample_scheduled_task() -> ScheduledTask: - """Create a sample scheduled task for testing.""" - return ScheduledTask( - task_name="test_task", - labels={"env": "test"}, - schedule_id=uuid.uuid4().hex, - args=["arg1", "arg2"], - kwargs={"key": "value"}, - cron="0 */6 * * *", - cron_offset="UTC", - ) - - -@pytest.fixture -def time_based_scheduled_task() -> ScheduledTask: - """Create a time-based scheduled task for testing post_send.""" - return ScheduledTask( - task_name="time_task", - labels={"type": "time"}, - schedule_id=uuid.uuid4().hex, - args=[], - kwargs={}, - time=datetime.now(timezone.utc), - ) - - -async def test_failure_connection_database() -> None: - """Test exception raising when connecting to invalid database.""" - with pytest.raises(expected_exception=DatabaseConnectionError): - scheduler = PostgresqlSchedulerSource( - dsn="postgresql://postgres:postgres@localhost:5432/nonexistent_db", - table_name="test_table", - ) - await scheduler.startup() - - -async def test_scheduler_initialization() -> None: - """Test scheduler source initialization with various parameters.""" - # Test with string DSN - scheduler = PostgresqlSchedulerSource( - dsn="postgresql://postgres:postgres@localhost:5432/test", - table_name="custom_table", - driver="asyncpg", - ) - assert scheduler.table_name == "custom_table" - assert scheduler.dsn == "postgresql://postgres:postgres@localhost:5432/test" - - # Test with callable DSN - def get_dsn() 
-> str: - return "postgresql://postgres:postgres@localhost:5432/test" - - scheduler_callable = PostgresqlSchedulerSource( - dsn=get_dsn, - table_name="custom_table", - ) - assert ( - scheduler_callable.dsn == "postgresql://postgres:postgres@localhost:5432/test" - ) - - -async def test_startup_creates_table( - scheduler_source: PostgresqlSchedulerSource, -) -> None: - """Test that startup creates the scheduler table.""" - # Verify table exists and is empty - table_exists = await scheduler_source.driver.execute( - f"SELECT * FROM {scheduler_source.table_name}", # noqa: S608 - ) - assert table_exists == [] - - -async def test_add_and_get_schedules( - scheduler_source: PostgresqlSchedulerSource, - sample_scheduled_task: ScheduledTask, -) -> None: - """Test adding and retrieving schedules.""" - # Initially no schedules - schedules = await scheduler_source.get_schedules() - assert len(schedules) == 0 - - # Add a schedule - await scheduler_source.add_schedule(sample_scheduled_task) - - # Retrieve schedules - schedules = await scheduler_source.get_schedules() - assert len(schedules) == 1 - - retrieved_schedule = schedules[0] - assert retrieved_schedule.task_name == sample_scheduled_task.task_name - assert retrieved_schedule.schedule_id == sample_scheduled_task.schedule_id - assert retrieved_schedule.cron == sample_scheduled_task.cron - assert retrieved_schedule.args == sample_scheduled_task.args - assert retrieved_schedule.kwargs == sample_scheduled_task.kwargs - assert retrieved_schedule.labels == sample_scheduled_task.labels - - -async def test_add_multiple_schedules( - scheduler_source: PostgresqlSchedulerSource, -) -> None: - """Test adding multiple schedules for the same task.""" - task_name = "multi_schedule_task" - - # Create multiple schedules for the same task - schedule1 = ScheduledTask( - task_name=task_name, - labels={"env": "test"}, - schedule_id=uuid.uuid4().hex, - cron="0 */6 * * *", - args=[], - kwargs={}, - ) - - schedule2 = ScheduledTask( - 
task_name=task_name, - labels={"env": "test"}, - schedule_id=uuid.uuid4().hex, - cron="0 */12 * * *", - args=[], - kwargs={}, - ) - - await scheduler_source.add_schedule(schedule1) - await scheduler_source.add_schedule(schedule2) - - schedules = await scheduler_source.get_schedules() - expected_schedules = 2 - assert len(schedules) == expected_schedules - - # Verify both schedules are retrieved - schedule_ids = {schedule.schedule_id for schedule in schedules} - assert schedule1.schedule_id in schedule_ids - assert schedule2.schedule_id in schedule_ids - - -async def test_delete_schedule( - scheduler_source: PostgresqlSchedulerSource, - sample_scheduled_task: ScheduledTask, -) -> None: - """Test deleting a schedule.""" - # Add a schedule - await scheduler_source.add_schedule(sample_scheduled_task) - - # Verify it exists - schedules = await scheduler_source.get_schedules() - assert len(schedules) == 1 - - # Delete the schedule - await scheduler_source.delete_schedule(sample_scheduled_task.schedule_id) - - # Verify it's gone - schedules = await scheduler_source.get_schedules() - assert len(schedules) == 0 - - -async def test_delete_nonexistent_schedule( - scheduler_source: PostgresqlSchedulerSource, -) -> None: - """Test deleting a schedule that doesn't exist (should not raise error).""" - fake_schedule_id = uuid.uuid4().hex - # This should not raise an exception - await scheduler_source.delete_schedule(fake_schedule_id) - - -async def test_post_send_deletes_time_based_task( - scheduler_source: PostgresqlSchedulerSource, - time_based_scheduled_task: ScheduledTask, - sample_scheduled_task: ScheduledTask, -) -> None: - """Test that post_send deletes time-based tasks but keeps cron tasks.""" - # Add both time-based and cron-based tasks - await scheduler_source.add_schedule(time_based_scheduled_task) - await scheduler_source.add_schedule(sample_scheduled_task) - - # Verify both exist - schedules = await scheduler_source.get_schedules() - expected_schedules = 2 - assert 
len(schedules) == expected_schedules - - # Call post_send on time-based task - await scheduler_source.post_send(time_based_scheduled_task) - - # Verify only cron task remains - schedules = await scheduler_source.get_schedules() - assert len(schedules) == 1 - assert schedules[0].schedule_id == sample_scheduled_task.schedule_id - - # Call post_send on cron task (should not delete it) - await scheduler_source.post_send(sample_scheduled_task) - - # Verify cron task still exists - schedules = await scheduler_source.get_schedules() - assert len(schedules) == 1 - assert schedules[0].schedule_id == sample_scheduled_task.schedule_id - - -async def test_startup_schedule_processing( - postgresql_dsn: str, - postgres_table: str, - mock_broker: Mock, -) -> None: - """Test startup schedule processing functionality.""" - startup_schedule = { - "test_task": [ - { - "cron": "0 */6 * * *", - "cron_offset": "UTC", - "args": ["startup_arg"], - "kwargs": {"startup": True}, - }, - { - "time": datetime.now(timezone.utc).isoformat(), - "args": ["time_arg"], - }, - ], - "missing_task": [ - {"cron": "0 */12 * * *"}, - ], - "invalid_schedule": [ - {"invalid": "schedule"}, # Missing cron/time - ], - } - - # Mock broker to return task for "test_task" but None for "missing_task" - def mock_find_task(task_name: str) -> TaskiqTask | None: - if task_name == "test_task": - task = Mock(spec=TaskiqTask) - task.labels = {"from": "broker"} - return task - return None - - mock_broker.find_task.side_effect = mock_find_task - - scheduler = PostgresqlSchedulerSource( - dsn=postgresql_dsn, - table_name=postgres_table, - startup_schedule=startup_schedule, - broker=mock_broker, - ) - - await scheduler.startup() - - try: - # Verify schedules were created - schedules = await scheduler.get_schedules() - expected_schedules = 2 - # Should have 2 schedules (both valid schedules for "test_task") - # "missing_task" and "invalid_schedule" should be skipped - assert len(schedules) == expected_schedules - - # Verify all 
schedules are for "test_task" - for schedule in schedules: - assert schedule.task_name == "test_task" - assert schedule.labels == {"from": "broker"} - - # Verify one has cron and one has time - cron_schedules = [s for s in schedules if s.cron is not None] - time_schedules = [s for s in schedules if s.time is not None] - expected_cron_schedules = 1 - expected_time_schedules = 1 - assert len(cron_schedules) == expected_cron_schedules - assert len(time_schedules) == expected_time_schedules - - assert cron_schedules[0].cron == "0 */6 * * *" - assert cron_schedules[0].args == ["startup_arg"] - assert cron_schedules[0].kwargs == {"startup": True} - - finally: - # Cleanup - async with scheduler.driver, scheduler.driver.connection() as connection: - await connection.execute(f"DROP TABLE {postgres_table}") - - await scheduler.shutdown() - - -async def test_startup_schedule_no_duplicates( - postgresql_dsn: str, - postgres_table: str, - mock_broker: Mock, -) -> None: - """Test that startup schedules don't create duplicates on multiple startups.""" - startup_schedule = { - "test_task": [ - {"cron": "0 */6 * * *"}, - ], - } - - mock_task = Mock(spec=TaskiqTask) - mock_task.labels = {"test": "label"} - mock_broker.find_task.return_value = mock_task - - scheduler = PostgresqlSchedulerSource( - dsn=postgresql_dsn, - table_name=postgres_table, - startup_schedule=startup_schedule, - broker=mock_broker, - ) - - # First startup - await scheduler.startup() - schedules = await scheduler.get_schedules() - assert len(schedules) == 1 - - # Restart (shutdown and startup again) - await scheduler.shutdown() - await scheduler.startup() - - try: - # Should still have only one schedule (no duplicates) - schedules = await scheduler.get_schedules() - assert len(schedules) == 1 - - finally: - # Cleanup - # Cleanup - async with scheduler.driver, scheduler.driver.connection() as connection: - await connection.execute(f"DROP TABLE {postgres_table}") - await scheduler.shutdown() - - -async def 
test_startup_without_broker_or_startup_schedule( - scheduler_source: PostgresqlSchedulerSource, -) -> None: - """Test startup when no broker or startup_schedule is provided.""" - # This should work without any issues - # The scheduler_source fixture already calls startup, so if we get here, it worked - schedules = await scheduler_source.get_schedules() - assert len(schedules) == 0 - - -async def test_schedule_serialization( - scheduler_source: PostgresqlSchedulerSource, -) -> None: - """Test that complex schedule data is properly serialized and deserialized.""" - complex_schedule = ScheduledTask( - task_name="complex_task", - labels={"env": "test", "priority": "high", "numbers": [1, 2, 3]}, - schedule_id=uuid.uuid4().hex, - args=["string", 42, True, {"nested": "dict"}], - kwargs={ - "complex_key": {"nested": {"deeply": "nested"}}, - "list_value": [1, "two", {"three": 3}], - "none_value": None, - }, - cron="0 */6 * * *", - cron_offset="UTC", - ) - - await scheduler_source.add_schedule(complex_schedule) - - schedules = await scheduler_source.get_schedules() - assert len(schedules) == 1 - - retrieved = schedules[0] - assert retrieved.task_name == complex_schedule.task_name - assert retrieved.labels == complex_schedule.labels - assert retrieved.args == complex_schedule.args - assert retrieved.kwargs == complex_schedule.kwargs - assert retrieved.cron == complex_schedule.cron - assert retrieved.cron_offset == complex_schedule.cron_offset - - -@pytest.mark.parametrize( - "invalid_dsn", - [ - "postgresql://postgres:wrong@localhost:5432/postgres", - "postgresql://postgres:postgres@nonexistent:5432/postgres", - "invalid://connection/string", - ], -) -async def test_connection_failures(invalid_dsn: str) -> None: - """Test various connection failure scenarios.""" - scheduler = PostgresqlSchedulerSource( - dsn=invalid_dsn, - table_name="test_table", - ) - - with pytest.raises(DatabaseConnectionError): - await scheduler.startup() From 075808aa4a18fb0324cc1c4b69fa7bede735ab54 Mon 
Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sun, 29 Mar 2026 15:43:06 +0530 Subject: [PATCH 07/25] deps: Add oracledb and oracle extra to dependencies --- pyproject.toml | 4 +- uv.lock | 232 ++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 233 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 87027d0..5281965 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,10 +47,11 @@ keywords = [ dependencies = ["taskiq>=0.11.7", 'sqlalchemy>=2', 'anyio>=4'] [project.optional-dependencies] -all = ["taskiq_sqlalchemy[postgresql,sqlite]"] +all = ["taskiq_sqlalchemy[postgresql,sqlite,oracle]"] postgresql = ["asyncpg", "psycopg[binary,pool]"] sqlite = ["aiosqlite"] +oracle = ["oracledb"] [project.urls] Homepage = "https://github.com/corridor/taskiq-sqlalchemy" @@ -72,6 +73,7 @@ dev = [ "aiosqlite", # SQLite async driver — zero external service "asyncpg", # PostgreSQL asyncpg driver (optional in local dev) "psycopg", # PostgreSQL psycopg3 driver (optional in local dev) + "oracledb", # OracleDB driver (optional in local dev) ] [tool.setuptools.packages.find] diff --git a/uv.lock b/uv.lock index fade64a..51a924e 100644 --- a/uv.lock +++ b/uv.lock @@ -116,6 +116,101 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/22/77a4a08cc9ef4f8bbb5e7ffbf4be008e596b535a3533a28c3465e9400d75/asyncpg_stubs-0.30.2-py3-none-any.whl", hash = "sha256:e57818bbaf10945a60ff3219da3c5ce97e1b424503b6a6f0a18db99797397cbb", size = 26929, upload-time = "2025-06-27T20:03:14.847Z" }, ] +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", version = "2.23", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10' and implementation_name != 'PyPy'" }, + { name = "pycparser", version = "3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10' and implementation_name != 'PyPy'" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283, upload-time = "2025-09-08T23:22:08.01Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504, upload-time = "2025-09-08T23:22:10.637Z" }, + { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, + { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, + { url = "https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, + { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, + { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, + { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, + { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036, upload-time = "2025-09-08T23:22:22.143Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/cc/08ed5a43f2996a16b462f64a7055c6e962803534924b9b2f1371d8c00b7b/cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf", size = 184288, upload-time = "2025-09-08T23:23:48.404Z" }, + { url = "https://files.pythonhosted.org/packages/3d/de/38d9726324e127f727b4ecc376bc85e505bfe61ef130eaf3f290c6847dd4/cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7", size = 180509, upload-time = "2025-09-08T23:23:49.73Z" }, + { url = "https://files.pythonhosted.org/packages/9b/13/c92e36358fbcc39cf0962e83223c9522154ee8630e1df7c0b3a39a8124e2/cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c", size = 208813, upload-time = "2025-09-08T23:23:51.263Z" }, + { url = "https://files.pythonhosted.org/packages/15/12/a7a79bd0df4c3bff744b2d7e52cc1b68d5e7e427b384252c42366dc1ecbc/cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165", size = 216498, upload-time = "2025-09-08T23:23:52.494Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/5c51c1c7600bdd7ed9a24a203ec255dccdd0ebf4527f7b922a0bde2fb6ed/cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534", size = 203243, upload-time = "2025-09-08T23:23:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/32/f2/81b63e288295928739d715d00952c8c6034cb6c6a516b17d37e0c8be5600/cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f", size = 203158, upload-time = "2025-09-08T23:23:55.169Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/74/cc4096ce66f5939042ae094e2e96f53426a979864aa1f96a621ad128be27/cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63", size = 216548, upload-time = "2025-09-08T23:23:56.506Z" }, + { url = "https://files.pythonhosted.org/packages/e8/be/f6424d1dc46b1091ffcc8964fa7c0ab0cd36839dd2761b49c90481a6ba1b/cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2", size = 218897, upload-time = "2025-09-08T23:23:57.825Z" }, + { url = "https://files.pythonhosted.org/packages/f7/e0/dda537c2309817edf60109e39265f24f24aa7f050767e22c98c53fe7f48b/cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65", size = 211249, upload-time = "2025-09-08T23:23:59.139Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e7/7c769804eb75e4c4b35e658dba01de1640a351a9653c3d49ca89d16ccc91/cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322", size = 218041, upload-time = "2025-09-08T23:24:00.496Z" }, + { url = "https://files.pythonhosted.org/packages/aa/d9/6218d78f920dcd7507fc16a766b5ef8f3b913cc7aa938e7fc80b9978d089/cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a", size = 172138, upload-time = "2025-09-08T23:24:01.7Z" }, + { url = "https://files.pythonhosted.org/packages/54/8f/a1e836f82d8e32a97e6b29cc8f641779181ac7363734f12df27db803ebda/cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9", size = 182794, upload-time = "2025-09-08T23:24:02.943Z" }, +] + [[package]] name = "cfgv" version = "3.4.0" @@ -234,6 +329,66 @@ toml = [ { name = "tomli", marker = "python_full_version <= '3.11'" }, ] +[[package]] +name = 
"cryptography" +version = "46.0.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" }, + { url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" }, + { url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" }, + { url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" }, + { url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" }, + { url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" }, + { url = "https://files.pythonhosted.org/packages/01/41/3a578f7fd5c70611c0aacba52cd13cb364a5dee895a5c1d467208a9380b0/cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275", size = 7117147, upload-time = "2026-03-25T23:33:48.249Z" }, + { url = "https://files.pythonhosted.org/packages/fa/87/887f35a6fca9dde90cad08e0de0c89263a8e59b2d2ff904fd9fcd8025b6f/cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4", size = 4266221, upload-time = "2026-03-25T23:33:49.874Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/a8/0a90c4f0b0871e0e3d1ed126aed101328a8a57fd9fd17f00fb67e82a51ca/cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b", size = 4408952, upload-time = "2026-03-25T23:33:52.128Z" }, + { url = "https://files.pythonhosted.org/packages/16/0b/b239701eb946523e4e9f329336e4ff32b1247e109cbab32d1a7b61da8ed7/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707", size = 4270141, upload-time = "2026-03-25T23:33:54.11Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a8/976acdd4f0f30df7b25605f4b9d3d89295351665c2091d18224f7ad5cdbf/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361", size = 4904178, upload-time = "2026-03-25T23:33:55.725Z" }, + { url = "https://files.pythonhosted.org/packages/b1/1b/bf0e01a88efd0e59679b69f42d4afd5bced8700bb5e80617b2d63a3741af/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b", size = 4441812, upload-time = "2026-03-25T23:33:57.364Z" }, + { url = "https://files.pythonhosted.org/packages/bb/8b/11df86de2ea389c65aa1806f331cae145f2ed18011f30234cc10ca253de8/cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca", size = 3963923, upload-time = "2026-03-25T23:33:59.361Z" }, + { url = "https://files.pythonhosted.org/packages/91/e0/207fb177c3a9ef6a8108f234208c3e9e76a6aa8cf20d51932916bd43bda0/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013", size = 4269695, upload-time = "2026-03-25T23:34:00.909Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/5e/19f3260ed1e95bced52ace7501fabcd266df67077eeb382b79c81729d2d3/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4", size = 4869785, upload-time = "2026-03-25T23:34:02.796Z" }, + { url = "https://files.pythonhosted.org/packages/10/38/cd7864d79aa1d92ef6f1a584281433419b955ad5a5ba8d1eb6c872165bcb/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a", size = 4441404, upload-time = "2026-03-25T23:34:04.35Z" }, + { url = "https://files.pythonhosted.org/packages/09/0a/4fe7a8d25fed74419f91835cf5829ade6408fd1963c9eae9c4bce390ecbb/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d", size = 4397549, upload-time = "2026-03-25T23:34:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a0/7d738944eac6513cd60a8da98b65951f4a3b279b93479a7e8926d9cd730b/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736", size = 4651874, upload-time = "2026-03-25T23:34:07.916Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f1/c2326781ca05208845efca38bf714f76939ae446cd492d7613808badedf1/cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed", size = 3001511, upload-time = "2026-03-25T23:34:09.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/57/fe4a23eb549ac9d903bd4698ffda13383808ef0876cc912bcb2838799ece/cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4", size = 3471692, upload-time = "2026-03-25T23:34:11.613Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" }, + { url = "https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" }, + { url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" }, + { url = "https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" }, + { url = "https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" }, + { url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" }, + { url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" }, + { url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" }, + { url = "https://files.pythonhosted.org/packages/2e/84/7ccff00ced5bac74b775ce0beb7d1be4e8637536b522b5df9b73ada42da2/cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead", size = 3475444, upload-time = "2026-03-25T23:34:38.944Z" }, + { url = "https://files.pythonhosted.org/packages/bc/1f/4c926f50df7749f000f20eede0c896769509895e2648db5da0ed55db711d/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8", size = 4218227, upload-time = "2026-03-25T23:34:40.871Z" }, + { url = "https://files.pythonhosted.org/packages/c6/65/707be3ffbd5f786028665c3223e86e11c4cda86023adbc56bd72b1b6bab5/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0", size = 4381399, upload-time = "2026-03-25T23:34:42.609Z" }, + { url = "https://files.pythonhosted.org/packages/f3/6d/73557ed0ef7d73d04d9aba745d2c8e95218213687ee5e76b7d236a5030fc/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b", size = 4217595, upload-time = "2026-03-25T23:34:44.205Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/c5/e1594c4eec66a567c3ac4400008108a415808be2ce13dcb9a9045c92f1a0/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a", size = 4380912, upload-time = "2026-03-25T23:34:46.328Z" }, + { url = "https://files.pythonhosted.org/packages/1a/89/843b53614b47f97fe1abc13f9a86efa5ec9e275292c457af1d4a60dc80e0/cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e", size = 3409955, upload-time = "2026-03-25T23:34:48.465Z" }, +] + [[package]] name = "distlib" version = "0.4.0" @@ -438,6 +593,48 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] +[[package]] +name = "oracledb" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/02/70a872d1a4a739b4f7371ab8d3d5ed8c6e57e142e2503531aafcb220893c/oracledb-3.4.2.tar.gz", hash = "sha256:46e0f2278ff1fe83fbc33a3b93c72d429323ec7eed47bc9484e217776cd437e5", size = 855467, upload-time = "2026-01-28T17:25:39.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/5d/b8a0ca1c520fa43ae33260f6f8ca9bd468ade43da7986029bc214965df12/oracledb-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff3c89cecea62af8ca02aa33cab0f2edc0214c747eac7d3364ed6b2640cb55e4", size = 4243966, upload-time = "2026-01-28T17:25:45.05Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/43/26e2bbb2a6ee31392a339089e53cb2e386ca795ff4fbe2f673c167821bd6/oracledb-3.4.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e068ef844a327877bfefbef1bc6fb7284c727bb87af80095f08d95bcaf7b8bb2", size = 2426056, upload-time = "2026-01-28T17:25:47.176Z" }, + { url = "https://files.pythonhosted.org/packages/09/ba/11ee1d044295465a04ff45c6e3023d35400bb3f67bc5fed9408f0f2dc04c/oracledb-3.4.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9f434a739405557bd57cb39b62238142bb27855a524a70dc6d397a2a8c576c9d", size = 2603062, upload-time = "2026-01-28T17:25:49.817Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bc/292f2f5f7b65a667787871e300889ab8f4a3b9cfd88c5d78f828a40f6d31/oracledb-3.4.2-cp310-cp310-win32.whl", hash = "sha256:00c79448017f367bb7ab6900efe0706658a53768abea2b4519a4c9b2d5743890", size = 1496639, upload-time = "2026-01-28T17:25:51.298Z" }, + { url = "https://files.pythonhosted.org/packages/21/23/81931c16663e771937c0161bb90460668d2a5f7982b5030ab7bef3b3a4f9/oracledb-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:574c8280d49cbbe21dbe03fc28356d9b9a5b9e300ebcde6c6d106e51453a7e65", size = 1837314, upload-time = "2026-01-28T17:25:52.718Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/be263b668ba32b258d07c85f7bfb6967a9677e016c299207b28734f04c4b/oracledb-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b8e4b8a852251cef09038b75f30fce1227010835f4e19cfbd436027acba2697c", size = 4228552, upload-time = "2026-01-28T17:25:54.844Z" }, + { url = "https://files.pythonhosted.org/packages/91/bc/e832a649529da7c60409a81be41f3213b4c7ffda4fe424222b2145e8d43c/oracledb-3.4.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1617a1db020346883455af005efbefd51be2c4d797e43b1b38455a19f8526b48", size = 2421924, upload-time = "2026-01-28T17:25:56.984Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/21/d867c37e493a63b5521bd248110ad5b97b18253d64a30703e3e8f3d9631e/oracledb-3.4.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed78d7e7079a778062744ccf42141ce4806818c3f4dd6463e4a7edd561c9f86", size = 2599301, upload-time = "2026-01-28T17:25:58.529Z" }, + { url = "https://files.pythonhosted.org/packages/2a/de/9b1843ea27f7791449652d7f340f042c3053336d2c11caf29e59bab86189/oracledb-3.4.2-cp311-cp311-win32.whl", hash = "sha256:0e16fe3d057e0c41a23ad2ae95bfa002401690773376d476be608f79ac74bf05", size = 1492890, upload-time = "2026-01-28T17:26:00.662Z" }, + { url = "https://files.pythonhosted.org/packages/d6/10/cbc8afa2db0cec80530858d3e4574f9734fae8c0b7f1df261398aa026c5f/oracledb-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:f93cae08e8ed20f2d5b777a8602a71f9418389c661d2c937e84d94863e7e7011", size = 1843355, upload-time = "2026-01-28T17:26:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/8f/81/2e6154f34b71cd93b4946c73ea13b69d54b8d45a5f6bbffe271793240d21/oracledb-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a7396664e592881225ba66385ee83ce339d864f39003d6e4ca31a894a7e7c552", size = 4220806, upload-time = "2026-01-28T17:26:04.322Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a9/a1d59aaac77d8f727156ec6a3b03399917c90b7da4f02d057f92e5601f56/oracledb-3.4.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f04a2d62073407672f114d02529921de0677c6883ed7c64d8d1a3c04caa3238", size = 2233795, upload-time = "2026-01-28T17:26:05.877Z" }, + { url = "https://files.pythonhosted.org/packages/94/ec/8c4a38020cd251572bd406ddcbde98ca052ec94b5684f9aa9ef1ddfcc68c/oracledb-3.4.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8d75e4f879b908be66cce05ba6c05791a5dbb4a15e39abc01aa25c8a2492bd9", size = 2424756, upload-time = "2026-01-28T17:26:07.35Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/7d/c251c2a8567151ccfcfbe3467ea9a60fb5480dc4719342e2e6b7a9679e5d/oracledb-3.4.2-cp312-cp312-win32.whl", hash = "sha256:31b7ee83c23d0439778303de8a675717f805f7e8edb5556d48c4d8343bcf14f5", size = 1453486, upload-time = "2026-01-28T17:26:08.869Z" }, + { url = "https://files.pythonhosted.org/packages/4c/78/c939f3c16fb39400c4734d5a3340db5659ba4e9dce23032d7b33ccfd3fe5/oracledb-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:ac25a0448fc830fb7029ad50cd136cdbfcd06975d53967e269772cc5cb8c203a", size = 1794445, upload-time = "2026-01-28T17:26:10.66Z" }, + { url = "https://files.pythonhosted.org/packages/22/68/f7126f5d911c295b57720c6b1a0609a5a2667b4546946433552a4de46333/oracledb-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:643c25d301a289a371e37fcedb59e5fa5e54fb321708e5c12821c4b55bdd8a4d", size = 4205176, upload-time = "2026-01-28T17:26:12.463Z" }, + { url = "https://files.pythonhosted.org/packages/5d/93/2fced60f92dc82e66980a8a3ba5c1ea48110bf1dd81d030edb69d88f992e/oracledb-3.4.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55397e7eb43bb7017c03a981c736c25724182f5210951181dfe3fab0e5d457fb", size = 2231298, upload-time = "2026-01-28T17:26:14.497Z" }, + { url = "https://files.pythonhosted.org/packages/75/a7/4dd286f3a6348d786fef9e6ab2e6c9b74ca9195d9a756f2a67e45743cdf0/oracledb-3.4.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26a10f9c790bd141ffc8af68520803ed4a44a9258bf7d1eea9bfdd36bd6df7f", size = 2439430, upload-time = "2026-01-28T17:26:16.044Z" }, + { url = "https://files.pythonhosted.org/packages/19/28/94bc753e5e969c60ee5d9c914e2b4ef79999eaca8e91bcab2fbf0586b80b/oracledb-3.4.2-cp313-cp313-win32.whl", hash = "sha256:b974caec2c330c22bbe765705a5ac7d98ec3022811dec2042d561a3c65cb991b", size = 1458209, upload-time = "2026-01-28T17:26:17.652Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/2b/593a9b2d4c12c9de3289e67d84fe023336d99f36ba51442a5a0f5ce6acf7/oracledb-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:3df8eee1410d25360599968b1625b000f10c5ae0e47274031a7842a9dc418890", size = 1793558, upload-time = "2026-01-28T17:26:19.914Z" }, + { url = "https://files.pythonhosted.org/packages/42/20/1e98f84c1555911c46b4fa870fbef2a80617bf7e0a5f178078ecf466c917/oracledb-3.4.2-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:59ad6438f56a25e8e1a4a3dd1b42235a5d09ab9ba417ff2ad14eae6596f3d06f", size = 4247459, upload-time = "2026-01-28T17:26:22.356Z" }, + { url = "https://files.pythonhosted.org/packages/7d/74/95963e2d94f84b9937a562a9a2529f72d050afbc2ffd88f6661e3a876f7d/oracledb-3.4.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:404ec1451d0448653ee074213b87d6c5bd65eaa74b50083ddf2c9c3e11c71c71", size = 2271749, upload-time = "2026-01-28T17:26:24.078Z" }, + { url = "https://files.pythonhosted.org/packages/82/89/38ce85148a246087795379ee52c5b20726a00a69c87ba6ec266bcdad30fc/oracledb-3.4.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:19fa80ef84f85ad74077aa626067bbe697e527bd39604b4209f9d86cb2876b89", size = 2452031, upload-time = "2026-01-28T17:26:26.08Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/51fe907fdec0267ad7c6e9a62998cbe878efcd168ea6e39f162fab62fdaa/oracledb-3.4.2-cp314-cp314-win32.whl", hash = "sha256:d7ce75c498bff758548ec6e4424ab4271aa257e5887cc436a54bc947fd46199a", size = 1480973, upload-time = "2026-01-28T17:26:27.584Z" }, + { url = "https://files.pythonhosted.org/packages/48/22/a37354f19786774e5e4041338043b516db060aacfdfcd5aca8bb92c2539a/oracledb-3.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:5d7befb014174c5ae11c3a08f5ed6668a25ab2335d8e7104dca70d54d54a5b3a", size = 1837756, upload-time = "2026-01-28T17:26:29.032Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/22/153711194b5042aa8576ba4db5416143d1e842e536befd211752032bb114/oracledb-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1e4930d7f6584832dcc15b8ca415a7957b0c45f5aa7c4f88702e070e5c53bf93", size = 4251607, upload-time = "2026-01-28T17:26:30.649Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d6/12d9f228b8d081850b5b00e13784f4d847c95babdad00f0663206121d546/oracledb-3.4.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23aa07c1eaca17ae74c6fdc86b218f58484d56452958aead1aa460c0596a76c1", size = 2430130, upload-time = "2026-01-28T17:26:32.25Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c1/cf6c81567fb25d64a4094409d5fb915c257029bf7eb2244433e26a838e77/oracledb-3.4.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f8ea989965a4f636a309444bd696ab877bba373d5d67bf744785f9bd8c560865", size = 2605324, upload-time = "2026-01-28T17:26:33.825Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ee/2543ccfc3d7155baf5cfa9a3909768fa2a0781a92e5f28e3027c397e7bbe/oracledb-3.4.2-cp39-cp39-win32.whl", hash = "sha256:6d85622664cc88d5a82bbd7beccb62cd53bd272c550a5e15e7d5f8ae6b86f1f1", size = 1498691, upload-time = "2026-01-28T17:26:35.373Z" }, + { url = "https://files.pythonhosted.org/packages/f7/9c/07f0fa510358612188eebdf31930ac813246644c8dc8771370fd10226ea5/oracledb-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b1095d95d0c8b37e4d0e17cf1928919cb59222b6344362a1cf6a2f3ca205a28a", size = 1840401, upload-time = "2026-01-28T17:26:36.775Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -576,6 +773,30 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/47/fd/4feb52a55c1a4bd748f2acaed1903ab54a723c47f6d0242780f4d97104d4/psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7", size = 38252, upload-time = "2025-02-26T12:03:45.073Z" }, ] 
+[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + [[package]] name = "pycron" version = "3.2.0" @@ -1012,8 +1233,12 @@ dependencies = [ all = [ { name = "aiosqlite" }, { name = "asyncpg" }, + { name = "oracledb" }, { name = "psycopg", extra = ["binary", "pool"] }, ] +oracle = [ + { name = "oracledb" }, +] postgresql = [ { name = "asyncpg" }, { name = "psycopg", extra = ["binary", "pool"] }, @@ -1027,6 +1252,7 @@ dev = [ { name = "aiosqlite" }, { name = "asyncpg" }, { name = "asyncpg-stubs" }, + { name = "oracledb" }, { name 
= "pre-commit" }, { name = "psycopg" }, { name = "pytest" }, @@ -1044,18 +1270,20 @@ requires-dist = [ { name = "aiosqlite", marker = "extra == 'sqlite'" }, { name = "anyio", specifier = ">=4" }, { name = "asyncpg", marker = "extra == 'postgresql'" }, + { name = "oracledb", marker = "extra == 'oracle'" }, { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'postgresql'" }, { name = "sqlalchemy", specifier = ">=2" }, { name = "taskiq", specifier = ">=0.11.7" }, - { name = "taskiq-sqlalchemy", extras = ["postgresql", "sqlite"], marker = "extra == 'all'" }, + { name = "taskiq-sqlalchemy", extras = ["postgresql", "sqlite", "oracle"], marker = "extra == 'all'" }, ] -provides-extras = ["all", "postgresql", "sqlite"] +provides-extras = ["all", "postgresql", "sqlite", "oracle"] [package.metadata.requires-dev] dev = [ { name = "aiosqlite" }, { name = "asyncpg" }, { name = "asyncpg-stubs" }, + { name = "oracledb" }, { name = "pre-commit" }, { name = "psycopg" }, { name = "pytest" }, From a245cbce20b6495c8f5444706d362baf43cd8d09 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sun, 29 Mar 2026 15:39:31 +0530 Subject: [PATCH 08/25] feat: Add support for oracle result backend --- docker-compose.yml | 30 +++++++++++++++++++++++------ taskiq_sqlalchemy/models.py | 14 ++++++++++---- taskiq_sqlalchemy/result_backend.py | 8 +++----- tests/result_backend/conftest.py | 11 ++++++++--- 4 files changed, 45 insertions(+), 18 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 2f3f5e0..c94625e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,29 +1,47 @@ services: postgres: - image: postgres:latest + image: postgres:10 restart: always environment: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres - POSTGRES_DB: taskiqasyncpg - + POSTGRES_DB: taskiq volumes: - .:/postgres_data:/var/lib/postgresql/data/ - ports: - 5432:5432 - healthcheck: - test: ["CMD-SHELL", "pg_isready", "-d", "taskiqasyncpg"] + test: ["CMD-SHELL", "pg_isready", "-U", 
"postgres", "-d", "taskiq"] interval: 3s timeout: 1s retries: 5 + networks: + - default + oracle: + image: gvenzl/oracle-xe:21-slim + restart: always + environment: + ORACLE_DATABASE: taskiq + APP_USER: oracle + APP_USER_PASSWORD: oracle + ORACLE_PASSWORD: true + volumes: + - .:/oracle-data:/opt/oracle/oradata + ports: + - 1521:1521 + - 5500:5500 + healthcheck: + test: ["CMD-SHELL", "healthcheck.sh"] + interval: 3s + timeout: 1s + retries: 5 networks: - default volumes: postgres_data: + oracle_data: networks: default: driver: bridge diff --git a/taskiq_sqlalchemy/models.py b/taskiq_sqlalchemy/models.py index 9a1901c..b821321 100644 --- a/taskiq_sqlalchemy/models.py +++ b/taskiq_sqlalchemy/models.py @@ -2,15 +2,21 @@ import typing as t import sqlalchemy as sa +from sqlalchemy.dialects import oracle from sqlalchemy.orm import Mapped, mapped_column from sqlalchemy.sql import expression class BaseMixin: - # Sqlite doesn't allow BIGINT to be used as a primary key with autoincrement. - # See: https://stackoverflow.com/questions/18835740 id: Mapped[int] = mapped_column( - (sa.BigInteger().with_variant(sa.Integer, "sqlite")), + ( + sa.BigInteger() + # Sqlite doesn't allow BIGINT to be used as a primary key with autoincrement. 
+ # See: https://stackoverflow.com/questions/18835740 + .with_variant(sa.Integer, "sqlite") + .with_variant(oracle.NUMBER(38), "oracle") + ), + sa.Identity(), primary_key=True, ) @@ -45,7 +51,7 @@ class TaskiqScheduleMixin(BaseMixin): task_name: Mapped[str] = mapped_column(sa.String(255)) - schedule: Mapped[t.Any] = mapped_column(sa.JSON) + schedule: Mapped[t.Any] = mapped_column(sa.JSON().with_variant(sa.CLOB(), "oracle")) updated_at: Mapped[datetime.datetime] = mapped_column( sa.DateTime, diff --git a/taskiq_sqlalchemy/result_backend.py b/taskiq_sqlalchemy/result_backend.py index 47c6d7f..0b73346 100644 --- a/taskiq_sqlalchemy/result_backend.py +++ b/taskiq_sqlalchemy/result_backend.py @@ -6,7 +6,6 @@ import logging import typing as t -from typing import TypeVar import sqlalchemy as sa from taskiq import AsyncResultBackend @@ -16,7 +15,7 @@ from taskiq_sqlalchemy.manager import SQLAlchemyManager -_ReturnType = TypeVar("_ReturnType") +_ReturnType = t.TypeVar("_ReturnType") logger = logging.getLogger(__name__) @@ -112,8 +111,7 @@ async def get_result( async def is_result_ready(self, task_id: str) -> bool: async with self.manager.engine.connect() as conn: - exists_stmt = sa.select( + stmt = sa.select(sa.literal(True)).where( sa.exists().where(self.manager.result_cls.task_id == task_id) ) - - return (await conn.execute(exists_stmt)).scalar_one() + return bool(await conn.scalar(stmt)) diff --git a/tests/result_backend/conftest.py b/tests/result_backend/conftest.py index 2034521..b296a8d 100644 --- a/tests/result_backend/conftest.py +++ b/tests/result_backend/conftest.py @@ -18,15 +18,20 @@ id="sqlite+aiosqlite", ), pytest.param( - "postgresql+asyncpg://postgres:postgres@localhost:5432/postgres", + "postgresql+asyncpg://postgres:postgres@localhost:5432/taskiq", id="postgresql+asyncpg", marks=pytest.mark.postgresql, ), pytest.param( - "postgresql+psycopg://postgres:postgres@localhost:5432/postgres", + "postgresql+psycopg://postgres:postgres@localhost:5432/taskiq", 
id="postgresql+psycopg", marks=pytest.mark.postgresql, ), + pytest.param( + "oracle+oracledb://oracle:oracle@localhost:1521/?service_name=taskiq", + id="oracle+oracledb", + marks=pytest.mark.oracle, + ), ] @@ -35,7 +40,7 @@ async def _try_connect(engine: AsyncEngine) -> t.AsyncGenerator[AsyncEngine, Non """Yield the engine; skip the test if the DB is unreachable.""" try: async with engine.connect() as conn: - await conn.execute(sa.text("SELECT 1")) + await conn.execute(sa.select(1)) yield engine except Exception as exc: await engine.dispose() From f4d937ef1b335c3d22a10b73b182be4e01dcf709 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sun, 29 Mar 2026 20:02:36 +0530 Subject: [PATCH 09/25] chore: Ignore log files --- .gitignore | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 78a51c3..79fba3f 100644 --- a/.gitignore +++ b/.gitignore @@ -9,4 +9,7 @@ wheels/ # Virtual environments .venv .vscode -.env \ No newline at end of file +.env + +# Log files +*.log From 279a6e50439d16cde2d4e0cceec814358db3e7e7 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sun, 29 Mar 2026 20:03:15 +0530 Subject: [PATCH 10/25] chore: Delete scripts dir --- scripts/release.py | 184 ------------------------------- scripts/test-all-drivers.sh | 211 ------------------------------------ 2 files changed, 395 deletions(-) delete mode 100644 scripts/release.py delete mode 100644 scripts/test-all-drivers.sh diff --git a/scripts/release.py b/scripts/release.py deleted file mode 100644 index 0355575..0000000 --- a/scripts/release.py +++ /dev/null @@ -1,184 +0,0 @@ -#!/usr/bin/env python3 -"""Release automation script for taskiq-postgresql.""" - -import argparse -import re -import subprocess -import sys -from pathlib import Path - - -def run_command(cmd: list[str], check: bool = True) -> subprocess.CompletedProcess: - """Run a command and return the result.""" - print(f"Running: {' '.join(cmd)}") - return subprocess.run(cmd, check=check, 
capture_output=True, text=True) - - -def get_current_version() -> str: - """Get the current version from pyproject.toml.""" - pyproject_path = Path("pyproject.toml") - content = pyproject_path.read_text() - - version_match = re.search(r'version = "([^"]+)"', content) - if not version_match: - raise ValueError("Could not find version in pyproject.toml") - - return version_match.group(1) - - -def update_version(new_version: str) -> None: - """Update version in pyproject.toml.""" - pyproject_path = Path("pyproject.toml") - content = pyproject_path.read_text() - - # Update version - content = re.sub( - r'version = "[^"]+"', - f'version = "{new_version}"', - content, - ) - - pyproject_path.write_text(content) - print(f"Updated version to {new_version} in pyproject.toml") - - -def validate_version(version: str) -> bool: - """Validate version format (semantic versioning).""" - pattern = r"^\d+\.\d+\.\d+(?:-[a-zA-Z0-9]+(?:\.\d+)?)?$" - return bool(re.match(pattern, version)) - - -def check_git_status() -> None: - """Check if git working directory is clean.""" - result = run_command(["git", "status", "--porcelain"]) - if result.stdout.strip(): - print("Error: Git working directory is not clean") - print("Please commit or stash your changes before releasing") - sys.exit(1) - - -def run_tests() -> None: - """Run the test suite.""" - print("Running tests...") - result = run_command(["uv", "run", "pytest", "tests/", "-v"], check=False) - if result.returncode != 0: - print("Tests failed! Please fix them before releasing.") - sys.exit(1) - print("All tests passed!") - - -def run_linting() -> None: - """Run linting and formatting checks.""" - print("Running linting checks...") - - # Check formatting - result = run_command(["uv", "run", "ruff", "format", "--check", "."], check=False) - if result.returncode != 0: - print("Code formatting issues found. Run 'uv run ruff format .' 
to fix.") - sys.exit(1) - - # Check linting - result = run_command(["uv", "run", "ruff", "check", "."], check=False) - if result.returncode != 0: - print("Linting issues found. Please fix them before releasing.") - sys.exit(1) - - print("All linting checks passed!") - - -def build_package() -> None: - """Build the package.""" - print("Building package...") - run_command(["uv", "build"]) - print("Package built successfully!") - - -def create_git_tag(version: str) -> None: - """Create a git tag for the release.""" - tag_name = f"v{version}" - run_command(["git", "add", "pyproject.toml"]) - run_command(["git", "commit", "-m", f"Bump version to {version}"]) - run_command(["git", "tag", "-a", tag_name, "-m", f"Release {tag_name}"]) - print(f"Created git tag: {tag_name}") - - -def push_release(version: str) -> None: - """Push the release to GitHub.""" - tag_name = f"v{version}" - run_command(["git", "push", "origin", "main"]) - run_command(["git", "push", "origin", tag_name]) - print(f"Pushed release {tag_name} to GitHub") - - -def main() -> None: - """Main release function.""" - parser = argparse.ArgumentParser( - description="Release automation for taskiq-postgresql", - ) - parser.add_argument("version", help="New version to release (e.g., 1.0.0)") - parser.add_argument("--skip-tests", action="store_true", help="Skip running tests") - parser.add_argument( - "--skip-checks", - action="store_true", - help="Skip linting checks", - ) - parser.add_argument( - "--dry-run", - action="store_true", - help="Dry run (don't actually release)", - ) - - args = parser.parse_args() - - # Validate version format - if not validate_version(args.version): - print(f"Error: Invalid version format: {args.version}") - print("Version should follow semantic versioning (e.g., 1.0.0)") - sys.exit(1) - - current_version = get_current_version() - print(f"Current version: {current_version}") - print(f"New version: {args.version}") - - if args.version == current_version: - print("Error: New version 
is the same as current version") - sys.exit(1) - - if args.dry_run: - print("DRY RUN: Would perform the following actions:") - print(f"1. Update version from {current_version} to {args.version}") - print("2. Run tests and linting") - print("3. Build package") - print("4. Create git tag") - print("5. Push to GitHub") - return - - # Check git status - check_git_status() - - # Run tests - if not args.skip_tests: - run_tests() - - # Run linting - if not args.skip_checks: - run_linting() - - # Update version - update_version(args.version) - - # Build package - build_package() - - # Create git tag and push - create_git_tag(args.version) - - # Push to GitHub (this will trigger CD pipeline) - push_release(args.version) - - print(f"✅ Release {args.version} completed successfully!") - print("GitHub Actions will now build and publish the package to PyPI.") - - -if __name__ == "__main__": - main() diff --git a/scripts/test-all-drivers.sh b/scripts/test-all-drivers.sh deleted file mode 100644 index bc520f4..0000000 --- a/scripts/test-all-drivers.sh +++ /dev/null @@ -1,211 +0,0 @@ -#!/bin/bash - -# Test script for all PostgreSQL drivers -# This script runs tests against all supported drivers - -set -e - -DRIVERS=("asyncpg" "psycopg" "psqlpy") -PYTHON_VERSIONS=("3.9" "3.10" "3.11" "3.12" "3.13") - -echo "🧪 Testing taskiq-postgresql with all drivers" -echo "==============================================" - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -NC='\033[0m' # No Color - -# Function to print colored output -print_status() { - local status=$1 - local message=$2 - case $status in - "success") - echo -e "${GREEN}✅ $message${NC}" - ;; - "error") - echo -e "${RED}❌ $message${NC}" - ;; - "info") - echo -e "${YELLOW}ℹ️ $message${NC}" - ;; - esac -} - -# Check if PostgreSQL is running -check_postgres() { - print_status "info" "Checking PostgreSQL connection..." - if ! 
pg_isready -h localhost -p 5432 -U postgres &>/dev/null; then - print_status "error" "PostgreSQL is not running or not accessible" - print_status "info" "Start PostgreSQL with: docker-compose up -d" - exit 1 - fi - print_status "success" "PostgreSQL is running" -} - -# Test with specific driver -test_driver() { - local driver=$1 - local python_version=${2:-"3.12"} - - print_status "info" "Testing with $driver driver (Python $python_version)" - - # Install driver dependencies - if ! uv sync --group "$driver" &>/dev/null; then - print_status "error" "Failed to install $driver dependencies" - return 1 - fi - - # Set environment variables - export TEST_DRIVER="$driver" - export TEST_DATABASE_URL="postgresql://postgres:postgres@localhost:5432/postgres" - - # Run tests - if uv run pytest tests/ -v --tb=short -x; then - print_status "success" "$driver tests passed" - return 0 - else - print_status "error" "$driver tests failed" - return 1 - fi -} - -# Test all drivers -test_all_drivers() { - local failed_drivers=() - - for driver in "${DRIVERS[@]}"; do - echo - echo "----------------------------------------" - if test_driver "$driver"; then - print_status "success" "$driver: ALL TESTS PASSED" - else - print_status "error" "$driver: TESTS FAILED" - failed_drivers+=("$driver") - fi - done - - echo - echo "==========================================" - echo "📊 Test Summary" - echo "==========================================" - - for driver in "${DRIVERS[@]}"; do - if [[ " ${failed_drivers[*]} " =~ " ${driver} " ]]; then - print_status "error" "$driver: FAILED" - else - print_status "success" "$driver: PASSED" - fi - done - - if [ ${#failed_drivers[@]} -eq 0 ]; then - print_status "success" "All drivers passed tests! 🎉" - return 0 - else - print_status "error" "Some drivers failed tests" - return 1 - fi -} - -# Test with specific Python version -test_with_python() { - local python_version=$1 - print_status "info" "Testing with Python $python_version" - - if ! 
uv python install "$python_version" &>/dev/null; then - print_status "error" "Failed to install Python $python_version" - return 1 - fi - - # Set Python version for UV - uv python pin "$python_version" - - test_all_drivers -} - -# Performance test -run_performance_tests() { - print_status "info" "Running performance tests..." - - export TEST_DATABASE_URL="postgresql://postgres:postgres@localhost:5432/postgres" - - if uv run pytest tests/ -v -m "performance" --tb=short; then - print_status "success" "Performance tests passed" - else - print_status "error" "Performance tests failed" - fi -} - -# Integration test -run_integration_tests() { - print_status "info" "Running integration tests..." - - export TEST_DATABASE_URL="postgresql://postgres:postgres@localhost:5432/postgres" - - if uv run pytest tests/ -v -m "integration" --tb=short; then - print_status "success" "Integration tests passed" - else - print_status "error" "Integration tests failed" - fi -} - -# Main script logic -main() { - case "${1:-all}" in - "all") - check_postgres - test_all_drivers - ;; - "driver") - if [ -z "$2" ]; then - echo "Usage: $0 driver " - echo "Available drivers: ${DRIVERS[*]}" - exit 1 - fi - check_postgres - test_driver "$2" - ;; - "python") - if [ -z "$2" ]; then - echo "Usage: $0 python " - echo "Available versions: ${PYTHON_VERSIONS[*]}" - exit 1 - fi - check_postgres - test_with_python "$2" - ;; - "performance") - check_postgres - run_performance_tests - ;; - "integration") - check_postgres - run_integration_tests - ;; - "help"|"--help"|"-h") - echo "Usage: $0 [command] [options]" - echo - echo "Commands:" - echo " all Test all drivers (default)" - echo " driver Test specific driver" - echo " python Test with specific Python version" - echo " performance Run performance tests" - echo " integration Run integration tests" - echo " help Show this help" - echo - echo "Available drivers: ${DRIVERS[*]}" - echo "Available Python versions: ${PYTHON_VERSIONS[*]}" - exit 0 - ;; - *) - 
print_status "error" "Unknown command: $1" - echo "Use '$0 help' for usage information" - exit 1 - ;; - esac -} - -# Run main function -main "$@" \ No newline at end of file From 62483c7973e14f8b4d52946a044bd40f8f007311 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sun, 29 Mar 2026 20:04:58 +0530 Subject: [PATCH 11/25] refactor: Move docker compose + init scripts to docker/ --- .../docker-compose.yml | 15 +++++----- docker/initdb.d/01-oracle-grants.sql | 24 +++++++++++++++ taskiq_sqlalchemy/models.py | 2 +- tests/conftest.py | 8 +++++ tests/result_backend/conftest.py | 29 +++---------------- 5 files changed, 44 insertions(+), 34 deletions(-) rename docker-compose.yml => docker/docker-compose.yml (67%) create mode 100644 docker/initdb.d/01-oracle-grants.sql diff --git a/docker-compose.yml b/docker/docker-compose.yml similarity index 67% rename from docker-compose.yml rename to docker/docker-compose.yml index c94625e..ca1d8a6 100644 --- a/docker-compose.yml +++ b/docker/docker-compose.yml @@ -3,15 +3,15 @@ services: image: postgres:10 restart: always environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres + POSTGRES_USER: taskiq_user + POSTGRES_PASSWORD: taskiq_pwd POSTGRES_DB: taskiq volumes: - - .:/postgres_data:/var/lib/postgresql/data/ + - postgres_data:/var/lib/postgresql/data/ ports: - 5432:5432 healthcheck: - test: ["CMD-SHELL", "pg_isready", "-U", "postgres", "-d", "taskiq"] + test: ["CMD-SHELL", "pg_isready", "-U", "taskiq_user", "-d", "taskiq"] interval: 3s timeout: 1s retries: 5 @@ -23,11 +23,10 @@ services: restart: always environment: ORACLE_DATABASE: taskiq - APP_USER: oracle - APP_USER_PASSWORD: oracle - ORACLE_PASSWORD: true + ORACLE_PASSWORD: "PasswoRd" volumes: - - .:/oracle-data:/opt/oracle/oradata + - oracle_data:/opt/oracle/oradata + - ./initdb.d:/container-entrypoint-initdb.d:ro ports: - 1521:1521 - 5500:5500 diff --git a/docker/initdb.d/01-oracle-grants.sql b/docker/initdb.d/01-oracle-grants.sql new file mode 100644 index 
0000000..128321a --- /dev/null +++ b/docker/initdb.d/01-oracle-grants.sql @@ -0,0 +1,24 @@ +-- init/01-oracle-grants.sql + +ALTER SESSION SET CONTAINER=taskiq; + +CREATE USER taskiq_user IDENTIFIED BY taskiq_pwd; + +-- Basic permissions +GRANT CREATE SESSION TO taskiq_user; +GRANT CREATE TABLE TO taskiq_user; +GRANT CREATE VIEW TO taskiq_user; +GRANT CREATE SEQUENCE TO taskiq_user; +ALTER USER taskiq_user quota unlimited on USERS; + +-- AQ permissions +-- TODO: Are all these permissions required? Can we reduce them? +GRANT EXECUTE ON dbms_aq TO taskiq_user; +GRANT RESOURCE TO taskiq_user; +GRANT CONNECT TO taskiq_user; +GRANT EXECUTE ANY PROCEDURE TO taskiq_user; +GRANT aq_administrator_role TO taskiq_user; +GRANT aq_user_role TO taskiq_user; +GRANT EXECUTE ON dbms_aqadm TO taskiq_user; +GRANT EXECUTE ON dbms_aq TO taskiq_user; +GRANT EXECUTE ON dbms_aqin TO taskiq_user; diff --git a/taskiq_sqlalchemy/models.py b/taskiq_sqlalchemy/models.py index b821321..cff3a64 100644 --- a/taskiq_sqlalchemy/models.py +++ b/taskiq_sqlalchemy/models.py @@ -11,7 +11,7 @@ class BaseMixin: id: Mapped[int] = mapped_column( ( sa.BigInteger() - # Sqlite doesn't allow BIGINT to be used as a primary key with autoincrement. + # Sqlite doesn't allow BIGINT to be used as a primary key with autoincrement # See: https://stackoverflow.com/questions/18835740 .with_variant(sa.Integer, "sqlite") .with_variant(oracle.NUMBER(38), "oracle") diff --git a/tests/conftest.py b/tests/conftest.py index b3ef23b..f65e6f4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,9 +5,17 @@ (e.g. tests/result_backend/conftest.py). 
""" +import uuid + import pytest @pytest.fixture(scope="session") def anyio_backend() -> str: return "asyncio" + + +@pytest.fixture +def task_id() -> str: + """A fresh UUID string for each test.""" + return str(uuid.uuid4()) diff --git a/tests/result_backend/conftest.py b/tests/result_backend/conftest.py index b296a8d..23c089b 100644 --- a/tests/result_backend/conftest.py +++ b/tests/result_backend/conftest.py @@ -1,9 +1,7 @@ import typing as t import uuid -from contextlib import asynccontextmanager import pytest -import sqlalchemy as sa from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine from sqlalchemy.orm import DeclarativeBase from taskiq import TaskiqResult @@ -18,35 +16,23 @@ id="sqlite+aiosqlite", ), pytest.param( - "postgresql+asyncpg://postgres:postgres@localhost:5432/taskiq", + "postgresql+asyncpg://taskiq_user:taskiq_pwd@localhost:5432/taskiq", id="postgresql+asyncpg", marks=pytest.mark.postgresql, ), pytest.param( - "postgresql+psycopg://postgres:postgres@localhost:5432/taskiq", + "postgresql+psycopg://taskiq_user:taskiq_pwd@localhost:5432/taskiq", id="postgresql+psycopg", marks=pytest.mark.postgresql, ), pytest.param( - "oracle+oracledb://oracle:oracle@localhost:1521/?service_name=taskiq", + "oracle+oracledb://taskiq_user:taskiq_pwd@localhost:1521/?service_name=taskiq", id="oracle+oracledb", marks=pytest.mark.oracle, ), ] -@asynccontextmanager -async def _try_connect(engine: AsyncEngine) -> t.AsyncGenerator[AsyncEngine, None]: - """Yield the engine; skip the test if the DB is unreachable.""" - try: - async with engine.connect() as conn: - await conn.execute(sa.select(1)) - yield engine - except Exception as exc: - await engine.dispose() - pytest.skip(f"Database not reachable ({engine.url.drivername}): {exc}") - - @pytest.fixture(params=_ENGINE_PARAMS) async def async_engine( request: pytest.FixtureRequest, @@ -61,8 +47,7 @@ async def async_engine( url: str = request.param engine = create_async_engine(url) try: - async with 
_try_connect(engine): - yield engine + yield engine finally: await engine.dispose() @@ -140,12 +125,6 @@ class _Base(DeclarativeBase): await conn.run_sync(_Base.metadata.drop_all) -@pytest.fixture -def task_id() -> str: - """A fresh UUID string for each test.""" - return str(uuid.uuid4()) - - @pytest.fixture def another_task_id() -> str: """A second, distinct UUID string.""" From 27d2bd78bc1bf1bbcb0df2df623dbdf61596a552 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Sun, 29 Mar 2026 20:09:24 +0530 Subject: [PATCH 12/25] feat: Add postgres,oracle,polling brokers + unit tests --- pyproject.toml | 3 +- taskiq_sqlalchemy/__init__.py | 3 +- taskiq_sqlalchemy/adapters/__init__.py | 16 ++ taskiq_sqlalchemy/adapters/abc.py | 47 +++++ taskiq_sqlalchemy/adapters/oracle.py | 253 +++++++++++++++++++++++ taskiq_sqlalchemy/adapters/polling.py | 54 +++++ taskiq_sqlalchemy/adapters/postgresql.py | 121 +++++++++++ taskiq_sqlalchemy/broker.py | 131 ++++++++++++ tests/broker/__init__.py | 0 tests/broker/adapters/__init__.py | 0 tests/broker/adapters/test_oracle.py | 164 +++++++++++++++ tests/broker/adapters/test_polling.py | 202 ++++++++++++++++++ tests/broker/adapters/test_postgresql.py | 113 ++++++++++ tests/broker/conftest.py | 175 ++++++++++++++++ tests/broker/test_fetch_message.py | 164 +++++++++++++++ tests/broker/test_kick.py | 162 +++++++++++++++ tests/broker/test_listen.py | 204 ++++++++++++++++++ tests/broker/test_resolve_adapter.py | 50 +++++ 18 files changed, 1860 insertions(+), 2 deletions(-) create mode 100644 taskiq_sqlalchemy/adapters/__init__.py create mode 100644 taskiq_sqlalchemy/adapters/abc.py create mode 100644 taskiq_sqlalchemy/adapters/oracle.py create mode 100644 taskiq_sqlalchemy/adapters/polling.py create mode 100644 taskiq_sqlalchemy/adapters/postgresql.py create mode 100644 taskiq_sqlalchemy/broker.py create mode 100644 tests/broker/__init__.py create mode 100644 tests/broker/adapters/__init__.py create mode 100644 
tests/broker/adapters/test_oracle.py create mode 100644 tests/broker/adapters/test_polling.py create mode 100644 tests/broker/adapters/test_postgresql.py create mode 100644 tests/broker/conftest.py create mode 100644 tests/broker/test_fetch_message.py create mode 100644 tests/broker/test_kick.py create mode 100644 tests/broker/test_listen.py create mode 100644 tests/broker/test_resolve_adapter.py diff --git a/pyproject.toml b/pyproject.toml index 5281965..4cfd167 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,7 +93,8 @@ universal = true [tool.pytest.ini_options] markers = [ - "postgresql: tests that require a running PostgreSQL server (skip if unavailable)", + "postgresql: tests that require a running PostgreSQL server", + "oracle: tests that require a running OracleDB server", ] [tool.ruff.lint] diff --git a/taskiq_sqlalchemy/__init__.py b/taskiq_sqlalchemy/__init__.py index a4be39a..b4f9bfa 100644 --- a/taskiq_sqlalchemy/__init__.py +++ b/taskiq_sqlalchemy/__init__.py @@ -3,6 +3,7 @@ TaskIQ. 
""" +from taskiq_sqlalchemy.broker import SQLAlchemyBroker from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend -__all__ = ["SQLAlchemyResultBackend"] +__all__ = ["SQLAlchemyBroker", "SQLAlchemyResultBackend"] diff --git a/taskiq_sqlalchemy/adapters/__init__.py b/taskiq_sqlalchemy/adapters/__init__.py new file mode 100644 index 0000000..4917bdd --- /dev/null +++ b/taskiq_sqlalchemy/adapters/__init__.py @@ -0,0 +1,16 @@ +from taskiq_sqlalchemy.manager import SQLAlchemyManager + + +def resolve_adapter(manager: SQLAlchemyManager): + if manager.engine.dialect.name == "postgresql": + from taskiq_sqlalchemy.adapters.postgresql import PostgresDialectAdapter + + return PostgresDialectAdapter(manager.engine) + if manager.engine.dialect.name == "oracle": + from taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter + + return OracleDialectAdapter(manager.engine) + + from taskiq_sqlalchemy.adapters.polling import PollingAdapter + + return PollingAdapter(manager.engine, queue_cls=manager.queue_cls) diff --git a/taskiq_sqlalchemy/adapters/abc.py b/taskiq_sqlalchemy/adapters/abc.py new file mode 100644 index 0000000..d72b605 --- /dev/null +++ b/taskiq_sqlalchemy/adapters/abc.py @@ -0,0 +1,47 @@ +from abc import ABC, abstractmethod +from typing import AsyncGenerator + +from sqlalchemy.ext.asyncio import AsyncEngine + + +class DialectAdapter(ABC): + """ + Abstract interface for database-specific pub/sub operations. + """ + + def __init__(self, engine: AsyncEngine) -> None: + self.engine = engine + + async def broker_startup(self) -> None: + return + + async def broker_shutdown(self) -> None: + return + + async def worker_startup(self) -> None: + return + + async def worker_shutdown(self) -> None: + return + + async def client_startup(self) -> None: + return + + async def client_shutdown(self) -> None: + return + + @abstractmethod + async def notify(self, channel: str, payload: str) -> None: + """ + Send a pub/sub notification on *channel* with *payload*. 
+ + Must not block; may be called from any coroutine. + """ + + @abstractmethod + def listen(self, channel: str) -> AsyncGenerator[str, None]: + """ + Return an async generator that yields string payloads as they arrive + on *channel*. The generator runs indefinitely; the broker cancels it + on shutdown. + """ diff --git a/taskiq_sqlalchemy/adapters/oracle.py b/taskiq_sqlalchemy/adapters/oracle.py new file mode 100644 index 0000000..33526dd --- /dev/null +++ b/taskiq_sqlalchemy/adapters/oracle.py @@ -0,0 +1,253 @@ +"""taskiq_sqlalchemy.oracle_adapter + +OracleDialectAdapter — pub/sub for Oracle Database using Advanced Queuing (AQ). +""" + +import logging +import typing as t + +import anyio +import anyio.to_thread +import oracledb +import sqlalchemy as sa +from oracledb import DEQ_FIRST_MSG, DEQ_NO_WAIT +from sqlalchemy.ext.asyncio import AsyncEngine + +from taskiq_sqlalchemy.adapters.abc import DialectAdapter + +logger = logging.getLogger(__name__) + +# Maximum characters in an Oracle AQ queue name (safe for all Oracle versions) +_MAX_QUEUE_NAME = 24 +_QUEUE_TABLE_SUFFIX = "_QT" # AQ requires a separate queue table object + + +def _oracle_queue_name(channel: str) -> str: + """ + Derive a safe Oracle AQ queue name from a taskiq channel name. + + Rules: + - Uppercase (Oracle identifiers are case-insensitive but uppercase by convention) + - Max _MAX_QUEUE_NAME characters for the queue name itself + - Queue table name = queue name + _QT (Oracle AQ requirement) + """ + raw = channel.upper().replace("-", "_").replace(".", "_") + # Reserve room for the _QT suffix on the queue table name + return raw[:_MAX_QUEUE_NAME] + + +class OracleDialectAdapter(DialectAdapter): + """ + Oracle Advanced Queuing (AQ) adapter for taskiq-sqlalchemy. 
+ """ + + def __init__( + self, + engine: AsyncEngine, + ) -> None: + super().__init__(engine) + + self._stop_event = anyio.Event() + + async def broker_startup(self) -> None: + if oracledb.is_thin_mode() is False: + raise RuntimeError( + "OracleDialectAdapter requires python-oracledb Thin mode. " + "Do not call oracledb.init_oracle_client() before using this adapter. " + "Asyncio support is only available in Thin mode." + ) + + await self.ensure_queue("taskiq") + + async def broker_shutdown(self) -> None: + self._stop_event.set() + + async def ensure_queue(self, channel: str) -> None: + """ + Create the AQ queue table and queue if they do not already exist. + + Called automatically by listen() and notify() on first use. + Safe to call multiple times — uses IF NOT EXISTS semantics via + exception handling on the DBMS_AQADM calls. + + This is the Oracle equivalent of the Postgres CREATE TABLE IF NOT EXISTS + in _create_table(). It must run before any enqueue or dequeue. + """ + queue_name = _oracle_queue_name(channel) + queue_table = queue_name + _QUEUE_TABLE_SUFFIX + + async with self.engine.begin() as conn: + await conn.execute( + sa.text(""" + DECLARE + v_qt_count NUMBER; + v_q_count NUMBER; + BEGIN + SELECT COUNT(*) INTO v_qt_count + FROM user_queue_tables + WHERE queue_table = :queue_table; + + IF v_qt_count = 0 THEN + DBMS_AQADM.CREATE_QUEUE_TABLE( + queue_table => :queue_table, + queue_payload_type => 'RAW' + ); + END IF; + + SELECT COUNT(*) INTO v_q_count + FROM user_queues + WHERE name = :queue_name; + + IF v_q_count = 0 THEN + DBMS_AQADM.CREATE_QUEUE( + queue_name => :queue_name, + queue_table => :queue_table, + max_retries => :max_retries + ); + DBMS_AQADM.START_QUEUE( + queue_name => :queue_name + ); + END IF; + END; + """), + { + "queue_table": queue_table, + "queue_name": queue_name, + "max_retries": 5, + }, + ) + logger.info( + "OracleDialectAdapter: ensured AQ queue table %r and queue %r", + queue_table, + queue_name, + ) + + async def 
purge_queue(self, channel: str) -> None: + """Purge all messages in the queue.""" + queue_name = _oracle_queue_name(channel) + queue_table = queue_name + _QUEUE_TABLE_SUFFIX + + async with self.engine.begin() as conn: + await conn.execute( + sa.text(""" + DECLARE + v_qt_count NUMBER; + v_purge_options DBMS_AQADM.AQ$_PURGE_OPTIONS_T; + BEGIN + SELECT COUNT(*) INTO v_qt_count + FROM user_queue_tables + WHERE queue_table = :queue_table; + + IF v_qt_count > 0 THEN + v_purge_options.block := TRUE; + v_purge_options.delivery_mode := DBMS_AQADM.PERSISTENT_OR_BUFFERED; + DBMS_AQADM.PURGE_QUEUE_TABLE( + queue_table => :queue_table, + purge_condition => NULL, + purge_options => v_purge_options + ); + END IF; + END; + """), + { + "queue_table": queue_table, + }, + ) + logger.info("OracleDialectAdapter: purged AQ queue table %r", queue_table) + + async def notify(self, channel: str, payload: str) -> None: + """ + Enqueue a message into the Oracle AQ queue for this channel. + """ + queue_name = _oracle_queue_name(channel) + + async with self.engine.begin() as conn: + driver_connection = (await conn.get_raw_connection()).driver_connection + queue = await anyio.to_thread.run_sync(driver_connection.queue, queue_name) + # ENQ_IMMEDIATE: commit on enqueue without waiting for connection commit. + # This ensures the signal is visible to dequeuing workers immediately. + queue.enqoptions.visibility = oracledb.ENQ_IMMEDIATE + await queue.enqone( + driver_connection.msgproperties(payload=payload.encode()) + ) + logger.debug( + "OracleDialectAdapter: enqueued task_id=%r on queue %r", + payload, + queue_name, + ) + + def listen(self, channel: str) -> t.AsyncGenerator[str, None]: + return self._listen_gen(channel) + + async def _listen_gen(self, channel: str) -> t.AsyncGenerator[str, None]: + """ + Blocking dequeue loop on the dedicated listener connection. + + deqone() with DEQ_WAIT_FOREVER blocks at the network level until + Oracle delivers a message — no polling, no sleep(). 
This is the async + equivalent of asyncpg's add_listener callback model: the coroutine + suspends at `await queue.deqone()` and resumes only when a message + arrives. + + We use a finite dequeue_wait instead of DEQ_WAIT_FOREVER to allow + clean shutdown: if dequeue_wait=5, the worst-case shutdown delay is + 5 seconds. When stop_event is set, the generator exits. + + After dequeue we commit immediately (DEQ_ON_COMMIT default) so that + the message is removed from the AQ queue. The actual task processing + happens via the broker's _fetch_message() on the queue TABLE — AQ is + only the transport/signal layer. + """ + + queue_name = _oracle_queue_name(channel) + + async with self.engine.connect() as conn: + driver_connection = (await conn.get_raw_connection()).driver_connection + + queue = await anyio.to_thread.run_sync(driver_connection.queue, queue_name) + + queue.deqoptions.wait = DEQ_NO_WAIT + queue.deqoptions.navigation = DEQ_FIRST_MSG + + logger.debug( + "OracleDialectAdapter: starting dequeue loop on queue %r", + queue_name, + ) + + while not self._stop_event.is_set(): + try: + message = await queue.deqone() + except Exception as exc: + # ORA-25228: timeout/end-of-wait with no message available. + # This is the normal "no messages yet" signal when using a + # finite wait. Loop back and check stop_event. + if "ORA-25228" in str(exc): + await anyio.sleep(0.5) + continue + # Any other error - log and re-raise to let the broker decide + # whether to restart the listener. + logger.exception( + "OracleDialectAdapter: dequeue error on queue %r", queue_name + ) + raise + + if message is None: + # deqone() returned None — finite wait expired, no message. + await anyio.sleep(0.5) + continue + + # Commit to remove the message from the AQ queue. 
+ await conn.commit() + + # Decode the RAW payload back to a string (task_id) + payload = message.payload.decode() + logger.debug( + "OracleDialectAdapter: dequeued task_id=%r from queue %r", + payload, + queue_name, + ) + yield payload + + logger.debug( + "OracleDialectAdapter: dequeue loop exited for queue %r", queue_name + ) diff --git a/taskiq_sqlalchemy/adapters/polling.py b/taskiq_sqlalchemy/adapters/polling.py new file mode 100644 index 0000000..3e67351 --- /dev/null +++ b/taskiq_sqlalchemy/adapters/polling.py @@ -0,0 +1,54 @@ +import typing as t + +import anyio +import sqlalchemy as sa +from sqlalchemy.ext.asyncio import AsyncEngine + +from taskiq_sqlalchemy.adapters.abc import DialectAdapter +from taskiq_sqlalchemy.models import TaskiqQueueMixin + + +class PollingAdapter(DialectAdapter): + """ + A pure-SQL adapter that polls the queue table instead of using native + pub/sub. Used when no specific adapter is registered for a dialect. + + Not ideal for high-throughput scenarios, but means the library works on + *any* SQLAlchemy-supported database out of the box. + """ + + POLL_INTERVAL_SECS = 2 + POLL_RESULT_LIMIT = 25 + + def __init__(self, engine: AsyncEngine, queue_cls: type[TaskiqQueueMixin]) -> None: + super().__init__(engine) + self.queue_cls = queue_cls + self._stop_event = anyio.Event() + + async def worker_shutdown(self) -> None: + self._stop_event.set() + + async def notify(self, channel: str, payload: str) -> None: + # No-op for polling mode — the broker writes to the queue table directly + # and the poller reads it. notify() is still called by the broker but + # is a no-op here because the poll loop does the wakeup. 
+ pass + + def listen(self, channel: str) -> t.AsyncGenerator[str, None]: + return self._poll_gen(channel) + + async def _poll_gen(self, channel: str) -> t.AsyncGenerator[str, None]: + while not self._stop_event.is_set(): + async with self.engine.begin() as conn: + result = await conn.execute( + sa.select(self.queue_cls.task_id) + .filter_by(channel=channel) + .limit(self.POLL_RESULT_LIMIT) + .with_for_update(skip_locked=True) + ) + rows = result.fetchall() + for row in rows: + yield str(row.task_id) + if not rows: + with anyio.move_on_after(self.POLL_INTERVAL_SECS): + await self._stop_event.wait() diff --git a/taskiq_sqlalchemy/adapters/postgresql.py b/taskiq_sqlalchemy/adapters/postgresql.py new file mode 100644 index 0000000..a045e79 --- /dev/null +++ b/taskiq_sqlalchemy/adapters/postgresql.py @@ -0,0 +1,121 @@ +from __future__ import annotations + +import logging +import math +import typing as t +from typing import AsyncGenerator + +import anyio +import anyio.abc +import sqlalchemy as sa +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream +from sqlalchemy.ext.asyncio import AsyncEngine + +from taskiq_sqlalchemy.adapters.abc import DialectAdapter + +logger = logging.getLogger(__name__) + + +class PostgresDialectAdapter(DialectAdapter): + """ + Postgres LISTEN/NOTIFY via a dedicated asyncpg connection. + + We intentionally hold *one* raw asyncpg connection for listening from the + SQLAlchemy pool. That connection must not be returned to the pool while + the listener is active. + """ + + _listener_conn: t.Optional[sa.PoolProxiedConnection] + + # send/receive for the subscribed channel. 
+ _send_stream: t.Optional[MemoryObjectSendStream[str]] + _recv_stream: t.Optional[MemoryObjectReceiveStream[str]] + + def __init__(self, engine: AsyncEngine) -> None: + super().__init__(engine) + self._listener_conn = None + self._send_stream = None + self._recv_stream = None + self._stop_event = anyio.Event() + + async def worker_startup(self) -> None: + self._listener_conn = await self.engine.raw_connection() + logger.debug("PostgresDialectAdapter: raw asyncpg connection acquired") + + async def worker_shutdown(self) -> None: + self._stop_event.set() + + if self._send_stream is not None: + await self._send_stream.aclose() + + if self._listener_conn is not None: + try: + self._listener_conn.close() + except Exception: + logger.exception( + "PostgresDialectAdapter: error closing listener conn", + exc_info=True, + ) + self._listener_conn = None + + async def notify(self, channel: str, payload: str) -> None: + async with self.engine.begin() as conn: + await conn.execute( + sa.text("SELECT pg_notify(:channel, :payload)"), + {"channel": channel, "payload": payload}, + ) + + def listen(self, channel: str) -> AsyncGenerator[str, None]: + return self._listen_gen(channel) + + async def _listen_gen(self, channel: str) -> AsyncGenerator[str, None]: + if self._listener_conn is None or self._listener_conn.driver_connection is None: + raise RuntimeError("Connection not available for listen()") + + # Unbounded buffer: asyncpg delivers notifications synchronously, so we + # must never block the callback. If the consumer is slow the buffer + # grows; that's acceptable — tasks are short-lived string IDs. + self._send_stream, self._recv_stream = anyio.create_memory_object_stream[str]( + max_buffer_size=math.inf + ) + + def _callback(conn: object, pid: int, channel_: str, payload: str) -> None: + # Called synchronously by asyncpg on the event-loop thread. + # send_nowait() is safe here — it never suspends. 
+ try: + self._send_stream.send_nowait(payload) + except anyio.WouldBlock: + # Should never happen with max_buffer_size=inf, but guard + # defensively to prevent a silent drop crashing the callback. + logger.warning( + "PostgresDialectAdapter: notification buffer full on channel %r; " + "payload %r dropped", + channel_, + payload, + ) + + await self._listener_conn.driver_connection.add_listener(channel, _callback) + logger.debug("PostgresDialectAdapter: listening on channel %r", channel) + + try: + async with self._recv_stream: + async for payload in self._recv_stream: + if self._stop_event.is_set(): + return + yield payload + finally: + # Always remove the asyncpg listener and clean up, even if the + # generator is garbage-collected or cancelled mid-iteration. + if self._listener_conn is not None: + try: + await self._listener_conn.driver_connection.remove_listener( + channel, _callback + ) + except Exception: + logger.debug( + "PostgresDialectAdapter: could not remove listener for %r " + "(connection may already be closed)", + channel, + exc_info=True, + ) + await self._send_stream.aclose() diff --git a/taskiq_sqlalchemy/broker.py b/taskiq_sqlalchemy/broker.py new file mode 100644 index 0000000..a23cf26 --- /dev/null +++ b/taskiq_sqlalchemy/broker.py @@ -0,0 +1,131 @@ +"""taskiq_sqlalchemy.broker + +SQLAlchemy-backed TaskIQ broker. + +The broker owns the queue table and delegates all pub/sub to the DialectAdapter. +""" + +import logging +import typing as t + +import sqlalchemy as sa +from taskiq import AckableMessage, AsyncBroker, BrokerMessage, TaskiqEvents, TaskiqState + +from taskiq_sqlalchemy.adapters import resolve_adapter +from taskiq_sqlalchemy.adapters.abc import DialectAdapter +from taskiq_sqlalchemy.manager import SQLAlchemyManager + +logger = logging.getLogger(__name__) + + +class SQLAlchemyBroker(AsyncBroker): + """ + TaskIQ broker backed by any SQLAlchemy async engine. 
+ """ + + _adapter: DialectAdapter + + def __init__( + self, + manager: SQLAlchemyManager, + *, + channel_name: str = "taskiq", + adapter: t.Optional[DialectAdapter] = None, + **kwargs: t.Any, + ) -> None: + super().__init__(**kwargs) + self.manager = manager + self.channel_name = channel_name + self._adapter = adapter or resolve_adapter(manager) + + self.add_event_handler(TaskiqEvents.WORKER_STARTUP, self.on_worker_startup) + self.add_event_handler(TaskiqEvents.WORKER_SHUTDOWN, self.on_worker_shutdown) + self.add_event_handler(TaskiqEvents.CLIENT_STARTUP, self.on_client_startup) + self.add_event_handler(TaskiqEvents.CLIENT_SHUTDOWN, self.on_client_shutdown) + + async def startup(self) -> None: + await super().startup() + await self._adapter.broker_startup() + logger.info( + "SQLAlchemyBroker started (dialect=%s, channel=%r)", + self.manager.engine.dialect.name, + self.channel_name, + ) + + async def shutdown(self) -> None: + await self._adapter.broker_shutdown() + await super().shutdown() + logger.info("SQLAlchemyBroker shut down") + + async def kick(self, message: BrokerMessage) -> None: + """ + Persist the message then notify listening workers. + The two steps are deliberately *not* in the same transaction: + we want the row committed before we send the notify so that workers + that wake up immediately can actually read it. + """ + serialised = message.model_dump_json().encode() + + async with self.manager.engine.begin() as conn: + await conn.execute( + sa.insert(self.manager.queue_cls).values( + task_id=message.task_id, + channel=self.channel_name, + task_name=message.task_name, + message=serialised, + ) + ) + + # Notify outside the transaction so the row is visible to workers + await self._adapter.notify(self.channel_name, message.task_id) + logger.debug("Kicked task %s (%s)", message.task_id, message.task_name) + + async def listen(self) -> t.AsyncGenerator[AckableMessage, None]: + """ + Yield ``AckableMessage`` objects as they arrive. 
+ + For Postgres (and other native-pub/sub dialects) this is push-based. + For polling adapters each yielded item triggers a table read. + """ + async for task_id in self._adapter.listen(self.channel_name): + message = await self._fetch_message(task_id) + if message is not None: + yield message + + async def _fetch_message(self, task_id: str) -> t.Optional[AckableMessage]: + """ + Fetch and deserialise one message, then delete it from the queue. + """ + async with self.manager.engine.begin() as conn: + result = await conn.execute( + sa.delete(self.manager.queue_cls) + .filter_by(task_id=task_id, channel=self.channel_name) + .returning(self.manager.queue_cls.message) + ) + row = result.first() + if row is None: + # Another worker already claimed it + return None + + async def ack() -> None: + # No-op: the row was already deleted when claimed. + return None + + try: + broker_message = BrokerMessage.model_validate_json(row.message) + return AckableMessage(data=broker_message.message, ack=ack) + except Exception: + logger.exception("Failed to deserialise message for task %s", task_id) + return None + + async def on_worker_startup(self, state: TaskiqState) -> None: + await self._adapter.worker_startup() + + async def on_worker_shutdown(self, state: TaskiqState) -> None: + await self._adapter.worker_shutdown() + + async def on_client_startup(self, state: TaskiqState) -> None: + await self._adapter.client_startup() + + async def on_client_shutdown(self, state: TaskiqState) -> None: + await self._adapter.client_shutdown() diff --git a/tests/broker/__init__.py b/tests/broker/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/broker/adapters/__init__.py b/tests/broker/adapters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/broker/adapters/test_oracle.py b/tests/broker/adapters/test_oracle.py new file mode 100644 index 0000000..18bb206 --- /dev/null +++ b/tests/broker/adapters/test_oracle.py @@ -0,0 +1,164 @@ 
+"""tests/broker/adapters/test_oracle.py + +Tests for ``OracleDialectAdapter`` in isolation. + +Architecture of Oracle AQ +-------------------------- +* ``ensure_queue(channel)`` creates a RAW queue table + queue via DBMS_AQADM. + Idempotent: ORA-24001 / ORA-24006 are silently ignored. +* ``notify(channel, payload)`` enqueues a RAW message with ENQ_IMMEDIATE. +* ``listen(channel)`` dequeues in a loop with DEQ_NO_WAIT; sleeps 0.5 s when + empty; exits when ``_stop_event`` is set. +""" + +import typing as t +from unittest.mock import MagicMock, patch + +import anyio +import anyio.to_thread +import oracledb +import pytest +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine + +from taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter, _oracle_queue_name + +_ORA_URL = ( + "oracle+oracledb://taskiq_user:taskiq_pwd@localhost:1521/?service_name=taskiq" +) + + +@pytest.fixture +async def ora_engine() -> t.AsyncGenerator[AsyncEngine, None]: + engine = create_async_engine(_ORA_URL) + try: + yield engine + finally: + await engine.dispose() + + +@pytest.fixture +async def ora_adapter( + ora_engine: AsyncEngine, +) -> t.AsyncGenerator[OracleDialectAdapter, None]: + adapter = OracleDialectAdapter(ora_engine) + await adapter.ensure_queue("test_ora_channel") + try: + yield adapter + finally: + await adapter.purge_queue("test_ora_channel") + adapter._stop_event.set() + + +@pytest.mark.parametrize( + ("channel", "expected"), + [ + ("taskiq", "TASKIQ"), + ("my-channel", "MY_CHANNEL"), + ("my.channel.name", "MY_CHANNEL_NAME"), + ("a" * 30, "A" * 24), # truncated to _MAX_QUEUE_NAME = 24 + ("ALREADY_UPPER", "ALREADY_UPPER"), + ], +) +def test_oracle_queue_name(channel: str, expected: str) -> None: + """_oracle_queue_name: uppercase, separator replacement, truncation.""" + assert _oracle_queue_name(channel) == expected + + +@pytest.mark.anyio +async def test_broker_startup_raises_in_thick_mode() -> None: + """ + broker_startup() must raise RuntimeError when 
oracledb is NOT in thin mode. + Verified via mock — no real thick Oracle client required. + """ + adapter = OracleDialectAdapter(MagicMock()) + + with ( + patch( + "taskiq_sqlalchemy.adapters.oracle.oracledb.is_thin_mode", + return_value=False, + ), + pytest.raises(RuntimeError, match="Thin mode"), + ): + await adapter.broker_startup() + + +@pytest.mark.oracle +@pytest.mark.anyio +async def test_ensure_queue_idempotent( + ora_adapter: OracleDialectAdapter, +) -> None: + """Calling ensure_queue() a second time must not raise ORA-24001/ORA-24006.""" + await ora_adapter.ensure_queue("test_ora_channel") + + +@pytest.mark.oracle +@pytest.mark.anyio +async def test_notify_enqueues_message( + ora_adapter: OracleDialectAdapter, + ora_engine: AsyncEngine, +) -> None: + """ + notify() must place a RAW message on the AQ queue. + Verified by performing a raw dequeue immediately after. + """ + channel = "test_ora_channel" + payload = "task-id-oracle-notify" + queue_name = _oracle_queue_name(channel) + + await ora_adapter.notify(channel, payload) + + async with ora_engine.begin() as conn: + raw = await conn.get_raw_connection() + driver_conn = raw.driver_connection + queue = await anyio.to_thread.run_sync(driver_conn.queue, queue_name) + queue.deqoptions.wait = oracledb.DEQ_NO_WAIT + queue.deqoptions.navigation = oracledb.DEQ_FIRST_MSG + msg = await queue.deqone() + + assert msg is not None, "Expected a message in the AQ queue after notify()" + assert msg.payload.decode() == payload + + +@pytest.mark.oracle +@pytest.mark.anyio +async def test_listen_yields_notified_payload( + ora_adapter: OracleDialectAdapter, +) -> None: + """listen() must yield the payload enqueued by notify().""" + channel = "test_ora_channel" + payload = "task-id-listen-test" + received: list[str] = [] + + await ora_adapter.notify(channel, payload) + + async def _collect() -> None: + async for p in ora_adapter.listen(channel): + received.append(p) + ora_adapter._stop_event.set() + return + + with 
anyio.fail_after(10.0): + await _collect() + + assert received == [payload] + + +@pytest.mark.oracle +@pytest.mark.anyio +async def test_listen_stops_on_stop_event( + ora_engine: AsyncEngine, +) -> None: + """ + A pre-set _stop_event causes listen() to exit immediately + without yielding anything (empty queue). + """ + adapter = OracleDialectAdapter(ora_engine) + await adapter.ensure_queue("test_ora_channel") + adapter._stop_event.set() + + collected: list[str] = [] + with anyio.fail_after(5.0): + async for p in adapter.listen("test_ora_channel"): + collected.append(p) + + assert collected == [] diff --git a/tests/broker/adapters/test_polling.py b/tests/broker/adapters/test_polling.py new file mode 100644 index 0000000..12d13ca --- /dev/null +++ b/tests/broker/adapters/test_polling.py @@ -0,0 +1,202 @@ +"""tests/broker/adapters/test_polling.py + +Tests for ``PollingAdapter`` in isolation. + +All tests use SQLite (aiosqlite) — zero external services required. +The schema lifecycle mirrors result_backend: create_all before, drop_all after. 
+""" + +import typing as t +import uuid + +import anyio +import pytest +import sqlalchemy as sa +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine +from sqlalchemy.orm import DeclarativeBase + +from taskiq_sqlalchemy.adapters.polling import PollingAdapter +from taskiq_sqlalchemy.manager import SQLAlchemyManager + +pytestmark = pytest.mark.anyio + + +@pytest.fixture +async def sqlite_engine() -> t.AsyncGenerator[AsyncEngine, None]: + engine = create_async_engine("sqlite+aiosqlite:///:memory:", echo=False) + try: + yield engine + finally: + await engine.dispose() + + +@pytest.fixture +async def polling_manager( + sqlite_engine: AsyncEngine, +) -> t.AsyncGenerator[SQLAlchemyManager, None]: + class _Base(DeclarativeBase): + pass + + manager = SQLAlchemyManager() + manager.register_tables(base_classes=(_Base,)) + manager.configure(engine=sqlite_engine) + + async with sqlite_engine.begin() as conn: + await conn.run_sync(_Base.metadata.create_all) + + try: + yield manager + finally: + async with sqlite_engine.begin() as conn: + await conn.run_sync(_Base.metadata.drop_all) + + +@pytest.fixture +def adapter(polling_manager: SQLAlchemyManager) -> PollingAdapter: + a = PollingAdapter( + polling_manager.engine, + queue_cls=polling_manager.queue_cls, + ) + a.poll_interval = 0.05 # speed up for tests + return a + + +async def _insert_queue_rows( + manager: SQLAlchemyManager, + channel: str, + count: int, +) -> list[str]: + """Insert ``count`` rows into the queue; return their task_ids.""" + ids = [str(uuid.uuid4()) for _ in range(count)] + async with manager.engine.begin() as conn: + for tid in ids: + await conn.execute( + sa.insert(manager.queue_cls).values( + task_id=tid, + channel=channel, + task_name="tests.fake_task", + message=b"{}", + ) + ) + return ids + + +async def test_polling_notify_is_noop( + adapter: PollingAdapter, + polling_manager: SQLAlchemyManager, +) -> None: + """notify() completes without error and does not write to the DB.""" + # No rows 
before + async with polling_manager.engine.connect() as conn: + count_before = ( + await conn.execute( + sa.select(sa.func.count()).select_from(polling_manager.queue_cls) + ) + ).scalar() + + await adapter.notify("any_channel", "any_payload") + + # Still no rows + async with polling_manager.engine.connect() as conn: + count_after = ( + await conn.execute( + sa.select(sa.func.count()).select_from(polling_manager.queue_cls) + ) + ).scalar() + + assert count_before == count_after == 0 + + +async def test_poll_yields_task_ids_from_db( + adapter: PollingAdapter, + polling_manager: SQLAlchemyManager, +) -> None: + """ + Pre-insert rows; the first poll cycle must yield all their task_ids. + The broker deletes the row when it claims it, but the adapter only yields + the id — we collect directly from the generator here. + """ + channel = "test_channel" + insert_count = 3 + inserted_ids = await _insert_queue_rows( + polling_manager, channel=channel, count=insert_count + ) + + collected: list[str] = [] + + async def _drain() -> None: + async for task_id in adapter.listen(channel): + collected.append(task_id) + if len(collected) >= insert_count: + adapter._stop_event.set() + return + + with anyio.fail_after(5.0): + await _drain() + + assert set(collected) == set(inserted_ids) + + +async def test_poll_skips_other_channels( + adapter: PollingAdapter, + polling_manager: SQLAlchemyManager, +) -> None: + """Rows on channel_b must not appear when polling channel_a.""" + await _insert_queue_rows(polling_manager, channel="channel_b", count=2) + + collected: list[str] = [] + + # Poll channel_a for a short window — should get nothing + async def _drain() -> None: + async for task_id in adapter.listen("channel_a"): + collected.append(task_id) + return + + with anyio.move_on_after(0.3): + await _drain() + + assert collected == [], "channel_b rows must not bleed into channel_a" + + +async def test_poll_stops_on_stop_event( + adapter: PollingAdapter, +) -> None: + """Setting _stop_event 
causes the listen() generator to exit cleanly.""" + # Set the stop event before starting — generator should exit immediately + adapter._stop_event.set() + + collected: list[str] = [] + with anyio.fail_after(2.0): + async for task_id in adapter.listen("any_channel"): + collected.append(task_id) + + # The generator exited without hanging + assert collected == [] + + +async def test_poll_respects_limit( + adapter: PollingAdapter, + polling_manager: SQLAlchemyManager, +) -> None: + """ + Insert 30 rows; the first poll cycle must yield at most 25 (the .limit(25) + guard in _poll_gen). We verify this by counting what one pass sees before + the stop event fires. + """ + channel = "limit_test" + await _insert_queue_rows(polling_manager, channel=channel, count=30) + + collected: list[str] = [] + + async def _one_pass() -> None: + async for task_id in adapter.listen(channel): + collected.append(task_id) + if len(collected) >= adapter.POLL_RESULT_LIMIT: + adapter._stop_event.set() + return + + with anyio.fail_after(5.0): + await _one_pass() + + # We stopped after exactly 25; the batch must have had exactly 25 items + assert len(collected) == adapter.POLL_RESULT_LIMIT diff --git a/tests/broker/adapters/test_postgresql.py b/tests/broker/adapters/test_postgresql.py new file mode 100644 index 0000000..eb27dcb --- /dev/null +++ b/tests/broker/adapters/test_postgresql.py @@ -0,0 +1,113 @@ +"""tests/broker/adapters/test_postgresql.py + +Tests for ``PostgresDialectAdapter`` in isolation. 
+""" + +import asyncio +import typing as t + +import anyio +import pytest +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine + +from taskiq_sqlalchemy.adapters.postgresql import PostgresDialectAdapter + +pytestmark = [pytest.mark.anyio, pytest.mark.postgresql] + +_PG_URL = "postgresql+asyncpg://taskiq_user:taskiq_pwd@localhost:5432/taskiq" + + +@pytest.fixture +async def pg_engine() -> t.AsyncGenerator[AsyncEngine, None]: + engine = create_async_engine(_PG_URL, echo=False) + + try: + yield engine + finally: + await engine.dispose() + + +@pytest.fixture +async def pg_adapter( + pg_engine: AsyncEngine, +) -> t.AsyncGenerator[PostgresDialectAdapter, None]: + adapter = PostgresDialectAdapter(pg_engine) + await adapter.worker_startup() + try: + yield adapter + finally: + await adapter.worker_shutdown() + + +async def test_notify_sends_pg_notify(pg_engine: AsyncEngine) -> None: + """ + notify() must deliver a payload via pg_notify. + We verify by holding a raw asyncpg LISTEN and asserting the notification + arrives within a short timeout. 
+ """ + channel = "test_notify_channel" + payload = "hello-from-notify" + received: list[str] = [] + + # Acquire a raw asyncpg connection to LISTEN + raw_conn = await pg_engine.raw_connection() + driver_conn = raw_conn.driver_connection + + def _on_notification(conn: object, pid: int, ch: str, p: str) -> None: + received.append(p) + + await driver_conn.add_listener(channel, _on_notification) + + try: + adapter = PostgresDialectAdapter(pg_engine) + await adapter.notify(channel, payload) + + # Give Postgres a moment to deliver the notification + with anyio.fail_after(3.0): + while payload not in received: + await asyncio.sleep(0.05) + finally: + await driver_conn.remove_listener(channel, _on_notification) + raw_conn.close() + + assert payload in received + + +async def test_listen_receives_notification(pg_adapter: PostgresDialectAdapter) -> None: + """ + After worker_startup(), listen() must yield the payload sent by notify(). + """ + channel = "test_listen_channel" + payload = "task-id-xyz" + received: list[str] = [] + + async def _collect() -> None: + async for p in pg_adapter.listen(channel): + received.append(p) + return # stop after first message + + # Start listener in background, then notify + async with anyio.create_task_group() as tg: + tg.start_soon(_collect) + # Small delay to let the LISTEN register before we notify + await asyncio.sleep(0.1) + await pg_adapter.notify(channel, payload) + + with anyio.fail_after(5.0): + while not received: + await asyncio.sleep(0.05) + tg.cancel_scope.cancel() + + assert received == [payload] + + +async def test_worker_shutdown_releases_connection( + pg_engine: AsyncEngine, +) -> None: + """After worker_shutdown(), _listener_conn is None.""" + adapter = PostgresDialectAdapter(pg_engine) + await adapter.worker_startup() + assert adapter._listener_conn is not None + + await adapter.worker_shutdown() + assert adapter._listener_conn is None diff --git a/tests/broker/conftest.py b/tests/broker/conftest.py new file mode 100644 
index 0000000..e3bac79 --- /dev/null +++ b/tests/broker/conftest.py @@ -0,0 +1,175 @@ +"""tests/broker/conftest.py + +Fixtures for broker tests. +""" + +import asyncio +from typing import AsyncGenerator + +import pytest +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine +from sqlalchemy.orm import DeclarativeBase +from taskiq import BrokerMessage + +from taskiq_sqlalchemy.adapters.abc import DialectAdapter +from taskiq_sqlalchemy.broker import SQLAlchemyBroker +from taskiq_sqlalchemy.manager import SQLAlchemyManager + + +class FakeAdapter(DialectAdapter): + """ + Test double for DialectAdapter. + + ``notify_calls`` captures every (channel, payload) pair sent via notify(). + ``_inbound`` is an asyncio.Queue; tests push strings into it to simulate + incoming messages that listen() will yield. + """ + + def __init__(self, engine: AsyncEngine) -> None: + super().__init__(engine) + self.notify_calls: list[tuple[str, str]] = [] + self._inbound: asyncio.Queue[str] = asyncio.Queue() + + async def notify(self, channel: str, payload: str) -> None: + self.notify_calls.append((channel, payload)) + + def listen(self, channel: str) -> AsyncGenerator[str, None]: + return self._listen_gen(channel) + + async def _listen_gen(self, channel: str) -> AsyncGenerator[str, None]: + while True: + yield await self._inbound.get() + + def feed(self, task_id: str) -> None: + """Push a task_id into the inbound queue (simulates a notify).""" + self._inbound.put_nowait(task_id) + + +# Map of (pytest param id) → (SQLAlchemy async URL) +_ENGINE_PARAMS: list = [ + pytest.param( + "sqlite+aiosqlite:///:memory:", + id="sqlite+aiosqlite", + ), + pytest.param( + "postgresql+asyncpg://taskiq_user:taskiq_pwd@localhost:5432/taskiq", + id="postgresql+asyncpg", + marks=pytest.mark.postgresql, + ), + pytest.param( + "postgresql+psycopg://taskiq_user:taskiq_pwd@localhost:5432/taskiq", + id="postgresql+psycopg", + marks=pytest.mark.postgresql, + ), + pytest.param( + 
"oracle+oracledb://taskiq_user:taskiq_pwd@localhost:1521/?service_name=taskiq", + id="oracle+oracledb", + marks=pytest.mark.oracle, + ), +] + + +@pytest.fixture(params=_ENGINE_PARAMS) +async def async_engine( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncEngine, None]: + """One AsyncEngine per dialect/driver; skipped if DB unreachable.""" + url: str = request.param + engine = create_async_engine(url) + try: + yield engine + finally: + await engine.dispose() + + +@pytest.fixture +async def manager_with_schema( + async_engine: AsyncEngine, +) -> AsyncGenerator[SQLAlchemyManager, None]: + """ + Creates all taskiq tables (queue + result + schedule), yields a configured + manager, then drops all tables — giving every test a clean slate. + """ + + class _Base(DeclarativeBase): + pass + + manager = SQLAlchemyManager() + manager.register_tables(base_classes=(_Base,)) + manager.configure(engine=async_engine) + + async with async_engine.begin() as conn: + await conn.run_sync(_Base.metadata.create_all) + + try: + yield manager + finally: + async with async_engine.begin() as conn: + await conn.run_sync(_Base.metadata.drop_all) + + +@pytest.fixture +def fake_adapter(async_engine: AsyncEngine) -> FakeAdapter: + """Fresh FakeAdapter bound to the same engine as the current test.""" + return FakeAdapter(async_engine) + + +@pytest.fixture +async def broker( + manager_with_schema: SQLAlchemyManager, + fake_adapter: FakeAdapter, +) -> AsyncGenerator[SQLAlchemyBroker, None]: + """ + Broker wired with the FakeAdapter — suitable for kick/fetch tests that + don't need real pub/sub delivery. 
+ """ + b = SQLAlchemyBroker( + manager_with_schema, + channel_name="test_channel", + adapter=fake_adapter, + ) + await b.startup() + try: + yield b + finally: + await b.shutdown() + + +@pytest.fixture +async def polling_broker( + manager_with_schema: SQLAlchemyManager, +) -> AsyncGenerator[SQLAlchemyBroker, None]: + """ + Broker wired with the real PollingAdapter — for end-to-end listen() tests. + Works with any engine (SQLite always, Postgres when available). + """ + from taskiq_sqlalchemy.adapters.polling import PollingAdapter + + adapter = PollingAdapter( + manager_with_schema.engine, + queue_cls=manager_with_schema.queue_cls, + ) + # Speed up polling for tests + adapter.poll_interval = 0.05 + + b = SQLAlchemyBroker( + manager_with_schema, + channel_name="test_channel", + adapter=adapter, + ) + await b.startup() + try: + yield b + finally: + await b.shutdown() + + +@pytest.fixture +def broker_message(task_id: str) -> BrokerMessage: + """A minimal valid BrokerMessage.""" + return BrokerMessage( + task_id=task_id, + task_name="tests.fake_task", + message=b'{"args": [], "kwargs": {}}', + labels={}, + ) diff --git a/tests/broker/test_fetch_message.py b/tests/broker/test_fetch_message.py new file mode 100644 index 0000000..08503f1 --- /dev/null +++ b/tests/broker/test_fetch_message.py @@ -0,0 +1,164 @@ +"""tests/broker/test_fetch_message.py + +Tests for ``SQLAlchemyBroker._fetch_message()``. 
+""" + +import asyncio +import typing as t + +import pytest +import sqlalchemy as sa +from taskiq import BrokerMessage + +from taskiq_sqlalchemy.broker import SQLAlchemyBroker +from taskiq_sqlalchemy.manager import SQLAlchemyManager + +pytestmark = pytest.mark.anyio + + +async def _insert_queue_row( + manager: SQLAlchemyManager, + *, + task_id: str, + channel: str, + message_bytes: bytes, + task_name: str = "tests.fake_task", +) -> None: + async with manager.engine.begin() as conn: + await conn.execute( + sa.insert(manager.queue_cls).values( + task_id=task_id, + channel=channel, + task_name=task_name, + message=message_bytes, + ) + ) + + +def _valid_message_bytes(broker_message: BrokerMessage) -> bytes: + return broker_message.model_dump_json().encode() + + +async def test_fetch_message_returns_ackable( + broker: SQLAlchemyBroker, + manager_with_schema: SQLAlchemyManager, + broker_message: BrokerMessage, +) -> None: + """_fetch_message returns AckableMessage with correct payload; row is gone.""" + raw = _valid_message_bytes(broker_message) + await _insert_queue_row( + manager_with_schema, + task_id=broker_message.task_id, + channel=broker.channel_name, + message_bytes=raw, + ) + + result = await broker._fetch_message(broker_message.task_id) + + assert result is not None, "Expected an AckableMessage" + assert result.data == broker_message.message + + # Row must have been deleted + async with manager_with_schema.engine.connect() as conn: + row = ( + await conn.execute( + sa.select(manager_with_schema.queue_cls).where( + manager_with_schema.queue_cls.task_id == broker_message.task_id + ) + ) + ).fetchone() + assert row is None, "Row should be deleted after _fetch_message" + + +async def test_fetch_message_missing_returns_none( + broker: SQLAlchemyBroker, + task_id: str, +) -> None: + """Returns None for a task_id that is not in the queue.""" + result = await broker._fetch_message(task_id) + assert result is None + + +async def 
test_fetch_message_wrong_channel_returns_none( + broker: SQLAlchemyBroker, + manager_with_schema: SQLAlchemyManager, + broker_message: BrokerMessage, +) -> None: + """A row on a different channel is invisible to this broker.""" + raw = _valid_message_bytes(broker_message) + await _insert_queue_row( + manager_with_schema, + task_id=broker_message.task_id, + channel="other_channel", # different from broker.channel_name + message_bytes=raw, + ) + + result = await broker._fetch_message(broker_message.task_id) + assert result is None, "Should not claim a row on a different channel" + + +async def test_fetch_message_ack_is_noop( + broker: SQLAlchemyBroker, + manager_with_schema: SQLAlchemyManager, + broker_message: BrokerMessage, +) -> None: + """Calling ack() after _fetch_message completes without error.""" + raw = _valid_message_bytes(broker_message) + await _insert_queue_row( + manager_with_schema, + task_id=broker_message.task_id, + channel=broker.channel_name, + message_bytes=raw, + ) + + result = await broker._fetch_message(broker_message.task_id) + assert result is not None + # Must not raise + await result.ack() + + +async def test_fetch_message_bad_json_returns_none( + broker: SQLAlchemyBroker, + manager_with_schema: SQLAlchemyManager, + task_id: str, +) -> None: + """A row with invalid JSON in message returns None instead of raising.""" + await _insert_queue_row( + manager_with_schema, + task_id=task_id, + channel=broker.channel_name, + message_bytes=b"this is not json {{{", + ) + + result = await broker._fetch_message(task_id) + assert result is None, "Corrupt message should be swallowed and return None" + + +async def test_fetch_message_atomic_double_claim( + broker: SQLAlchemyBroker, + manager_with_schema: SQLAlchemyManager, + broker_message: BrokerMessage, +) -> None: + """ + Two concurrent _fetch_message() calls for the same task_id: + exactly one must succeed and the other must return None. + + This tests the DELETE … RETURNING atomicity guarantee. 
+ """ + raw = _valid_message_bytes(broker_message) + await _insert_queue_row( + manager_with_schema, + task_id=broker_message.task_id, + channel=broker.channel_name, + message_bytes=raw, + ) + + results: t.Sequence[t.Any] = await asyncio.gather( + broker._fetch_message(broker_message.task_id), + broker._fetch_message(broker_message.task_id), + ) + + non_none = [r for r in results if r is not None] + assert len(non_none) == 1, ( + f"Exactly one claim should succeed; got {len(non_none)} non-None results" + ) diff --git a/tests/broker/test_kick.py b/tests/broker/test_kick.py new file mode 100644 index 0000000..2198a2c --- /dev/null +++ b/tests/broker/test_kick.py @@ -0,0 +1,162 @@ +"""tests/broker/test_kick.py + +Tests for ``SQLAlchemyBroker.kick()``. +""" + +import uuid + +import pytest +import sqlalchemy as sa +from taskiq import BrokerMessage + +from taskiq_sqlalchemy.broker import SQLAlchemyBroker +from taskiq_sqlalchemy.manager import SQLAlchemyManager + +from .conftest import FakeAdapter + +pytestmark = pytest.mark.anyio + + +async def test_kick_inserts_row( + broker: SQLAlchemyBroker, + manager_with_schema: SQLAlchemyManager, + broker_message: BrokerMessage, +) -> None: + """kick() persists a row in taskiq_queue with the expected field values.""" + await broker.kick(broker_message) + + async with manager_with_schema.engine.connect() as conn: + row = ( + await conn.execute( + sa.select(manager_with_schema.queue_cls).where( + manager_with_schema.queue_cls.task_id == broker_message.task_id + ) + ) + ).fetchone() + + assert row is not None, "Expected a row in taskiq_queue after kick()" + assert row.task_id == broker_message.task_id + assert row.task_name == broker_message.task_name + assert row.channel == broker.channel_name + assert row.message is not None + + +async def test_kick_serializes_broker_message( + broker: SQLAlchemyBroker, + manager_with_schema: SQLAlchemyManager, + broker_message: BrokerMessage, +) -> None: + """The bytes stored in message column 
round-trip to the original BrokerMessage.""" + await broker.kick(broker_message) + + async with manager_with_schema.engine.connect() as conn: + row = ( + await conn.execute( + sa.select(manager_with_schema.queue_cls).where( + manager_with_schema.queue_cls.task_id == broker_message.task_id + ) + ) + ).fetchone() + + assert row is not None + recovered = BrokerMessage.model_validate_json(row.message) + assert recovered.task_id == broker_message.task_id + assert recovered.task_name == broker_message.task_name + assert recovered.message == broker_message.message + + +async def test_kick_calls_adapter_notify( + broker: SQLAlchemyBroker, + fake_adapter: FakeAdapter, + broker_message: BrokerMessage, +) -> None: + """notify() is called exactly once with the correct channel and task_id.""" + assert fake_adapter.notify_calls == [] + + await broker.kick(broker_message) + + assert len(fake_adapter.notify_calls) == 1 + channel, payload = fake_adapter.notify_calls[0] + assert channel == broker.channel_name + assert payload == broker_message.task_id + + +async def test_kick_notify_outside_transaction( + manager_with_schema: SQLAlchemyManager, + fake_adapter: FakeAdapter, + broker_message: BrokerMessage, +) -> None: + """ + The row must be visible in the DB at the moment notify() fires. + + We verify this by subclassing FakeAdapter to run a DB read *inside* + notify() and asserting the row already exists. 
+ """ + row_visible_during_notify = False + + class _SpyAdapter(FakeAdapter): + async def notify(self, channel: str, payload: str) -> None: + nonlocal row_visible_during_notify + async with self.engine.connect() as conn: + row = ( + await conn.execute( + sa.select(manager_with_schema.queue_cls).where( + manager_with_schema.queue_cls.task_id == payload + ) + ) + ).fetchone() + row_visible_during_notify = row is not None + await super().notify(channel, payload) + + spy = _SpyAdapter(manager_with_schema.engine) + b = SQLAlchemyBroker( + manager_with_schema, + channel_name="test_channel", + adapter=spy, + ) + await b.startup() + try: + await b.kick(broker_message) + finally: + await b.shutdown() + + assert row_visible_during_notify, ( + "Row must be committed to DB before notify() is called" + ) + + +async def test_kick_multiple_messages( + broker: SQLAlchemyBroker, + manager_with_schema: SQLAlchemyManager, + fake_adapter: FakeAdapter, +) -> None: + """Kicking N messages creates exactly N rows with distinct task_ids.""" + + messages = [ + BrokerMessage( + task_id=str(uuid.uuid4()), + task_name="tests.fake_task", + message=b'{"args": [], "kwargs": {}}', + labels={}, + ) + for _ in range(5) + ] + + for msg in messages: + await broker.kick(msg) + + async with manager_with_schema.engine.connect() as conn: + rows = ( + await conn.execute( + sa.select(manager_with_schema.queue_cls).where( + manager_with_schema.queue_cls.channel == broker.channel_name + ) + ) + ).fetchall() + + assert len(rows) == 5 + stored_ids = {row.task_id for row in rows} + expected_ids = {msg.task_id for msg in messages} + assert stored_ids == expected_ids + # notify() called once per message + assert len(fake_adapter.notify_calls) == 5 diff --git a/tests/broker/test_listen.py b/tests/broker/test_listen.py new file mode 100644 index 0000000..38daa13 --- /dev/null +++ b/tests/broker/test_listen.py @@ -0,0 +1,204 @@ +"""tests/broker/test_listen.py + +End-to-end tests for ``SQLAlchemyBroker.listen()`` 
using the real PollingAdapter +against a SQLite in-memory database. + +We deliberately do NOT parametrize over PostgreSQL here: Postgres has its own +push-based adapter (PostgresDialectAdapter) that is tested separately. The +PollingAdapter + SQLite combination gives us full functional coverage of the +broker's listen() / kick() / _fetch_message() pipeline with zero external +services. +""" + +import typing as t +import uuid + +import anyio +import pytest +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine +from sqlalchemy.orm import DeclarativeBase +from taskiq import AckableMessage, BrokerMessage + +from taskiq_sqlalchemy.adapters.polling import PollingAdapter +from taskiq_sqlalchemy.broker import SQLAlchemyBroker +from taskiq_sqlalchemy.manager import SQLAlchemyManager + +pytestmark = pytest.mark.anyio + + +@pytest.fixture +async def sqlite_engine() -> t.AsyncGenerator[AsyncEngine, None]: + engine = create_async_engine("sqlite+aiosqlite:///:memory:", echo=False) + try: + yield engine + finally: + await engine.dispose() + + +@pytest.fixture +async def manager_with_schema( # type: ignore[override] + sqlite_engine: AsyncEngine, +) -> t.AsyncGenerator[SQLAlchemyManager, None]: + class _Base(DeclarativeBase): + pass + + manager = SQLAlchemyManager() + manager.register_tables(base_classes=(_Base,)) + manager.configure(engine=sqlite_engine) + + async with sqlite_engine.begin() as conn: + await conn.run_sync(_Base.metadata.create_all) + + try: + yield manager + finally: + async with sqlite_engine.begin() as conn: + await conn.run_sync(_Base.metadata.drop_all) + + +@pytest.fixture +async def polling_broker( # type: ignore[override] + manager_with_schema: SQLAlchemyManager, +) -> t.AsyncGenerator[SQLAlchemyBroker, None]: + adapter = PollingAdapter( + manager_with_schema.engine, + queue_cls=manager_with_schema.queue_cls, + ) + adapter.POLL_INTERVAL_SECS = 0.05 + b = SQLAlchemyBroker( + manager_with_schema, + channel_name="test_channel", + adapter=adapter, 
+ ) + await b.startup() + try: + yield b + finally: + await b.shutdown() + + +async def _collect( + broker: SQLAlchemyBroker, + n: int, + timeout: float = 5.0, +) -> list[AckableMessage]: + """ + Drive broker.listen() and collect the first ``n`` messages. + Raises TimeoutError if they don't arrive within ``timeout`` seconds. + """ + collected: list[AckableMessage] = [] + + async def _drain() -> None: + async for msg in broker.listen(): + collected.append(msg) + if len(collected) >= n: + return + + with anyio.fail_after(timeout): + await _drain() + + return collected + + +async def test_listen_yields_kicked_message( + polling_broker: SQLAlchemyBroker, + broker_message: BrokerMessage, +) -> None: + """ + kick() a single message; the first listen() yield must carry the same + payload. + """ + await polling_broker.kick(broker_message) + + messages = await _collect(polling_broker, n=1) + + assert len(messages) == 1 + assert messages[0].data == broker_message.message + + +async def test_listen_multiple_messages_in_order( + polling_broker: SQLAlchemyBroker, +) -> None: + """ + Kick 3 messages; collect 3 via listen(); all payloads present, no duplicates. + """ + messages_sent = [ + BrokerMessage( + task_id=str(uuid.uuid4()), + task_name="tests.fake_task", + message=f"payload-{i}".encode(), + labels={}, + ) + for i in range(3) + ] + + for msg in messages_sent: + await polling_broker.kick(msg) + + received = await _collect(polling_broker, n=3) + + assert len(received) == 3 + received_payloads = {msg.data for msg in received} + expected_payloads = {msg.message for msg in messages_sent} + assert received_payloads == expected_payloads + + +async def test_listen_channel_isolation( + manager_with_schema: SQLAlchemyManager, +) -> None: + """ + Messages kicked on channel A must NOT be delivered to a broker listening + on channel B. 
+ """ + + def _make_broker(channel: str) -> SQLAlchemyBroker: + adapter = PollingAdapter( + manager_with_schema.engine, + queue_cls=manager_with_schema.queue_cls, + ) + adapter.POLL_INTERVAL_SECS = 0.05 + return SQLAlchemyBroker( + manager_with_schema, + channel_name=channel, + adapter=adapter, + ) + + broker_a = _make_broker("channel_a") + broker_b = _make_broker("channel_b") + + await broker_a.startup() + await broker_b.startup() + + try: + # Kick one message on channel_a + msg_a = BrokerMessage( + task_id=str(uuid.uuid4()), + task_name="tests.fake_task", + message=b"for-channel-a", + labels={}, + ) + await broker_a.kick(msg_a) + + # broker_b should yield nothing within a short window + received_by_b: list[AckableMessage] = [] + + async def _try_listen_b() -> None: + async for msg in broker_b.listen(): + received_by_b.append(msg) + return + + # Give broker_b a brief window to (incorrectly) receive the message + with anyio.move_on_after(0.3): + await _try_listen_b() + + assert received_by_b == [], ( + "broker_b must not receive messages sent on channel_a" + ) + + # Confirm broker_a does receive it + received_by_a = await _collect(broker_a, n=1, timeout=3.0) + assert received_by_a[0].data == msg_a.message + + finally: + await broker_a.shutdown() + await broker_b.shutdown() diff --git a/tests/broker/test_resolve_adapter.py b/tests/broker/test_resolve_adapter.py new file mode 100644 index 0000000..916cbc8 --- /dev/null +++ b/tests/broker/test_resolve_adapter.py @@ -0,0 +1,50 @@ +from unittest.mock import MagicMock + +from taskiq_sqlalchemy.adapters import resolve_adapter +from taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter +from taskiq_sqlalchemy.adapters.polling import PollingAdapter +from taskiq_sqlalchemy.adapters.postgresql import PostgresDialectAdapter +from taskiq_sqlalchemy.manager import SQLAlchemyManager + + +def _mock_manager(dialect_name: str) -> SQLAlchemyManager: + """Build a SQLAlchemyManager whose engine reports the given dialect 
name.""" + engine = MagicMock() + engine.dialect.name = dialect_name + + queue_cls = MagicMock() + + manager = SQLAlchemyManager.__new__(SQLAlchemyManager) + manager.engine = engine + manager.queue_cls = queue_cls + manager.result_cls = MagicMock() + manager.schedule_cls = MagicMock() + return manager + + +def test_resolve_postgres_adapter() -> None: + """postgresql dialect resolves to PostgresDialectAdapter.""" + manager = _mock_manager("postgresql") + adapter = resolve_adapter(manager) + assert isinstance(adapter, PostgresDialectAdapter) + + +def test_resolve_oracle_adapter() -> None: + """oracle dialect resolves to OracleDialectAdapter.""" + manager = _mock_manager("oracle") + adapter = resolve_adapter(manager) + assert isinstance(adapter, OracleDialectAdapter) + + +def test_resolve_polling_adapter_for_sqlite() -> None: + """sqlite dialect falls back to PollingAdapter.""" + manager = _mock_manager("sqlite") + adapter = resolve_adapter(manager) + assert isinstance(adapter, PollingAdapter) + + +def test_resolve_polling_adapter_for_unknown_dialect() -> None: + """Any unrecognised dialect (e.g. 
mssql) also falls back to PollingAdapter.""" + manager = _mock_manager("mysql") + adapter = resolve_adapter(manager) + assert isinstance(adapter, PollingAdapter) From c0eca22c21dd87175f76c28e06aee6bd7f1a7215 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 08:10:13 +0530 Subject: [PATCH 13/25] chore: Handle optional deps similar to sqlalchemy --- pyproject.toml | 34 +++++++++------------------------- uv.lock | 20 +++++++++++--------- 2 files changed, 20 insertions(+), 34 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4cfd167..e8e4562 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,29 +33,23 @@ classifiers = [ "Typing :: Typed", "Operating System :: OS Independent", ] -keywords = [ - "taskiq", - "tasks", - "distributed", - "async", - "postgresql", - "asyncpg", - "psycopg3", - "sqlalchemy", -] - dependencies = ["taskiq>=0.11.7", 'sqlalchemy>=2', 'anyio>=4'] [project.optional-dependencies] -all = ["taskiq_sqlalchemy[postgresql,sqlite,oracle]"] +all = [ + "taskiq_sqlalchemy[postgresql-asyncpg,postgresql-psycopg,sqlite-aiosqlite,oracle-oracledb]", +] -postgresql = ["asyncpg", "psycopg[binary,pool]"] -sqlite = ["aiosqlite"] -oracle = ["oracledb"] +postgresql-asyncpg = ["asyncpg"] +postgresql-psycopg = ["psycopg[binary,pool]"] +sqlite-aiosqlite = ["aiosqlite"] +oracle-oracledb = ["oracledb"] [project.urls] Homepage = "https://github.com/corridor/taskiq-sqlalchemy" Documentation = "https://github.com/corridor/taskiq-sqlalchemy" +"Source Code" = "https://github.com/corridor/taskiq-sqlalchemy" +"Issue Tracker" = "https://github.com/corridor/taskiq-sqlalchemy/issues" [dependency-groups] dev = [ @@ -152,13 +146,3 @@ extend-immutable-calls = ["taskiq_dependencies.Depends", "taskiq.TaskiqDepends"] "S311", # Standard pseudo-random generators are not suitable for security/cryptographic purposes "D101", # Missing docstring in public class ] -"scripts/*" = [ - "S101", # Use of assert detected - "S301", # Use of pickle detected - "D103", # 
Missing docstring in public function - "SLF001", # Private member accessed - "S311", # Standard pseudo-random generators are not suitable for security/cryptographic purposes - "D101", # Missing docstring in public class - "T201", # Use of assert detected - "S603", # Use of subprocess.run detected -] diff --git a/uv.lock b/uv.lock index 51a924e..a92d771 100644 --- a/uv.lock +++ b/uv.lock @@ -1236,14 +1236,16 @@ all = [ { name = "oracledb" }, { name = "psycopg", extra = ["binary", "pool"] }, ] -oracle = [ +oracle-oracledb = [ { name = "oracledb" }, ] -postgresql = [ +postgresql-asyncpg = [ { name = "asyncpg" }, +] +postgresql-psycopg = [ { name = "psycopg", extra = ["binary", "pool"] }, ] -sqlite = [ +sqlite-aiosqlite = [ { name = "aiosqlite" }, ] @@ -1267,16 +1269,16 @@ dev = [ [package.metadata] requires-dist = [ - { name = "aiosqlite", marker = "extra == 'sqlite'" }, + { name = "aiosqlite", marker = "extra == 'sqlite-aiosqlite'" }, { name = "anyio", specifier = ">=4" }, - { name = "asyncpg", marker = "extra == 'postgresql'" }, - { name = "oracledb", marker = "extra == 'oracle'" }, - { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'postgresql'" }, + { name = "asyncpg", marker = "extra == 'postgresql-asyncpg'" }, + { name = "oracledb", marker = "extra == 'oracle-oracledb'" }, + { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'postgresql-psycopg'" }, { name = "sqlalchemy", specifier = ">=2" }, { name = "taskiq", specifier = ">=0.11.7" }, - { name = "taskiq-sqlalchemy", extras = ["postgresql", "sqlite", "oracle"], marker = "extra == 'all'" }, + { name = "taskiq-sqlalchemy", extras = ["postgresql-asyncpg", "postgresql-psycopg", "sqlite-aiosqlite", "oracle-oracledb"], marker = "extra == 'all'" }, ] -provides-extras = ["all", "postgresql", "sqlite", "oracle"] +provides-extras = ["all", "postgresql-asyncpg", "postgresql-psycopg", "sqlite-aiosqlite", "oracle-oracledb"] [package.metadata.requires-dev] dev = [ From 
7b9c39ae3a9ca06bb2c4e561a4796385c90bd05c Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 08:22:43 +0530 Subject: [PATCH 14/25] refactor: Handle typing better in SQLAlchemy manager --- taskiq_sqlalchemy/adapters/__init__.py | 9 ++++- taskiq_sqlalchemy/manager.py | 47 ++++++++++++++++++-------- tests/broker/adapters/test_polling.py | 3 +- tests/broker/conftest.py | 3 +- tests/broker/test_listen.py | 3 +- tests/broker/test_resolve_adapter.py | 2 +- tests/result_backend/conftest.py | 6 ++-- 7 files changed, 46 insertions(+), 27 deletions(-) diff --git a/taskiq_sqlalchemy/adapters/__init__.py b/taskiq_sqlalchemy/adapters/__init__.py index 4917bdd..31f68f2 100644 --- a/taskiq_sqlalchemy/adapters/__init__.py +++ b/taskiq_sqlalchemy/adapters/__init__.py @@ -1,7 +1,14 @@ +import typing as t + +from taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter +from taskiq_sqlalchemy.adapters.polling import PollingAdapter +from taskiq_sqlalchemy.adapters.postgresql import PostgresDialectAdapter from taskiq_sqlalchemy.manager import SQLAlchemyManager -def resolve_adapter(manager: SQLAlchemyManager): +def resolve_adapter( + manager: SQLAlchemyManager, +) -> t.Union[PostgresDialectAdapter, OracleDialectAdapter, PollingAdapter]: if manager.engine.dialect.name == "postgresql": from taskiq_sqlalchemy.adapters.postgresql import PostgresDialectAdapter diff --git a/taskiq_sqlalchemy/manager.py b/taskiq_sqlalchemy/manager.py index 7304ef7..cea4428 100644 --- a/taskiq_sqlalchemy/manager.py +++ b/taskiq_sqlalchemy/manager.py @@ -10,46 +10,63 @@ class SQLAlchemyManager: - engine: t.Optional[AsyncEngine] + _engine: t.Optional[AsyncEngine] - queue_cls: t.Optional[type[TaskiqQueueMixin]] - result_cls: t.Optional[type[TaskiqResultMixin]] - schedule_cls: t.Optional[type[TaskiqScheduleMixin]] + queue_cls: type[TaskiqQueueMixin] + result_cls: type[TaskiqResultMixin] + schedule_cls: type[TaskiqScheduleMixin] def __init__( self, + base_classes: t.Sequence[t.Any] = (), 
queue_cls: t.Optional[type[TaskiqQueueMixin]] = None, result_cls: t.Optional[type[TaskiqResultMixin]] = None, schedule_cls: t.Optional[type[TaskiqScheduleMixin]] = None, ) -> None: - self.engine = None + self._engine = None - self.queue_cls = queue_cls - self.result_cls = result_cls - self.schedule_cls = schedule_cls - - def register_tables(self, base_classes: t.Sequence[t.Any]) -> None: - - if self.queue_cls is None: + if queue_cls is None: + if len(base_classes) == 0: + raise ValueError( + "base_classes and queue_cls cannot be empty at the same time", + ) class TaskiqQueue(*base_classes, TaskiqQueueMixin): pass self.queue_cls = TaskiqQueue + else: + self.queue_cls = queue_cls - if self.result_cls is None: + if result_cls is None: + if len(base_classes) == 0: + raise ValueError( + "base_classes and result_cls cannot be empty at the same time", + ) class TaskiqResult(*base_classes, TaskiqResultMixin): pass self.result_cls = TaskiqResult + else: + self.result_cls = result_cls - if self.schedule_cls is None: + if schedule_cls is None: + if len(base_classes) == 0: + raise ValueError("base_classes and schedule_cls cannot be empty at the same time") class TaskiqSchedule(*base_classes, TaskiqScheduleMixin): pass self.schedule_cls = TaskiqSchedule + else: + self.schedule_cls = schedule_cls + + @property + def engine(self) -> AsyncEngine: + if self._engine is None: + raise RuntimeError("This manager is not bound to any SQLAlchemy engine") + return self._engine def configure(self, engine: AsyncEngine) -> None: - self.engine = engine + self._engine = engine diff --git a/tests/broker/adapters/test_polling.py b/tests/broker/adapters/test_polling.py index 12d13ca..7f28fd9 100644 --- a/tests/broker/adapters/test_polling.py +++ b/tests/broker/adapters/test_polling.py @@ -37,8 +37,7 @@ async def polling_manager( class _Base(DeclarativeBase): pass - manager = SQLAlchemyManager() - manager.register_tables(base_classes=(_Base,)) + manager = SQLAlchemyManager(base_classes=(_Base,)) 
manager.configure(engine=sqlite_engine) async with sqlite_engine.begin() as conn: diff --git a/tests/broker/conftest.py b/tests/broker/conftest.py index e3bac79..d132e3e 100644 --- a/tests/broker/conftest.py +++ b/tests/broker/conftest.py @@ -94,8 +94,7 @@ async def manager_with_schema( class _Base(DeclarativeBase): pass - manager = SQLAlchemyManager() - manager.register_tables(base_classes=(_Base,)) + manager = SQLAlchemyManager(base_classes=(_Base,)) manager.configure(engine=async_engine) async with async_engine.begin() as conn: diff --git a/tests/broker/test_listen.py b/tests/broker/test_listen.py index 38daa13..d5b0263 100644 --- a/tests/broker/test_listen.py +++ b/tests/broker/test_listen.py @@ -42,8 +42,7 @@ async def manager_with_schema( # type: ignore[override] class _Base(DeclarativeBase): pass - manager = SQLAlchemyManager() - manager.register_tables(base_classes=(_Base,)) + manager = SQLAlchemyManager(base_classes=(_Base,)) manager.configure(engine=sqlite_engine) async with sqlite_engine.begin() as conn: diff --git a/tests/broker/test_resolve_adapter.py b/tests/broker/test_resolve_adapter.py index 916cbc8..d4f6f3c 100644 --- a/tests/broker/test_resolve_adapter.py +++ b/tests/broker/test_resolve_adapter.py @@ -15,7 +15,7 @@ def _mock_manager(dialect_name: str) -> SQLAlchemyManager: queue_cls = MagicMock() manager = SQLAlchemyManager.__new__(SQLAlchemyManager) - manager.engine = engine + manager._engine = engine manager.queue_cls = queue_cls manager.result_cls = MagicMock() manager.schedule_cls = MagicMock() diff --git a/tests/result_backend/conftest.py b/tests/result_backend/conftest.py index 23c089b..a8bf23e 100644 --- a/tests/result_backend/conftest.py +++ b/tests/result_backend/conftest.py @@ -68,8 +68,7 @@ async def manager_and_backend( class _Base(DeclarativeBase): pass - manager = SQLAlchemyManager() - manager.register_tables(base_classes=(_Base,)) + manager = SQLAlchemyManager(base_classes=(_Base,)) manager.configure(engine=async_engine) # Create 
schema @@ -107,8 +106,7 @@ async def keep_results_false_backend( class _Base(DeclarativeBase): pass - manager = SQLAlchemyManager() - manager.register_tables(base_classes=(_Base,)) + manager = SQLAlchemyManager(base_classes=(_Base,)) manager.configure(engine=async_engine) async with async_engine.begin() as conn: From 59b870b163b4b89e6685bee1b35cf81046b57be9 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 08:45:49 +0530 Subject: [PATCH 15/25] refactor: Add more ruff rules and resolve errors --- pyproject.toml | 96 +++++++++++++------- taskiq_sqlalchemy/__init__.py | 1 + taskiq_sqlalchemy/adapters/__init__.py | 6 +- taskiq_sqlalchemy/adapters/abc.py | 2 +- taskiq_sqlalchemy/adapters/oracle.py | 11 ++- taskiq_sqlalchemy/adapters/polling.py | 2 +- taskiq_sqlalchemy/adapters/postgresql.py | 20 ++-- taskiq_sqlalchemy/broker.py | 7 +- taskiq_sqlalchemy/models.py | 6 +- taskiq_sqlalchemy/result_backend.py | 23 +++-- tests/broker/adapters/test_oracle.py | 8 +- tests/broker/adapters/test_polling.py | 14 +-- tests/broker/adapters/test_postgresql.py | 7 +- tests/broker/conftest.py | 6 +- tests/broker/test_fetch_message.py | 11 +-- tests/broker/test_kick.py | 28 +++--- tests/broker/test_listen.py | 16 ++-- tests/result_backend/conftest.py | 4 +- tests/result_backend/test_get_result.py | 5 +- tests/result_backend/test_is_result_ready.py | 1 + tests/result_backend/test_set_result.py | 17 ++-- 21 files changed, 165 insertions(+), 126 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e8e4562..f145c57 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -91,44 +91,59 @@ markers = [ "oracle: tests that require a running OracleDB server", ] +[tool.ruff] +line-length = 120 +output-format = "grouped" +target-version = "py39" + [tool.ruff.lint] select = [ - "E", # Error - "F", # Pyflakes - "W", # Pycodestyle - "C90", # McCabe complexity - "I", # Isort - "N", # pep8-naming - "ANN", # Pytype annotations - "S", # Bandit - "B", # Bugbear - "COM", # Commas - 
"C4", # Comprehensions - "ISC", # Implicit string concat - "PIE", # Unnecessary code - "T20", # Catch prints - "PYI", # validate pyi files - "Q", # Checks for quotes - "RSE", # Checks raise statements - "RET", # Checks return statements - "SLF", # Self checks - "SIM", # Simplificator - "PTH", # Pathlib checks - "ERA", # Checks for commented out code - "PL", # PyLint checks - "RUF", # Specific to Ruff checks + "A", # flake8-builtins + "ASYNC", # flake8-async + "ANN", # flake8-annotations + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "COM", # flake8-commas + "E", # pycodestyle - errors + "EXE", # flake8-executable + "F", # pyflakes + "FBT", # flake8-boolean-trap + "FLY", # flynt + "G", # flake8-logging-format + "I", # isort + "ICN", # flake8-import-conventions + "ISC", # flake8-implicit-str-concat + "LOG", # flake8-logging + "N", # pep8-naming + "PERF", # perflint + "PIE", # flake8-pie + "PGH", # pygrep-hooks + "PLC", # pylint-convention + "PLE", # pylint-error + "PLR", # pylint-refactor + "PT", # flake8-pytest-style + "PTH", # flake8-use-pathlib + "PYI024", # flake8-pyi + "Q", # flake8-quotes + "RET", # flake8-return + "RSE", # flake8-raise + "RUF", # ruff specific rules + "S", # flake8-bandit + "SIM", # flake8-simplify + "SLOT", # flake8-slots + "T10", # flake8-debugger + "TCH", # flake8-type-checking + "TID", # flake8-tidy-imports + "TRY", # tryceratops + "UP", # pyupgrade + "W", # pycodestyle - warnings + "YTT", # flake8-2020 ] ignore = [ - "D105", # Missing docstring in magic method - "D107", # Missing docstring in __init__ - "D212", # Multi-line docstring summary should start at the first line - "D401", # First line should be in imperative mood - "D104", # Missing docstring in public package - "D100", # Missing docstring in public module - "ANN401", # typing.Any are disallowed in `**kwargs - "PLR0913", # Too many arguments for function call - "D106", # Missing docstring in public nested class - "COM812", # Use of assert detected + 'TRY003', # Allow 
exceptions messages outside of Exception classes + "ANN002", # Skip typing for *args as they are hard to type + "ANN003", # Skip typing for **kwargs as they are hard to type + "COM812", # Do not force trailing commas ] [tool.ruff.lint.pydocstyle] @@ -137,6 +152,19 @@ convention = "google" [tool.ruff.lint.flake8-bugbear] extend-immutable-calls = ["taskiq_dependencies.Depends", "taskiq.TaskiqDepends"] +[tool.ruff.lint.flake8-tidy-imports.banned-api] +"__future__.annotations".msg = "__future__.annotations is discouraged. See PEP-649 and PEP-749" + +[tool.ruff.lint.flake8-import-conventions.extend-aliases] +"typing" = "t" + +[tool.ruff.lint.isort] +known-first-party = ["taskiq_sqlalchemy*"] +combine-as-imports = true +order-by-type = false +lines-after-imports = 2 +case-sensitive = true + [tool.ruff.lint.per-file-ignores] "tests/*" = [ "S101", # Use of assert detected diff --git a/taskiq_sqlalchemy/__init__.py b/taskiq_sqlalchemy/__init__.py index b4f9bfa..765f706 100644 --- a/taskiq_sqlalchemy/__init__.py +++ b/taskiq_sqlalchemy/__init__.py @@ -6,4 +6,5 @@ from taskiq_sqlalchemy.broker import SQLAlchemyBroker from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + __all__ = ["SQLAlchemyBroker", "SQLAlchemyResultBackend"] diff --git a/taskiq_sqlalchemy/adapters/__init__.py b/taskiq_sqlalchemy/adapters/__init__.py index 31f68f2..3eb703b 100644 --- a/taskiq_sqlalchemy/adapters/__init__.py +++ b/taskiq_sqlalchemy/adapters/__init__.py @@ -10,14 +10,14 @@ def resolve_adapter( manager: SQLAlchemyManager, ) -> t.Union[PostgresDialectAdapter, OracleDialectAdapter, PollingAdapter]: if manager.engine.dialect.name == "postgresql": - from taskiq_sqlalchemy.adapters.postgresql import PostgresDialectAdapter + from taskiq_sqlalchemy.adapters.postgresql import PostgresDialectAdapter # noqa: PLC0415 return PostgresDialectAdapter(manager.engine) if manager.engine.dialect.name == "oracle": - from taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter + from 
taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter # noqa: PLC0415 return OracleDialectAdapter(manager.engine) - from taskiq_sqlalchemy.adapters.polling import PollingAdapter + from taskiq_sqlalchemy.adapters.polling import PollingAdapter # noqa: PLC0415 return PollingAdapter(manager.engine, queue_cls=manager.queue_cls) diff --git a/taskiq_sqlalchemy/adapters/abc.py b/taskiq_sqlalchemy/adapters/abc.py index d72b605..59c8dcd 100644 --- a/taskiq_sqlalchemy/adapters/abc.py +++ b/taskiq_sqlalchemy/adapters/abc.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import AsyncGenerator +from collections.abc import AsyncGenerator from sqlalchemy.ext.asyncio import AsyncEngine diff --git a/taskiq_sqlalchemy/adapters/oracle.py b/taskiq_sqlalchemy/adapters/oracle.py index 33526dd..50c785e 100644 --- a/taskiq_sqlalchemy/adapters/oracle.py +++ b/taskiq_sqlalchemy/adapters/oracle.py @@ -15,6 +15,7 @@ from taskiq_sqlalchemy.adapters.abc import DialectAdapter + logger = logging.getLogger(__name__) # Maximum characters in an Oracle AQ queue name (safe for all Oracle versions) @@ -54,7 +55,7 @@ async def broker_startup(self) -> None: raise RuntimeError( "OracleDialectAdapter requires python-oracledb Thin mode. " "Do not call oracledb.init_oracle_client() before using this adapter. " - "Asyncio support is only available in Thin mode." + "Asyncio support is only available in Thin mode.", ) await self.ensure_queue("taskiq") @@ -168,7 +169,7 @@ async def notify(self, channel: str, payload: str) -> None: # This ensures the signal is visible to dequeuing workers immediately. 
queue.enqoptions.visibility = oracledb.ENQ_IMMEDIATE await queue.enqone( - driver_connection.msgproperties(payload=payload.encode()) + driver_connection.msgproperties(payload=payload.encode()), ) logger.debug( "OracleDialectAdapter: enqueued task_id=%r on queue %r", @@ -227,7 +228,8 @@ async def _listen_gen(self, channel: str) -> t.AsyncGenerator[str, None]: # Any other error - log and re-raise to let the broker decide # whether to restart the listener. logger.exception( - "OracleDialectAdapter: dequeue error on queue %r", queue_name + "OracleDialectAdapter: dequeue error on queue %r", + queue_name, ) raise @@ -249,5 +251,6 @@ async def _listen_gen(self, channel: str) -> t.AsyncGenerator[str, None]: yield payload logger.debug( - "OracleDialectAdapter: dequeue loop exited for queue %r", queue_name + "OracleDialectAdapter: dequeue loop exited for queue %r", + queue_name, ) diff --git a/taskiq_sqlalchemy/adapters/polling.py b/taskiq_sqlalchemy/adapters/polling.py index 3e67351..5a6d95b 100644 --- a/taskiq_sqlalchemy/adapters/polling.py +++ b/taskiq_sqlalchemy/adapters/polling.py @@ -44,7 +44,7 @@ async def _poll_gen(self, channel: str) -> t.AsyncGenerator[str, None]: sa.select(self.queue_cls.task_id) .filter_by(channel=channel) .limit(self.POLL_RESULT_LIMIT) - .with_for_update(skip_locked=True) + .with_for_update(skip_locked=True), ) rows = result.fetchall() for row in rows: diff --git a/taskiq_sqlalchemy/adapters/postgresql.py b/taskiq_sqlalchemy/adapters/postgresql.py index a045e79..d14a556 100644 --- a/taskiq_sqlalchemy/adapters/postgresql.py +++ b/taskiq_sqlalchemy/adapters/postgresql.py @@ -1,9 +1,6 @@ -from __future__ import annotations - import logging import math import typing as t -from typing import AsyncGenerator import anyio import anyio.abc @@ -13,6 +10,7 @@ from taskiq_sqlalchemy.adapters.abc import DialectAdapter + logger = logging.getLogger(__name__) @@ -54,7 +52,6 @@ async def worker_shutdown(self) -> None: except Exception: logger.exception( 
"PostgresDialectAdapter: error closing listener conn", - exc_info=True, ) self._listener_conn = None @@ -65,10 +62,10 @@ async def notify(self, channel: str, payload: str) -> None: {"channel": channel, "payload": payload}, ) - def listen(self, channel: str) -> AsyncGenerator[str, None]: + def listen(self, channel: str) -> t.AsyncGenerator[str, None]: return self._listen_gen(channel) - async def _listen_gen(self, channel: str) -> AsyncGenerator[str, None]: + async def _listen_gen(self, channel: str) -> t.AsyncGenerator[str, None]: if self._listener_conn is None or self._listener_conn.driver_connection is None: raise RuntimeError("Connection not available for listen()") @@ -76,7 +73,7 @@ async def _listen_gen(self, channel: str) -> AsyncGenerator[str, None]: # must never block the callback. If the consumer is slow the buffer # grows; that's acceptable — tasks are short-lived string IDs. self._send_stream, self._recv_stream = anyio.create_memory_object_stream[str]( - max_buffer_size=math.inf + max_buffer_size=math.inf, ) def _callback(conn: object, pid: int, channel_: str, payload: str) -> None: @@ -88,8 +85,7 @@ def _callback(conn: object, pid: int, channel_: str, payload: str) -> None: # Should never happen with max_buffer_size=inf, but guard # defensively to prevent a silent drop crashing the callback. 
logger.warning( - "PostgresDialectAdapter: notification buffer full on channel %r; " - "payload %r dropped", + "PostgresDialectAdapter: notification buffer full on channel %r; payload %r dropped", channel_, payload, ) @@ -109,12 +105,12 @@ def _callback(conn: object, pid: int, channel_: str, payload: str) -> None: if self._listener_conn is not None: try: await self._listener_conn.driver_connection.remove_listener( - channel, _callback + channel, + _callback, ) except Exception: logger.debug( - "PostgresDialectAdapter: could not remove listener for %r " - "(connection may already be closed)", + "PostgresDialectAdapter: could not remove listener for %r (connection may already be closed)", channel, exc_info=True, ) diff --git a/taskiq_sqlalchemy/broker.py b/taskiq_sqlalchemy/broker.py index a23cf26..342b531 100644 --- a/taskiq_sqlalchemy/broker.py +++ b/taskiq_sqlalchemy/broker.py @@ -15,6 +15,7 @@ from taskiq_sqlalchemy.adapters.abc import DialectAdapter from taskiq_sqlalchemy.manager import SQLAlchemyManager + logger = logging.getLogger(__name__) @@ -31,7 +32,7 @@ def __init__( *, channel_name: str = "taskiq", adapter: t.Optional[DialectAdapter] = None, - **kwargs: t.Any, + **kwargs, ) -> None: super().__init__(**kwargs) self.manager = manager @@ -73,7 +74,7 @@ async def kick(self, message: BrokerMessage) -> None: channel=self.channel_name, task_name=message.task_name, message=serialised, - ) + ), ) # Notify outside the transaction so the row is visible to workers @@ -100,7 +101,7 @@ async def _fetch_message(self, task_id: str) -> t.Optional[AckableMessage]: result = await conn.execute( sa.delete(self.manager.queue_cls) .filter_by(task_id=task_id, channel=self.channel_name) - .returning(self.manager.queue_cls.message) + .returning(self.manager.queue_cls.message), ) row = result.first() if row is None: diff --git a/taskiq_sqlalchemy/models.py b/taskiq_sqlalchemy/models.py index cff3a64..66ed98d 100644 --- a/taskiq_sqlalchemy/models.py +++ 
b/taskiq_sqlalchemy/models.py @@ -21,7 +21,8 @@ class BaseMixin: ) created_at: Mapped[datetime.datetime] = mapped_column( - sa.DateTime, server_default=sa.func.now() + sa.DateTime, + server_default=sa.func.now(), ) @@ -42,7 +43,8 @@ class TaskiqResultMixin(BaseMixin): result: Mapped[t.Optional[bytes]] = mapped_column(sa.LargeBinary) is_err: Mapped[bool] = mapped_column( - sa.Boolean(name="bool_is_err"), server_default=expression.false() + sa.Boolean(name="bool_is_err"), + server_default=expression.false(), ) diff --git a/taskiq_sqlalchemy/result_backend.py b/taskiq_sqlalchemy/result_backend.py index 0b73346..ad56cef 100644 --- a/taskiq_sqlalchemy/result_backend.py +++ b/taskiq_sqlalchemy/result_backend.py @@ -15,6 +15,7 @@ from taskiq_sqlalchemy.manager import SQLAlchemyManager + _ReturnType = t.TypeVar("_ReturnType") logger = logging.getLogger(__name__) @@ -50,7 +51,7 @@ async def set_result(self, task_id: str, result: TaskiqResult[_ReturnType]) -> N # called at most once per task_id. dialect = async_engine.dialect.name if dialect in ("postgresql",): - from sqlalchemy.dialects.postgresql import insert as pg_insert + from sqlalchemy.dialects.postgresql import insert as pg_insert # noqa: PLC0415 stmt = ( pg_insert(self.manager.result_cls) @@ -72,8 +73,8 @@ async def set_result(self, task_id: str, result: TaskiqResult[_ReturnType]) -> N # written exactly once per task_id in normal operation) await conn.execute( sa.delete(self.manager.result_cls).where( - self.manager.result_cls.task_id == task_id - ) + self.manager.result_cls.task_id == task_id, + ), ) stmt = sa.insert(self.manager.result_cls).values( task_id=task_id, @@ -83,14 +84,16 @@ async def set_result(self, task_id: str, result: TaskiqResult[_ReturnType]) -> N await conn.execute(stmt) async def get_result( - self, task_id: str, with_logs: bool = False + self, + task_id: str, + with_logs: bool = False, # noqa: FBT001, FBT002 -- Keep same signature as parent ) -> TaskiqResult[_ReturnType]: async with 
self.manager.engine.begin() as conn: row = ( await conn.execute( sa.select(self.manager.result_cls).where( - self.manager.result_cls.task_id == task_id - ) + self.manager.result_cls.task_id == task_id, + ), ) ).fetchone() @@ -100,8 +103,8 @@ async def get_result( if not self.keep_results: await conn.execute( sa.delete(self.manager.result_cls).where( - self.manager.result_cls.task_id == task_id - ) + self.manager.result_cls.task_id == task_id, + ), ) result: TaskiqResult[_ReturnType] = self.serializer.loadb(row.result) @@ -111,7 +114,7 @@ async def get_result( async def is_result_ready(self, task_id: str) -> bool: async with self.manager.engine.connect() as conn: - stmt = sa.select(sa.literal(True)).where( - sa.exists().where(self.manager.result_cls.task_id == task_id) + stmt = sa.select(sa.literal(value=True)).where( + sa.exists().where(self.manager.result_cls.task_id == task_id), ) return bool(await conn.scalar(stmt)) diff --git a/tests/broker/adapters/test_oracle.py b/tests/broker/adapters/test_oracle.py index 18bb206..400824f 100644 --- a/tests/broker/adapters/test_oracle.py +++ b/tests/broker/adapters/test_oracle.py @@ -22,9 +22,8 @@ from taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter, _oracle_queue_name -_ORA_URL = ( - "oracle+oracledb://taskiq_user:taskiq_pwd@localhost:1521/?service_name=taskiq" -) + +_ORA_URL = "oracle+oracledb://taskiq_user:taskiq_pwd@localhost:1521/?service_name=taskiq" @pytest.fixture @@ -158,7 +157,6 @@ async def test_listen_stops_on_stop_event( collected: list[str] = [] with anyio.fail_after(5.0): - async for p in adapter.listen("test_ora_channel"): - collected.append(p) + collected.extend([p async for p in adapter.listen("test_ora_channel")]) assert collected == [] diff --git a/tests/broker/adapters/test_polling.py b/tests/broker/adapters/test_polling.py index 7f28fd9..76f2b75 100644 --- a/tests/broker/adapters/test_polling.py +++ b/tests/broker/adapters/test_polling.py @@ -18,6 +18,7 @@ from 
taskiq_sqlalchemy.adapters.polling import PollingAdapter from taskiq_sqlalchemy.manager import SQLAlchemyManager + pytestmark = pytest.mark.anyio @@ -75,7 +76,7 @@ async def _insert_queue_rows( channel=channel, task_name="tests.fake_task", message=b"{}", - ) + ), ) return ids @@ -89,7 +90,7 @@ async def test_polling_notify_is_noop( async with polling_manager.engine.connect() as conn: count_before = ( await conn.execute( - sa.select(sa.func.count()).select_from(polling_manager.queue_cls) + sa.select(sa.func.count()).select_from(polling_manager.queue_cls), ) ).scalar() @@ -99,7 +100,7 @@ async def test_polling_notify_is_noop( async with polling_manager.engine.connect() as conn: count_after = ( await conn.execute( - sa.select(sa.func.count()).select_from(polling_manager.queue_cls) + sa.select(sa.func.count()).select_from(polling_manager.queue_cls), ) ).scalar() @@ -118,7 +119,9 @@ async def test_poll_yields_task_ids_from_db( channel = "test_channel" insert_count = 3 inserted_ids = await _insert_queue_rows( - polling_manager, channel=channel, count=insert_count + polling_manager, + channel=channel, + count=insert_count, ) collected: list[str] = [] @@ -166,8 +169,7 @@ async def test_poll_stops_on_stop_event( collected: list[str] = [] with anyio.fail_after(2.0): - async for task_id in adapter.listen("any_channel"): - collected.append(task_id) + collected.extend([task_id async for task_id in adapter.listen("any_channel")]) # The generator exited without hanging assert collected == [] diff --git a/tests/broker/adapters/test_postgresql.py b/tests/broker/adapters/test_postgresql.py index eb27dcb..1bbfc9b 100644 --- a/tests/broker/adapters/test_postgresql.py +++ b/tests/broker/adapters/test_postgresql.py @@ -12,6 +12,7 @@ from taskiq_sqlalchemy.adapters.postgresql import PostgresDialectAdapter + pytestmark = [pytest.mark.anyio, pytest.mark.postgresql] _PG_URL = "postgresql+asyncpg://taskiq_user:taskiq_pwd@localhost:5432/taskiq" @@ -65,7 +66,8 @@ def _on_notification(conn: 
object, pid: int, ch: str, p: str) -> None: # Give Postgres a moment to deliver the notification with anyio.fail_after(3.0): while payload not in received: - await asyncio.sleep(0.05) + with anyio.move_on_after(0.05): + await anyio.Event().wait() finally: await driver_conn.remove_listener(channel, _on_notification) raw_conn.close() @@ -95,7 +97,8 @@ async def _collect() -> None: with anyio.fail_after(5.0): while not received: - await asyncio.sleep(0.05) + with anyio.move_on_after(0.05): + await anyio.Event().wait() tg.cancel_scope.cancel() assert received == [payload] diff --git a/tests/broker/conftest.py b/tests/broker/conftest.py index d132e3e..08b9f6d 100644 --- a/tests/broker/conftest.py +++ b/tests/broker/conftest.py @@ -4,7 +4,7 @@ """ import asyncio -from typing import AsyncGenerator +from collections.abc import AsyncGenerator import pytest from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine @@ -12,6 +12,7 @@ from taskiq import BrokerMessage from taskiq_sqlalchemy.adapters.abc import DialectAdapter +from taskiq_sqlalchemy.adapters.polling import PollingAdapter from taskiq_sqlalchemy.broker import SQLAlchemyBroker from taskiq_sqlalchemy.manager import SQLAlchemyManager @@ -142,14 +143,13 @@ async def polling_broker( Broker wired with the real PollingAdapter — for end-to-end listen() tests. Works with any engine (SQLite always, Postgres when available). 
""" - from taskiq_sqlalchemy.adapters.polling import PollingAdapter adapter = PollingAdapter( manager_with_schema.engine, queue_cls=manager_with_schema.queue_cls, ) # Speed up polling for tests - adapter.poll_interval = 0.05 + adapter.POLL_INTERVAL_SECS = 0.05 b = SQLAlchemyBroker( manager_with_schema, diff --git a/tests/broker/test_fetch_message.py b/tests/broker/test_fetch_message.py index 08503f1..0f03ab7 100644 --- a/tests/broker/test_fetch_message.py +++ b/tests/broker/test_fetch_message.py @@ -13,6 +13,7 @@ from taskiq_sqlalchemy.broker import SQLAlchemyBroker from taskiq_sqlalchemy.manager import SQLAlchemyManager + pytestmark = pytest.mark.anyio @@ -31,7 +32,7 @@ async def _insert_queue_row( channel=channel, task_name=task_name, message=message_bytes, - ) + ), ) @@ -63,8 +64,8 @@ async def test_fetch_message_returns_ackable( row = ( await conn.execute( sa.select(manager_with_schema.queue_cls).where( - manager_with_schema.queue_cls.task_id == broker_message.task_id - ) + manager_with_schema.queue_cls.task_id == broker_message.task_id, + ), ) ).fetchone() assert row is None, "Row should be deleted after _fetch_message" @@ -159,6 +160,4 @@ async def test_fetch_message_atomic_double_claim( ) non_none = [r for r in results if r is not None] - assert len(non_none) == 1, ( - f"Exactly one claim should succeed; got {len(non_none)} non-None results" - ) + assert len(non_none) == 1, f"Exactly one claim should succeed; got {len(non_none)} non-None results" diff --git a/tests/broker/test_kick.py b/tests/broker/test_kick.py index 2198a2c..c05fe1c 100644 --- a/tests/broker/test_kick.py +++ b/tests/broker/test_kick.py @@ -14,6 +14,7 @@ from .conftest import FakeAdapter + pytestmark = pytest.mark.anyio @@ -29,8 +30,8 @@ async def test_kick_inserts_row( row = ( await conn.execute( sa.select(manager_with_schema.queue_cls).where( - manager_with_schema.queue_cls.task_id == broker_message.task_id - ) + manager_with_schema.queue_cls.task_id == broker_message.task_id, + ), ) 
).fetchone() @@ -53,8 +54,8 @@ async def test_kick_serializes_broker_message( row = ( await conn.execute( sa.select(manager_with_schema.queue_cls).where( - manager_with_schema.queue_cls.task_id == broker_message.task_id - ) + manager_with_schema.queue_cls.task_id == broker_message.task_id, + ), ) ).fetchone() @@ -101,8 +102,8 @@ async def notify(self, channel: str, payload: str) -> None: row = ( await conn.execute( sa.select(manager_with_schema.queue_cls).where( - manager_with_schema.queue_cls.task_id == payload - ) + manager_with_schema.queue_cls.task_id == payload, + ), ) ).fetchone() row_visible_during_notify = row is not None @@ -120,9 +121,7 @@ async def notify(self, channel: str, payload: str) -> None: finally: await b.shutdown() - assert row_visible_during_notify, ( - "Row must be committed to DB before notify() is called" - ) + assert row_visible_during_notify, "Row must be committed to DB before notify() is called" async def test_kick_multiple_messages( @@ -132,6 +131,7 @@ async def test_kick_multiple_messages( ) -> None: """Kicking N messages creates exactly N rows with distinct task_ids.""" + message_count = 5 messages = [ BrokerMessage( task_id=str(uuid.uuid4()), @@ -139,7 +139,7 @@ async def test_kick_multiple_messages( message=b'{"args": [], "kwargs": {}}', labels={}, ) - for _ in range(5) + for _ in range(message_count) ] for msg in messages: @@ -149,14 +149,14 @@ async def test_kick_multiple_messages( rows = ( await conn.execute( sa.select(manager_with_schema.queue_cls).where( - manager_with_schema.queue_cls.channel == broker.channel_name - ) + manager_with_schema.queue_cls.channel == broker.channel_name, + ), ) ).fetchall() - assert len(rows) == 5 + assert len(rows) == message_count stored_ids = {row.task_id for row in rows} expected_ids = {msg.task_id for msg in messages} assert stored_ids == expected_ids # notify() called once per message - assert len(fake_adapter.notify_calls) == 5 + assert len(fake_adapter.notify_calls) == message_count diff 
--git a/tests/broker/test_listen.py b/tests/broker/test_listen.py index d5b0263..d4e913c 100644 --- a/tests/broker/test_listen.py +++ b/tests/broker/test_listen.py @@ -23,6 +23,7 @@ from taskiq_sqlalchemy.broker import SQLAlchemyBroker from taskiq_sqlalchemy.manager import SQLAlchemyManager + pytestmark = pytest.mark.anyio @@ -79,7 +80,7 @@ async def polling_broker( # type: ignore[override] async def _collect( broker: SQLAlchemyBroker, n: int, - timeout: float = 5.0, + collect_timeout: float = 5.0, ) -> list[AckableMessage]: """ Drive broker.listen() and collect the first ``n`` messages. @@ -93,7 +94,7 @@ async def _drain() -> None: if len(collected) >= n: return - with anyio.fail_after(timeout): + with anyio.fail_after(collect_timeout): await _drain() return collected @@ -121,6 +122,7 @@ async def test_listen_multiple_messages_in_order( """ Kick 3 messages; collect 3 via listen(); all payloads present, no duplicates. """ + message_count = 3 messages_sent = [ BrokerMessage( task_id=str(uuid.uuid4()), @@ -128,7 +130,7 @@ async def test_listen_multiple_messages_in_order( message=f"payload-{i}".encode(), labels={}, ) - for i in range(3) + for i in range(message_count) ] for msg in messages_sent: @@ -136,7 +138,7 @@ async def test_listen_multiple_messages_in_order( received = await _collect(polling_broker, n=3) - assert len(received) == 3 + assert len(received) == message_count received_payloads = {msg.data for msg in received} expected_payloads = {msg.message for msg in messages_sent} assert received_payloads == expected_payloads @@ -190,12 +192,10 @@ async def _try_listen_b() -> None: with anyio.move_on_after(0.3): await _try_listen_b() - assert received_by_b == [], ( - "broker_b must not receive messages sent on channel_a" - ) + assert received_by_b == [], "broker_b must not receive messages sent on channel_a" # Confirm broker_a does receive it - received_by_a = await _collect(broker_a, n=1, timeout=3.0) + received_by_a = await _collect(broker_a, n=1, 
collect_timeout=3.0) assert received_by_a[0].data == msg_a.message finally: diff --git a/tests/result_backend/conftest.py b/tests/result_backend/conftest.py index a8bf23e..1051ce5 100644 --- a/tests/result_backend/conftest.py +++ b/tests/result_backend/conftest.py @@ -9,6 +9,7 @@ from taskiq_sqlalchemy.manager import SQLAlchemyManager from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + # Map of (pytest param id) → (SQLAlchemy async URL) _ENGINE_PARAMS: list = [ pytest.param( @@ -113,7 +114,8 @@ class _Base(DeclarativeBase): await conn.run_sync(_Base.metadata.create_all) backend: SQLAlchemyResultBackend[t.Any] = SQLAlchemyResultBackend( - manager, keep_results=False + manager, + keep_results=False, ) try: diff --git a/tests/result_backend/test_get_result.py b/tests/result_backend/test_get_result.py index 96417c5..08e6328 100644 --- a/tests/result_backend/test_get_result.py +++ b/tests/result_backend/test_get_result.py @@ -7,6 +7,7 @@ from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + pytestmark = pytest.mark.anyio @@ -58,9 +59,7 @@ async def test_get_result_logs_preserved_when_requested( recovered = await result_backend.get_result(task_id=task_id, with_logs=True) - assert recovered.log == result_with_logs.log, ( - "Log should be present when with_logs=True" - ) + assert recovered.log == result_with_logs.log, "Log should be present when with_logs=True" async def test_get_result_keep_results_false_deletes_row( diff --git a/tests/result_backend/test_is_result_ready.py b/tests/result_backend/test_is_result_ready.py index cdecbc2..6d830e7 100644 --- a/tests/result_backend/test_is_result_ready.py +++ b/tests/result_backend/test_is_result_ready.py @@ -5,6 +5,7 @@ from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + pytestmark = pytest.mark.anyio diff --git a/tests/result_backend/test_set_result.py b/tests/result_backend/test_set_result.py index 846648a..d6723d9 100644 --- a/tests/result_backend/test_set_result.py +++ 
b/tests/result_backend/test_set_result.py @@ -8,6 +8,7 @@ from taskiq_sqlalchemy.manager import SQLAlchemyManager from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend + pytestmark = pytest.mark.anyio @@ -26,8 +27,8 @@ async def test_set_result_stores_row( row = ( await conn.execute( sa.select(manager.result_cls).where( - manager.result_cls.task_id == task_id - ) + manager.result_cls.task_id == task_id, + ), ) ).fetchone() @@ -52,8 +53,8 @@ async def test_set_result_error_flag( row = ( await conn.execute( sa.select(manager.result_cls).where( - manager.result_cls.task_id == task_id - ) + manager.result_cls.task_id == task_id, + ), ) ).fetchone() @@ -83,8 +84,8 @@ async def test_set_result_idempotent_overwrite( rows = ( await conn.execute( sa.select(manager.result_cls).where( - manager.result_cls.task_id == task_id - ) + manager.result_cls.task_id == task_id, + ), ) ).fetchall() @@ -111,8 +112,8 @@ async def test_set_result_serialization_roundtrip( row = ( await conn.execute( sa.select(manager.result_cls).where( - manager.result_cls.task_id == task_id - ) + manager.result_cls.task_id == task_id, + ), ) ).fetchone() From c6639c8126bc8d327201d6fe074cbcf20018de3b Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 08:53:49 +0530 Subject: [PATCH 16/25] chore: Setup linting workflow --- .github/workflows/ci.yml | 119 +++--------------------------------- .github/workflows/docs.yml | 121 ------------------------------------- 2 files changed, 8 insertions(+), 232 deletions(-) delete mode 100644 .github/workflows/docs.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6d393ac..126dc21 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,20 +2,19 @@ name: CI on: push: - branches: [ main, develop ] + branches: + - main pull_request: - branches: [ main, develop ] - + branches: + - main jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install 
uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" + uses: astral-sh/setup-uv@v7 - name: Set up Python run: uv python install 3.12 @@ -24,109 +23,7 @@ jobs: run: uv sync --dev - name: Run ruff check - run: uv run ruff check --output-format=github . + run: uvx ruff@latest check --output-format=github . - name: Run ruff format - run: uv run ruff format --check . - - - test: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] - driver: ["asyncpg", "psycopg", "psqlpy"] - include: - - driver: asyncpg - extra: asyncpg - - driver: psycopg - extra: psycopg - - driver: psqlpy - extra: psqlpy - - services: - postgres: - image: postgres:16 - env: - POSTGRES_PASSWORD: postgres - POSTGRES_USER: postgres - POSTGRES_DB: taskiq_test - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - - steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" - - - name: Set up Python ${{ matrix.python-version }} - run: uv python install ${{ matrix.python-version }} - - - name: Install dependencies - run: | - uv sync --dev --group ${{ matrix.extra }} - - - name: Run tests with pytest - env: - TEST_DATABASE_URL: postgresql://postgres:postgres@localhost:5432/taskiq_test - TEST_DRIVER: ${{ matrix.driver }} - run: | - uv run pytest tests/ -v --cov=taskiq_postgresql --cov-report=xml --cov-report=term-missing - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v5 - if: matrix.python-version == '3.12' && matrix.driver == 'asyncpg' && secrets.CODECOV_TOKEN != '' - with: - file: ./coverage.xml - fail_ci_if_error: false - token: ${{ secrets.CODECOV_TOKEN }} - - integration-test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" - - - name: Set up Python - run: uv python install 
3.12 - - - name: Start services - run: docker-compose up -d - - - name: Wait for PostgreSQL - run: | - for i in {1..30}; do - if docker-compose exec -T postgres pg_isready -U postgres; then - echo "PostgreSQL is ready" - break - fi - echo "Waiting for PostgreSQL... ($i/30)" - sleep 2 - done - - - name: Install all driver dependencies - run: | - uv sync --dev --group asyncpg --group psycopg --group psqlpy - - - name: Run integration tests - env: - TEST_DATABASE_URL: postgresql://postgres:postgres@localhost:5432/postgres - run: | - uv run pytest tests/ -v -m "integration" --tb=short - - - name: Stop services - if: always() - run: docker-compose down \ No newline at end of file + run: uvx ruff@latest format --check . diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml deleted file mode 100644 index 6247416..0000000 --- a/.github/workflows/docs.yml +++ /dev/null @@ -1,121 +0,0 @@ -name: Documentation - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -jobs: - check-readme: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" - - - name: Set up Python - run: uv python install 3.12 - - - name: Install dependencies - run: uv sync --dev - - - name: Check README links - run: | - # Install markdown-link-check if needed - npm install -g markdown-link-check - markdown-link-check README.md --config .github/markdown-link-check.json || true - - - name: Validate code examples in README - run: | - # Extract and validate Python code blocks from README - python -c " - import re - import tempfile - import subprocess - import sys - - with open('README.md', 'r') as f: - content = f.read() - - # Find Python code blocks - python_blocks = re.findall(r'```python\n(.*?)\n```', content, re.DOTALL) - - for i, block in enumerate(python_blocks): - if 'import' in block and 'taskiq_postgresql' in block: - # Create temporary file - with 
tempfile.NamedTemporaryFile(mode='w', suffix='.py', delete=False) as tmp: - tmp.write(block) - tmp_path = tmp.name - - # Check syntax - result = subprocess.run(['python', '-m', 'py_compile', tmp_path], - capture_output=True, text=True) - - if result.returncode != 0: - print(f'Syntax error in code block {i+1}:') - print(result.stderr) - sys.exit(1) - - print(f'Code block {i+1} is valid') - - print('All Python code blocks are syntactically valid') - " - - check-docstrings: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" - - - name: Set up Python - run: uv python install 3.12 - - - name: Install dependencies - run: uv sync --dev - - - name: Check docstring coverage - run: | - uv run python -c " - import ast - import os - - def check_docstrings(file_path): - with open(file_path, 'r') as file: - content = file.read() - - tree = ast.parse(content) - missing_docstrings = [] - - for node in ast.walk(tree): - if isinstance(node, (ast.FunctionDef, ast.ClassDef)): - if not ast.get_docstring(node): - missing_docstrings.append(f'{node.name} at line {node.lineno}') - - return missing_docstrings - - # Check all Python files in taskiq_postgresql - missing_total = [] - for root, dirs, files in os.walk('taskiq_postgresql'): - for file in files: - if file.endswith('.py') and not file.startswith('__'): - file_path = os.path.join(root, file) - missing = check_docstrings(file_path) - if missing: - missing_total.extend([f'{file_path}: {item}' for item in missing]) - - if missing_total: - print('Missing docstrings in:') - for item in missing_total: - print(f' - {item}') - print(f'Total: {len(missing_total)} missing docstrings') - else: - print('All public functions and classes have docstrings!') - " \ No newline at end of file From 1e7c3449426cb14e415a15be409973c46eaa77e4 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 13:16:29 +0530 Subject: [PATCH 17/25] deps: Add 
aioodbc to mssql extras --- pyproject.toml | 4 ++- uv.lock | 92 ++++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 93 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f145c57..e346e7e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,13 +37,14 @@ dependencies = ["taskiq>=0.11.7", 'sqlalchemy>=2', 'anyio>=4'] [project.optional-dependencies] all = [ - "taskiq_sqlalchemy[postgresql-asyncpg,postgresql-psycopg,sqlite-aiosqlite,oracle-oracledb]", + "taskiq_sqlalchemy[postgresql-asyncpg,postgresql-psycopg,sqlite-aiosqlite,oracle-oracledb,mssql-aioodbc]", ] postgresql-asyncpg = ["asyncpg"] postgresql-psycopg = ["psycopg[binary,pool]"] sqlite-aiosqlite = ["aiosqlite"] oracle-oracledb = ["oracledb"] +mssql-aioodbc = ["aioodbc"] [project.urls] Homepage = "https://github.com/corridor/taskiq-sqlalchemy" @@ -89,6 +90,7 @@ universal = true markers = [ "postgresql: tests that require a running PostgreSQL server", "oracle: tests that require a running OracleDB server", + "mssql: tests that require a running SQL Server instance", ] [tool.ruff] diff --git a/uv.lock b/uv.lock index a92d771..f0ea300 100644 --- a/uv.lock +++ b/uv.lock @@ -10,6 +10,18 @@ required-markers = [ "python_full_version == '3.10.*'", ] +[[package]] +name = "aioodbc" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyodbc" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/45/87/3a7580938f217212a574ba0d1af78203fc278fc439815f3fc515a7fdc12b/aioodbc-0.5.0.tar.gz", hash = "sha256:cbccd89ce595c033a49c9e6b4b55bbace7613a104b8a46e3d4c58c4bc4f25075", size = 41298, upload-time = "2023-10-28T21:37:29.966Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/80/4d1565bc16b53cd603c73dc4bc770e2e6418d957417e05031314760dc28c/aioodbc-0.5.0-py3-none-any.whl", hash = "sha256:bcaf16f007855fa4bf0ce6754b1f72c6c5a3d544188849577ddd55c5dc42985e", size = 19449, upload-time = "2023-10-28T21:37:28.51Z" }, +] 
+ [[package]] name = "aiosqlite" version = "0.22.1" @@ -939,6 +951,77 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pyodbc" +version = "5.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/85/44b10070a769a56bd910009bb185c0c0a82daff8d567cd1a116d7d730c7d/pyodbc-5.3.0.tar.gz", hash = "sha256:2fe0e063d8fb66efd0ac6dc39236c4de1a45f17c33eaded0d553d21c199f4d05", size = 121770, upload-time = "2025-10-17T18:04:09.43Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/cd/d0ac9e8963cf43f3c0e8ebd284cd9c5d0e17457be76c35abe4998b7b6df2/pyodbc-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6682cdec78f1302d0c559422c8e00991668e039ed63dece8bf99ef62173376a5", size = 71888, upload-time = "2025-10-17T18:02:58.285Z" }, + { url = "https://files.pythonhosted.org/packages/cb/7b/95ea2795ea8a0db60414e14f117869a5ba44bd52387886c1a210da637315/pyodbc-5.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9cd3f0a9796b3e1170a9fa168c7e7ca81879142f30e20f46663b882db139b7d2", size = 71813, upload-time = "2025-10-17T18:02:59.722Z" }, + { url = "https://files.pythonhosted.org/packages/95/c9/6f4644b60af513ea1c9cab1ff4af633e8f300e8468f4ae3507f04524e641/pyodbc-5.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46185a1a7f409761716c71de7b95e7bbb004390c650d00b0b170193e3d6224bb", size = 318556, upload-time = "2025-10-17T18:03:01.129Z" }, + { url = "https://files.pythonhosted.org/packages/19/3f/24876d9cb9c6ce1bd2b6f43f69ebc00b8eb47bf1ed99ee95e340bf90ed79/pyodbc-5.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:349a9abae62a968b98f6bbd23d2825151f8d9de50b3a8f5f3271b48958fdb672", size = 322048, upload-time = "2025-10-17T18:03:02.522Z" }, + { url = "https://files.pythonhosted.org/packages/1f/27/faf17353605ac60f80136bc3172ed2d69d7defcb9733166293fc14ac2c52/pyodbc-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ac23feb7ddaa729f6b840639e92f83ff0ccaa7072801d944f1332cd5f5b05f47", size = 1286123, upload-time = "2025-10-17T18:03:04.157Z" }, + { url = "https://files.pythonhosted.org/packages/d4/61/c9d407d2aa3e89f9bb68acf6917b0045a788ae8c3f4045c34759cb77af63/pyodbc-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8aa396c6d6af52ccd51b8c8a5bffbb46fd44e52ce07ea4272c1d28e5e5b12722", size = 1343502, upload-time = "2025-10-17T18:03:05.485Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9f/f1b0f3238d873d4930aa2a2b8d5ba97132f6416764bf0c87368f8d6f2139/pyodbc-5.3.0-cp310-cp310-win32.whl", hash = "sha256:46869b9a6555ff003ed1d8ebad6708423adf2a5c88e1a578b9f029fb1435186e", size = 62968, upload-time = "2025-10-17T18:03:06.933Z" }, + { url = "https://files.pythonhosted.org/packages/d8/26/5f8ebdca4735aad0119aaaa6d5d73b379901b7a1dbb643aaa636040b27cf/pyodbc-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:705903acf6f43c44fc64e764578d9a88649eb21bf7418d78677a9d2e337f56f2", size = 69397, upload-time = "2025-10-17T18:03:08.49Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c8/480a942fd2e87dd7df6d3c1f429df075695ed8ae34d187fe95c64219fd49/pyodbc-5.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:c68d9c225a97aedafb7fff1c0e1bfe293093f77da19eaf200d0e988fa2718d16", size = 64446, upload-time = "2025-10-17T18:03:09.333Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c7/534986d97a26cb8f40ef456dfcf00d8483161eade6d53fa45fcf2d5c2b87/pyodbc-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ebc3be93f61ea0553db88589e683ace12bf975baa954af4834ab89f5ee7bf8ae", size = 71958, upload-time = "2025-10-17T18:03:10.163Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/3c/6fe3e9eae6db1c34d6616a452f9b954b0d5516c430f3dd959c9d8d725f2a/pyodbc-5.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9b987a25a384f31e373903005554230f5a6d59af78bce62954386736a902a4b3", size = 71843, upload-time = "2025-10-17T18:03:11.058Z" }, + { url = "https://files.pythonhosted.org/packages/44/0e/81a0315d0bf7e57be24338dbed616f806131ab706d87c70f363506dc13d5/pyodbc-5.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:676031723aac7dcbbd2813bddda0e8abf171b20ec218ab8dfb21d64a193430ea", size = 327191, upload-time = "2025-10-17T18:03:11.93Z" }, + { url = "https://files.pythonhosted.org/packages/43/ae/b95bb2068f911950322a97172c68675c85a3e87dc04a98448c339fcbef21/pyodbc-5.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5c30c5cd40b751f77bbc73edd32c4498630939bcd4e72ee7e6c9a4b982cc5ca", size = 332228, upload-time = "2025-10-17T18:03:13.096Z" }, + { url = "https://files.pythonhosted.org/packages/dc/21/2433625f7d5922ee9a34e3805805fa0f1355d01d55206c337bb23ec869bf/pyodbc-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2035c7dfb71677cd5be64d3a3eb0779560279f0a8dc6e33673499498caa88937", size = 1296469, upload-time = "2025-10-17T18:03:14.61Z" }, + { url = "https://files.pythonhosted.org/packages/3a/f4/c760caf7bb9b3ab988975d84bd3e7ebda739fe0075c82f476d04ee97324c/pyodbc-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5cbe4d753723c8a8f65020b7a259183ef5f14307587165ce37e8c7e251951852", size = 1353163, upload-time = "2025-10-17T18:03:16.272Z" }, + { url = "https://files.pythonhosted.org/packages/14/ad/f9ca1e9e44fd91058f6e35b233b1bb6213d590185bfcc2a2c4f1033266e7/pyodbc-5.3.0-cp311-cp311-win32.whl", hash = "sha256:d255f6b117d05cfc046a5201fdf39535264045352ea536c35777cf66d321fbb8", size = 62925, upload-time = "2025-10-17T18:03:17.649Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/cf/52b9b94efd8cfd11890ae04f31f50561710128d735e4e38a8fbb964cd2c2/pyodbc-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:f1ad0e93612a6201621853fc661209d82ff2a35892b7d590106fe8f97d9f1f2a", size = 69329, upload-time = "2025-10-17T18:03:18.474Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6f/bf5433bb345007f93003fa062e045890afb42e4e9fc6bd66acc2c3bd12ca/pyodbc-5.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:0df7ff47fab91ea05548095b00e5eb87ed88ddf4648c58c67b4db95ea4913e23", size = 64447, upload-time = "2025-10-17T18:03:19.691Z" }, + { url = "https://files.pythonhosted.org/packages/f5/0c/7ecf8077f4b932a5d25896699ff5c394ffc2a880a9c2c284d6a3e6ea5949/pyodbc-5.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5ebf6b5d989395efe722b02b010cb9815698a4d681921bf5db1c0e1195ac1bde", size = 72994, upload-time = "2025-10-17T18:03:20.551Z" }, + { url = "https://files.pythonhosted.org/packages/03/78/9fbde156055d88c1ef3487534281a5b1479ee7a2f958a7e90714968749ac/pyodbc-5.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:197bb6ddafe356a916b8ee1b8752009057fce58e216e887e2174b24c7ab99269", size = 72535, upload-time = "2025-10-17T18:03:21.423Z" }, + { url = "https://files.pythonhosted.org/packages/9f/f9/8c106dcd6946e95fee0da0f1ba58cd90eb872eebe8968996a2ea1f7ac3c1/pyodbc-5.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6ccb5315ec9e081f5cbd66f36acbc820ad172b8fa3736cf7f993cdf69bd8a96", size = 333565, upload-time = "2025-10-17T18:03:22.695Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/2c70f47a76a4fafa308d148f786aeb35a4d67a01d41002f1065b465d9994/pyodbc-5.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5dd3d5e469f89a3112cf8b0658c43108a4712fad65e576071e4dd44d2bd763c7", size = 340283, upload-time = "2025-10-17T18:03:23.691Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/b2/0631d84731606bfe40d3b03a436b80cbd16b63b022c7b13444fb30761ca8/pyodbc-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b180bc5e49b74fd40a24ef5b0fe143d0c234ac1506febe810d7434bf47cb925b", size = 1302767, upload-time = "2025-10-17T18:03:25.311Z" }, + { url = "https://files.pythonhosted.org/packages/74/b9/707c5314cca9401081b3757301241c167a94ba91b4bd55c8fa591bf35a4a/pyodbc-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e3c39de3005fff3ae79246f952720d44affc6756b4b85398da4c5ea76bf8f506", size = 1361251, upload-time = "2025-10-17T18:03:26.538Z" }, + { url = "https://files.pythonhosted.org/packages/97/7c/893036c8b0c8d359082a56efdaa64358a38dda993124162c3faa35d1924d/pyodbc-5.3.0-cp312-cp312-win32.whl", hash = "sha256:d32c3259762bef440707098010035bbc83d1c73d81a434018ab8c688158bd3bb", size = 63413, upload-time = "2025-10-17T18:03:27.903Z" }, + { url = "https://files.pythonhosted.org/packages/c0/70/5e61b216cc13c7f833ef87f4cdeab253a7873f8709253f5076e9bb16c1b3/pyodbc-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe77eb9dcca5fc1300c9121f81040cc9011d28cff383e2c35416e9ec06d4bc95", size = 70133, upload-time = "2025-10-17T18:03:28.746Z" }, + { url = "https://files.pythonhosted.org/packages/aa/85/e7d0629c9714a85eb4f85d21602ce6d8a1ec0f313fde8017990cf913e3b4/pyodbc-5.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:afe7c4ac555a8d10a36234788fc6cfc22a86ce37fc5ba88a1f75b3e6696665dc", size = 64700, upload-time = "2025-10-17T18:03:29.638Z" }, + { url = "https://files.pythonhosted.org/packages/0c/1d/9e74cbcc1d4878553eadfd59138364b38656369eb58f7e5b42fb344c0ce7/pyodbc-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7e9ab0b91de28a5ab838ac4db0253d7cc8ce2452efe4ad92ee6a57b922bf0c24", size = 72975, upload-time = "2025-10-17T18:03:30.466Z" }, + { url = "https://files.pythonhosted.org/packages/37/c7/27d83f91b3144d3e275b5b387f0564b161ddbc4ce1b72bb3b3653e7f4f7a/pyodbc-5.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:6132554ffbd7910524d643f13ce17f4a72f3a6824b0adef4e9a7f66efac96350", size = 72541, upload-time = "2025-10-17T18:03:31.348Z" }, + { url = "https://files.pythonhosted.org/packages/1b/33/2bb24e7fc95e98a7b11ea5ad1f256412de35d2e9cc339be198258c1d9a76/pyodbc-5.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1629af4706e9228d79dabb4863c11cceb22a6dab90700db0ef449074f0150c0d", size = 343287, upload-time = "2025-10-17T18:03:32.287Z" }, + { url = "https://files.pythonhosted.org/packages/fa/24/88cde8b6dc07a93a92b6c15520a947db24f55db7bd8b09e85956642b7cf3/pyodbc-5.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ceaed87ba2ea848c11223f66f629ef121f6ebe621f605cde9cfdee4fd9f4b68", size = 350094, upload-time = "2025-10-17T18:03:33.336Z" }, + { url = "https://files.pythonhosted.org/packages/c2/99/53c08562bc171a618fa1699297164f8885e66cde38c3b30f454730d0c488/pyodbc-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3cc472c8ae2feea5b4512e23b56e2b093d64f7cbc4b970af51da488429ff7818", size = 1301029, upload-time = "2025-10-17T18:03:34.561Z" }, + { url = "https://files.pythonhosted.org/packages/d8/10/68a0b5549876d4b53ba4c46eed2a7aca32d589624ed60beef5bd7382619e/pyodbc-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c79df54bbc25bce9f2d87094e7b39089c28428df5443d1902b0cc5f43fd2da6f", size = 1361420, upload-time = "2025-10-17T18:03:35.958Z" }, + { url = "https://files.pythonhosted.org/packages/41/0f/9dfe4987283ffcb981c49a002f0339d669215eb4a3fe4ee4e14537c52852/pyodbc-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c2eb0b08e24fe5c40c7ebe9240c5d3bd2f18cd5617229acee4b0a0484dc226f2", size = 63399, upload-time = "2025-10-17T18:03:36.931Z" }, + { url = "https://files.pythonhosted.org/packages/56/03/15dcefe549d3888b649652af7cca36eda97c12b6196d92937ca6d11306e9/pyodbc-5.3.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:01166162149adf2b8a6dc21a212718f205cabbbdff4047dc0c415af3fd85867e", size = 70133, upload-time = "2025-10-17T18:03:38.47Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c1/c8b128ae59a14ecc8510e9b499208e342795aecc3af4c3874805c720b8db/pyodbc-5.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:363311bd40320b4a61454bebf7c38b243cd67c762ed0f8a5219de3ec90c96353", size = 64683, upload-time = "2025-10-17T18:03:39.68Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f2/c26d82a7ce1e90b8bbb8731d3d53de73814e2f6606b9db9d978303aa8d5f/pyodbc-5.3.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3f1bdb3ce6480a17afaaef4b5242b356d4997a872f39e96f015cabef00613797", size = 73513, upload-time = "2025-10-17T18:03:40.536Z" }, + { url = "https://files.pythonhosted.org/packages/82/d5/1ab1b7c4708cbd701990a8f7183c5bb5e0712d5e8479b919934e46dadab4/pyodbc-5.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7713c740a10f33df3cb08f49a023b7e1e25de0c7c99650876bbe717bc95ee780", size = 72631, upload-time = "2025-10-17T18:03:41.713Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/7e3831eeac2b09b31a77e6b3495491ce162035ff2903d7261b49d35aa3c2/pyodbc-5.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf18797a12e70474e1b7f5027deeeccea816372497e3ff2d46b15bec2d18a0cc", size = 344580, upload-time = "2025-10-17T18:03:42.67Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a6/71d26d626a3c45951620b7ff356ec920e420f0e09b0a924123682aa5e4ab/pyodbc-5.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:08b2439500e212625471d32f8fde418075a5ddec556e095e5a4ba56d61df2dc6", size = 350224, upload-time = "2025-10-17T18:03:43.731Z" }, + { url = "https://files.pythonhosted.org/packages/93/14/f702c5e8c2d595776266934498505f11b7f1545baf21ffec1d32c258e9d3/pyodbc-5.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:729c535341bb09c476f219d6f7ab194bcb683c4a0a368010f1cb821a35136f05", size = 1301503, upload-time = "2025-10-17T18:03:45.013Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b2/ad92ebdd1b5c7fec36b065e586d1d34b57881e17ba5beec5c705f1031058/pyodbc-5.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c67e7f2ce649155ea89beb54d3b42d83770488f025cf3b6f39ca82e9c598a02e", size = 1361050, upload-time = "2025-10-17T18:03:46.298Z" }, + { url = "https://files.pythonhosted.org/packages/19/40/dc84e232da07056cb5aaaf5f759ba4c874bc12f37569f7f1670fc71e7ae1/pyodbc-5.3.0-cp314-cp314-win32.whl", hash = "sha256:a48d731432abaee5256ed6a19a3e1528b8881f9cb25cb9cf72d8318146ea991b", size = 65670, upload-time = "2025-10-17T18:03:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/b8/79/c48be07e8634f764662d7a279ac204f93d64172162dbf90f215e2398b0bd/pyodbc-5.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:58635a1cc859d5af3f878c85910e5d7228fe5c406d4571bffcdd281375a54b39", size = 72177, upload-time = "2025-10-17T18:03:57.296Z" }, + { url = "https://files.pythonhosted.org/packages/fc/79/e304574446b2263f428ce14df590ba52c2e0e0205e8d34b235b582b7d57e/pyodbc-5.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:754d052030d00c3ac38da09ceb9f3e240e8dd1c11da8906f482d5419c65b9ef5", size = 66668, upload-time = "2025-10-17T18:03:58.174Z" }, + { url = "https://files.pythonhosted.org/packages/43/17/f4eabf443b838a2728773554017d08eee3aca353102934a7e3ba96fb0e31/pyodbc-5.3.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f927b440c38ade1668f0da64047ffd20ec34e32d817f9a60d07553301324b364", size = 75780, upload-time = "2025-10-17T18:03:47.273Z" }, + { url = "https://files.pythonhosted.org/packages/59/ea/e79e168c3d38c27d59d5d96273fd9e3c3ba55937cc944c4e60618f51de90/pyodbc-5.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:25c4cfb2c08e77bc6e82f666d7acd52f0e52a0401b1876e60f03c73c3b8aedc0", size = 75503, upload-time = "2025-10-17T18:03:48.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/81/d1d7c125ec4a20e83fdc28e119b8321192b2bd694f432cf63e1199b2b929/pyodbc-5.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc834567c2990584b9726cba365834d039380c9dbbcef3030ddeb00c6541b943", size = 398356, upload-time = "2025-10-17T18:03:49.131Z" }, + { url = "https://files.pythonhosted.org/packages/5e/fc/f6be4b3cc3910f8c2aba37aa41671121fd6f37b402ae0fefe53a70ac7cd5/pyodbc-5.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8339d3094858893c1a68ee1af93efc4dff18b8b65de54d99104b99af6306320d", size = 397291, upload-time = "2025-10-17T18:03:50.18Z" }, + { url = "https://files.pythonhosted.org/packages/03/2e/0610b1ed05a5625528d52f6cece9610e84617d35f475c89c2a52f66d13f7/pyodbc-5.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74528fe148980d0c735c0ebb4a4dc74643ac4574337c43c1006ac4d09593f92d", size = 1353900, upload-time = "2025-10-17T18:03:51.339Z" }, + { url = "https://files.pythonhosted.org/packages/1d/f1/43497e1d37f9f71b43b2b3172e7b1bdf50851e278390c3fb6b46a3630c53/pyodbc-5.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d89a7f2e24227150c13be8164774b7e1f9678321a4248f1356a465b9cc17d31e", size = 1406062, upload-time = "2025-10-17T18:03:52.546Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/88a1277c2f7d9ab1cec0a71e074ba24fd4a1710a43974682546da90a1343/pyodbc-5.3.0-cp314-cp314t-win32.whl", hash = "sha256:af4d8c9842fc4a6360c31c35508d6594d5a3b39922f61b282c2b4c9d9da99514", size = 70132, upload-time = "2025-10-17T18:03:53.715Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c7/ee98c62050de4aa8bafb6eb1e11b95e0b0c898bd5930137c6dc776e06a9b/pyodbc-5.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bfeb3e34795d53b7d37e66dd54891d4f9c13a3889a8f5fe9640e56a82d770955", size = 79452, upload-time = "2025-10-17T18:03:54.664Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/8f/d8889efd96bbe8e5d43ff9701f6b1565a8e09c3e1f58c388d550724f777b/pyodbc-5.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:13656184faa3f2d5c6f19b701b8f247342ed581484f58bf39af7315c054e69db", size = 70142, upload-time = "2025-10-17T18:03:55.551Z" }, + { url = "https://files.pythonhosted.org/packages/98/21/879440a55360075137bf125103e01b2722e2fac8ff65ba5fe4fd4c5ec63a/pyodbc-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0263323fc47082c2bf02562f44149446bbbfe91450d271e44bffec0c3143bfb1", size = 71965, upload-time = "2025-10-17T18:03:59.128Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c9/cd3cf6b8070a0c604cf83d46319390d9fc56405a91c9bcf96706e0d4d507/pyodbc-5.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:452e7911a35ee12a56b111ac5b596d6ed865b83fcde8427127913df53132759e", size = 71919, upload-time = "2025-10-17T18:03:59.999Z" }, + { url = "https://files.pythonhosted.org/packages/10/6c/8df5a61060f49b82977668a850015e182353a1e953d03dc4ddd5854270d2/pyodbc-5.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b35b9983ad300e5aea82b8d1661fc9d3afe5868de527ee6bd252dd550e61ecd6", size = 314623, upload-time = "2025-10-17T18:04:00.963Z" }, + { url = "https://files.pythonhosted.org/packages/91/5e/793834aa203766008bbd503abdd86d610b01e35cff3d3f7680d91dbc353f/pyodbc-5.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e981db84fee4cebec67f41bd266e1e7926665f1b99c3f8f4ea73cd7f7666e381", size = 318809, upload-time = "2025-10-17T18:04:02.055Z" }, + { url = "https://files.pythonhosted.org/packages/9b/64/5b14a07efb7a3bbe7672572335d85af8805c2031853db416ffb6f01dfc7f/pyodbc-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:25b6766e56748eb1fc1d567d863e06cbb7b7c749a41dfed85db0031e696fa39a", size = 1282409, upload-time = "2025-10-17T18:04:03.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/6a/efd1ab7351681610659986f379ba0778c2035eadc62f1260cf537be4b9d9/pyodbc-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2eb7151ed0a1959cae65b6ac0454f5c8bbcd2d8bafeae66483c09d58b0c7a7fc", size = 1340280, upload-time = "2025-10-17T18:04:05.228Z" }, + { url = "https://files.pythonhosted.org/packages/36/88/4b8fc797de1a792bc3de60bcf2845c3b802347d280bc6608425f06d703dc/pyodbc-5.3.0-cp39-cp39-win32.whl", hash = "sha256:fc5ac4f2165f7088e74ecec5413b5c304247949f9702c8853b0e43023b4187e8", size = 63065, upload-time = "2025-10-17T18:04:06.351Z" }, + { url = "https://files.pythonhosted.org/packages/41/43/87bfbeaa36f60ef4be56a5ce3869b35251bebfe443a0a357f1a32ce6794e/pyodbc-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:c25dc9c41f61573bdcf61a3408c34b65e4c0f821b8f861ca7531b1353b389804", size = 69586, upload-time = "2025-10-17T18:04:07.291Z" }, + { url = "https://files.pythonhosted.org/packages/99/2f/9c1ead06516e492b8cfab35605972bc8d306ad728adea53b280d6a7e4f87/pyodbc-5.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:101313a21d2654df856a60e4a13763e4d9f6c5d3fd974bcf3fc6b4e86d1bbe8e", size = 64467, upload-time = "2025-10-17T18:04:08.169Z" }, +] + [[package]] name = "pytest" version = "8.4.1" @@ -1231,11 +1314,15 @@ dependencies = [ [package.optional-dependencies] all = [ + { name = "aioodbc" }, { name = "aiosqlite" }, { name = "asyncpg" }, { name = "oracledb" }, { name = "psycopg", extra = ["binary", "pool"] }, ] +mssql-aioodbc = [ + { name = "aioodbc" }, +] oracle-oracledb = [ { name = "oracledb" }, ] @@ -1269,6 +1356,7 @@ dev = [ [package.metadata] requires-dist = [ + { name = "aioodbc", marker = "extra == 'mssql-aioodbc'" }, { name = "aiosqlite", marker = "extra == 'sqlite-aiosqlite'" }, { name = "anyio", specifier = ">=4" }, { name = "asyncpg", marker = "extra == 'postgresql-asyncpg'" }, @@ -1276,9 +1364,9 @@ requires-dist = [ { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'postgresql-psycopg'" }, { name = 
"sqlalchemy", specifier = ">=2" }, { name = "taskiq", specifier = ">=0.11.7" }, - { name = "taskiq-sqlalchemy", extras = ["postgresql-asyncpg", "postgresql-psycopg", "sqlite-aiosqlite", "oracle-oracledb"], marker = "extra == 'all'" }, + { name = "taskiq-sqlalchemy", extras = ["postgresql-asyncpg", "postgresql-psycopg", "sqlite-aiosqlite", "oracle-oracledb", "mssql-aioodbc"], marker = "extra == 'all'" }, ] -provides-extras = ["all", "postgresql-asyncpg", "postgresql-psycopg", "sqlite-aiosqlite", "oracle-oracledb"] +provides-extras = ["all", "postgresql-asyncpg", "postgresql-psycopg", "sqlite-aiosqlite", "oracle-oracledb", "mssql-aioodbc"] [package.metadata.requires-dev] dev = [ From 0025529cc4d63cc1d089f791208eb19f7cd78a97 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 13:18:00 +0530 Subject: [PATCH 18/25] feat: Add broker dialect for SQL Server using Service Brokers --- docker/docker-compose.yml | 42 +++- docker/initdb.d/01-mssql-init.sql | 12 ++ taskiq_sqlalchemy/adapters/__init__.py | 15 +- taskiq_sqlalchemy/adapters/mssql.py | 268 +++++++++++++++++++++++++ tests/broker/adapters/test_mssql.py | 205 +++++++++++++++++++ tests/result_backend/conftest.py | 5 + 6 files changed, 543 insertions(+), 4 deletions(-) create mode 100644 docker/initdb.d/01-mssql-init.sql create mode 100644 taskiq_sqlalchemy/adapters/mssql.py create mode 100644 tests/broker/adapters/test_mssql.py diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index ca1d8a6..847c253 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -26,7 +26,7 @@ services: ORACLE_PASSWORD: "PasswoRd" volumes: - oracle_data:/opt/oracle/oradata - - ./initdb.d:/container-entrypoint-initdb.d:ro + - ./initdb.d/01-oracle-grants.sql:/container-entrypoint-initdb.d/01-oracle-grants.sql:ro ports: - 1521:1521 - 5500:5500 @@ -38,9 +38,49 @@ services: networks: - default + mssql: + image: mcr.microsoft.com/mssql/server:2019-latest + restart: always + environment: + 
SA_PASSWORD: "gOxN5hbl7geTwgvS" + ACCEPT_EULA: "Y" + MSSQL_PID: "Developer" + volumes: + - mssql_data:/var/opt/mssql + - ./initdb.d/01-mssql-init.sql:/docker-entrypoint-initdb.d/01-mssql-init.sql:ro + ports: + - 1433:1433 + healthcheck: + test: + [ + "CMD-SHELL", + "/opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P gOxN5hbl7geTwgvS -C -Q 'SELECT 1' || exit 1", + ] + interval: 10s + retries: 10 + start_period: 10s + timeout: 3s + networks: + - default + mssql.configurator: + image: mcr.microsoft.com/mssql/server:2019-latest + volumes: + - ./initdb.d/01-mssql-init.sql:/docker-entrypoint-initdb.d/01-mssql-init.sql:ro + depends_on: + mssql: + condition: service_healthy + command: > + bash -c ' + /opt/mssql-tools18/bin/sqlcmd -S mssql -U sa -P gOxN5hbl7geTwgvS -C -i docker-entrypoint-initdb.d/01-mssql-init.sql; + echo "All done!"; + ' + networks: + - default + volumes: postgres_data: oracle_data: + mssql_data: networks: default: driver: bridge diff --git a/docker/initdb.d/01-mssql-init.sql b/docker/initdb.d/01-mssql-init.sql new file mode 100644 index 0000000..7de1062 --- /dev/null +++ b/docker/initdb.d/01-mssql-init.sql @@ -0,0 +1,12 @@ +IF NOT EXISTS (SELECT 1 FROM sys.databases WHERE name = 'taskiq') CREATE DATABASE taskiq; +GO + +USE [taskiq]; +GO + +IF NOT EXISTS (SELECT * FROM sys.sql_logins WHERE name = 'taskiq_user') +BEGIN + CREATE LOGIN [taskiq_user] WITH PASSWORD = 'gOxN5hbl7geTwgvS', CHECK_POLICY = OFF; + ALTER SERVER ROLE [sysadmin] ADD MEMBER [taskiq_user]; +END +GO diff --git a/taskiq_sqlalchemy/adapters/__init__.py b/taskiq_sqlalchemy/adapters/__init__.py index 3eb703b..f23e308 100644 --- a/taskiq_sqlalchemy/adapters/__init__.py +++ b/taskiq_sqlalchemy/adapters/__init__.py @@ -1,5 +1,6 @@ import typing as t +from taskiq_sqlalchemy.adapters.mssql import MSSQLDialectAdapter from taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter from taskiq_sqlalchemy.adapters.polling import PollingAdapter from taskiq_sqlalchemy.adapters.postgresql import 
PostgresDialectAdapter @@ -8,16 +9,24 @@ def resolve_adapter( manager: SQLAlchemyManager, -) -> t.Union[PostgresDialectAdapter, OracleDialectAdapter, PollingAdapter]: - if manager.engine.dialect.name == "postgresql": +) -> t.Union[PostgresDialectAdapter, OracleDialectAdapter, MSSQLDialectAdapter, PollingAdapter]: + dialect = manager.engine.dialect.name + + if dialect == "postgresql": from taskiq_sqlalchemy.adapters.postgresql import PostgresDialectAdapter # noqa: PLC0415 return PostgresDialectAdapter(manager.engine) - if manager.engine.dialect.name == "oracle": + + if dialect == "oracle": from taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter # noqa: PLC0415 return OracleDialectAdapter(manager.engine) + if dialect == "mssql": + from taskiq_sqlalchemy.adapters.mssql import MSSQLDialectAdapter # noqa: PLC0415 + + return MSSQLDialectAdapter(manager.engine) + from taskiq_sqlalchemy.adapters.polling import PollingAdapter # noqa: PLC0415 return PollingAdapter(manager.engine, queue_cls=manager.queue_cls) diff --git a/taskiq_sqlalchemy/adapters/mssql.py b/taskiq_sqlalchemy/adapters/mssql.py new file mode 100644 index 0000000..1d5d5ff --- /dev/null +++ b/taskiq_sqlalchemy/adapters/mssql.py @@ -0,0 +1,268 @@ +"""taskiq_sqlalchemy.adapters.mssql + +MSSQLDialectAdapter — pub/sub for Microsoft SQL Server using Service Broker. +""" + +import logging +import typing as t + +import anyio +import sqlalchemy as sa +from sqlalchemy.ext.asyncio import AsyncEngine + +from taskiq_sqlalchemy.adapters.abc import DialectAdapter + + +logger = logging.getLogger(__name__) + +# Service Broker object name limits +_MAX_SB_NAME = 128 +_SB_QUEUE_PREFIX = "taskiq_sb_" +_SB_SERVICE_PREFIX = "taskiq_svc_" + +# How long WAITFOR RECEIVE blocks before timing out (milliseconds). +# Keep short so the stop-event check is responsive. 
+_WAITFOR_TIMEOUT_MS = 500 + + +def _sb_queue_name(channel: str) -> str: + """Derive a safe Service Broker queue name from a channel name.""" + raw = f"{_SB_QUEUE_PREFIX}{channel}" + return raw[:_MAX_SB_NAME] + + +def _sb_service_name(channel: str) -> str: + """Derive a safe Service Broker service name from a channel name.""" + raw = f"{_SB_SERVICE_PREFIX}{channel}" + return raw[:_MAX_SB_NAME] + + +class MSSQLDialectAdapter(DialectAdapter): + """ + SQL Server Service Broker adapter for taskiq-sqlalchemy. + + One long-lived conversation handle is maintained per channel. + """ + + # channel → GUID conversation handle (str) + _conv_handles: dict[str, str] + _stop_event: anyio.Event + + def __init__(self, engine: AsyncEngine) -> None: + super().__init__(engine) + self._conv_handles = {} + self._stop_event = anyio.Event() + + async def client_startup(self) -> None: + """Ensure the default 'taskiq' channel SB objects exist on startup.""" + await self.ensure_queue("taskiq") + + async def broker_shutdown(self) -> None: + self._stop_event.set() + + async def worker_startup(self) -> None: + """Open a long-lived conversation handle for the default channel.""" + await self.ensure_queue("taskiq") + await self._open_conversation("taskiq") + + async def worker_shutdown(self) -> None: + """End all open conversations and signal the listen() loops to stop.""" + self._stop_event.set() + for channel in list(self._conv_handles): + await self._end_conversation(channel) + + async def ensure_queue(self, channel: str) -> None: + """ + Create the Service Broker queue and service for *channel* if they do + not already exist. + + Safe to call multiple times — uses ``IF NOT EXISTS`` guards via + ``sys.service_queues`` / ``sys.services``. 
+ """ + queue_name = _sb_queue_name(channel) + service_name = _sb_service_name(channel) + + async with self.engine.begin() as conn: + # Create the queue + await conn.execute( + sa.text( + "IF NOT EXISTS (" + " SELECT 1 FROM sys.service_queues" + " WHERE name = :queue_name" + ") " + "EXEC('CREATE QUEUE [{queue_name}]')".replace("{queue_name}", queue_name) + ), + {"queue_name": queue_name}, + ) + + # Create the service bound to the queue + await conn.execute( + sa.text( + "IF NOT EXISTS (" + " SELECT 1 FROM sys.services" + " WHERE name = :service_name" + ") " + "EXEC('CREATE SERVICE [{service_name}]" + " ON QUEUE [{queue_name}]" + " ([DEFAULT])')".replace("{service_name}", service_name).replace("{queue_name}", queue_name) + ), + { + "service_name": service_name, + "queue_name": queue_name, + }, + ) + + logger.info( + "MSSQLDialectAdapter: ensured SB queue %r and service %r", + queue_name, + service_name, + ) + + async def _open_conversation(self, channel: str) -> str: + """ + Begin a Service Broker dialog conversation and cache the handle. + + A conversation is required before ``SEND ON CONVERSATION`` can be + called. We open one per channel and reuse it for all ``notify()`` + calls on that channel. 
+ """ + if channel in self._conv_handles: + return self._conv_handles[channel] + + service_name = _sb_service_name(channel) + + async with self.engine.begin() as conn: + result = await conn.execute( + sa.text( + "DECLARE @conv_handle UNIQUEIDENTIFIER; " + "BEGIN DIALOG CONVERSATION @conv_handle " + " FROM SERVICE :service_name " + " TO SERVICE :service_name " + " ON CONTRACT [DEFAULT] " + " WITH ENCRYPTION = OFF; " + "SELECT @conv_handle AS conv_handle;" + ), + {"service_name": service_name}, + ) + row = result.fetchone() + + if row is None: + raise RuntimeError(f"MSSQLDialectAdapter: failed to open conversation for channel {channel!r}") + + handle = str(row.conv_handle) + self._conv_handles[channel] = handle + logger.debug( + "MSSQLDialectAdapter: opened conversation %r on channel %r", + handle, + channel, + ) + return handle + + async def _end_conversation(self, channel: str) -> None: + """End the cached conversation for *channel* and remove it.""" + handle = self._conv_handles.pop(channel, None) + if handle is None: + return + + try: + async with self.engine.begin() as conn: + await conn.execute( + sa.text("END CONVERSATION :conv_handle WITH CLEANUP;"), + {"conv_handle": handle}, + ) + except Exception: + logger.exception("MSSQLDialectAdapter: could not end conversation %r (may already be closed)", handle) + else: + logger.debug( + "MSSQLDialectAdapter: ended conversation %r on channel %r", + handle, + channel, + ) + + async def notify(self, channel: str, payload: str) -> None: + """ + Send a Service Broker message on *channel* carrying *payload*. + + The payload is the ``task_id`` string, encoded as UTF-8. + The message is sent inside the same transaction that committed the + ``taskiq_queue`` row — but since ``kick()`` commits the queue row + first and then calls ``notify()``, we open a new short transaction + here. The SEND is committed immediately so the worker's WAITFOR + RECEIVE sees it without delay. 
+ + If no conversation handle exists for the channel yet, one is opened + on the first call. This covers the case where ``notify()`` is + called from a client process that never called ``worker_startup()``. + """ + # Acquire or open a conversation handle + handle = await self._open_conversation(channel) + + async with self.engine.begin() as conn: + await conn.execute( + sa.text("SEND ON CONVERSATION :conv_handle MESSAGE TYPE [DEFAULT] (:payload);"), + { + "conv_handle": handle, + "payload": payload.encode(), + }, + ) + + logger.debug( + "MSSQLDialectAdapter: sent task_id=%r on channel %r (conv=%r)", + payload, + channel, + handle, + ) + + def listen(self, channel: str) -> t.AsyncGenerator[str, None]: + return self._listen_gen(channel) + + async def _listen_gen(self, channel: str) -> t.AsyncGenerator[str, None]: + """ + WAITFOR RECEIVE loop. + + Blocks at most ``_WAITFOR_TIMEOUT_MS`` milliseconds per iteration, + then checks the stop event. When a message arrives it is committed + (removing it from the SB queue) and the payload (task_id) is yielded. + """ + queue_name = _sb_queue_name(channel) + + logger.debug( + "MSSQLDialectAdapter: starting WAITFOR RECEIVE loop on queue %r", + queue_name, + ) + + while not self._stop_event.is_set(): + async with self.engine.begin() as conn: + result = await conn.execute( + sa.text( + "WAITFOR (" + " RECEIVE TOP(1)" + " conversation_handle," + " message_body" + " FROM [{queue_name}]" + "), TIMEOUT :timeout_ms;".replace("{queue_name}", queue_name) + ), + {"timeout_ms": _WAITFOR_TIMEOUT_MS}, + ) + row = result.fetchone() + # The transaction commits here, removing the message from the + # SB queue and preventing re-delivery. + + if row is None: + # Timeout — no message arrived; loop back and check stop event. + continue + + # Decode the RAW message body back to a task_id string. 
+ raw_body: bytes = row.message_body + payload = raw_body.decode() + logger.debug( + "MSSQLDialectAdapter: received task_id=%r from queue %r", + payload, + queue_name, + ) + yield payload + + logger.debug( + "MSSQLDialectAdapter: WAITFOR loop exited for queue %r", + queue_name, + ) diff --git a/tests/broker/adapters/test_mssql.py b/tests/broker/adapters/test_mssql.py new file mode 100644 index 0000000..28460c4 --- /dev/null +++ b/tests/broker/adapters/test_mssql.py @@ -0,0 +1,205 @@ +"""tests/broker/adapters/test_mssql.py + +Tests for ``MSSQLDialectAdapter`` in isolation. +""" + +import typing as t +from unittest.mock import MagicMock + +import anyio +import pytest +import sqlalchemy as sa +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine + +from taskiq_sqlalchemy.adapters import resolve_adapter +from taskiq_sqlalchemy.adapters.mssql import ( + MSSQLDialectAdapter, + _MAX_SB_NAME, + _sb_queue_name, + _sb_service_name, +) +from taskiq_sqlalchemy.manager import SQLAlchemyManager + + +_MSSQL_URL = "mssql+aioodbc://sa:gOxN5hbl7geTwgvS@localhost:1433/taskiq?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes" + + +@pytest.fixture +async def mssql_engine() -> t.AsyncGenerator[AsyncEngine, None]: + + engine = create_async_engine(_MSSQL_URL) + try: + yield engine + finally: + await engine.dispose() + + +@pytest.fixture +def mssql_channel(task_id: str) -> str: + """ + Derive a test-unique SB channel name from the test's task_id UUID. + + SB names are capped at 128 chars. We take the last 16 hex digits of the + UUID (24 chars total with "mssql_test_" prefix). 
+ """ + short = task_id.replace("-", "")[-16:].upper() + return f"mssql_test_{short}" + + +@pytest.fixture +async def mssql_adapter( + mssql_engine: AsyncEngine, + mssql_channel: str, +) -> t.AsyncGenerator[MSSQLDialectAdapter, None]: + adapter = MSSQLDialectAdapter(mssql_engine) + await adapter.ensure_queue(mssql_channel) + try: + yield adapter + finally: + adapter._stop_event.set() + for channel in list(adapter._conv_handles): + await adapter._end_conversation(channel) + + +@pytest.mark.parametrize( + ("channel", "expected"), + [ + ("taskiq", "taskiq_sb_taskiq"), + ("my-channel", "taskiq_sb_my-channel"), + # Truncation: result must be ≤ _MAX_SB_NAME chars + ("a" * 200, "taskiq_sb_" + "a" * (_MAX_SB_NAME - len("taskiq_sb_"))), + ], +) +def test_sb_queue_name(channel: str, expected: str) -> None: + """_sb_queue_name: prefix + channel, truncated to _MAX_SB_NAME.""" + result = _sb_queue_name(channel) + assert result == expected + assert len(result) <= _MAX_SB_NAME + + +@pytest.mark.parametrize( + ("channel", "expected"), + [ + ("taskiq", "taskiq_svc_taskiq"), + ("my-channel", "taskiq_svc_my-channel"), + ("a" * 200, "taskiq_svc_" + "a" * (_MAX_SB_NAME - len("taskiq_svc_"))), + ], +) +def test_sb_service_name(channel: str, expected: str) -> None: + """_sb_service_name: prefix + channel, truncated to _MAX_SB_NAME.""" + result = _sb_service_name(channel) + assert result == expected + assert len(result) <= _MAX_SB_NAME + + +def test_resolve_adapter_returns_mssql() -> None: + """resolve_adapter() must return MSSQLDialectAdapter for the mssql dialect.""" + engine = MagicMock() + engine.dialect.name = "mssql" + + manager = SQLAlchemyManager.__new__(SQLAlchemyManager) + manager._engine = engine + manager.queue_cls = MagicMock() + manager.result_cls = MagicMock() + manager.schedule_cls = MagicMock() + + adapter = resolve_adapter(manager) + assert isinstance(adapter, MSSQLDialectAdapter) + + +@pytest.mark.mssql +@pytest.mark.anyio +async def test_ensure_queue_idempotent( + 
mssql_adapter: MSSQLDialectAdapter, + mssql_channel: str, +) -> None: + """Calling ensure_queue() a second time must not raise.""" + await mssql_adapter.ensure_queue(mssql_channel) + + +@pytest.mark.mssql +@pytest.mark.anyio +async def test_notify_sends_message( + mssql_adapter: MSSQLDialectAdapter, + mssql_engine: AsyncEngine, + mssql_channel: str, +) -> None: + """ + notify() must place a message on the SB queue that RECEIVE can read. + """ + payload = "task-id-mssql-notify" + queue_name = _sb_queue_name(mssql_channel) + + await mssql_adapter.notify(mssql_channel, payload) + + async with mssql_engine.begin() as conn: + result = await conn.execute( + sa.text(f"WAITFOR ( RECEIVE TOP(1) message_body FROM [{queue_name}]), TIMEOUT 3000;") + ) + row = result.fetchone() + + assert row is not None, "Expected a message in the SB queue after notify()" + assert row.message_body.decode("utf-8") == payload + + +@pytest.mark.mssql +@pytest.mark.anyio +async def test_listen_yields_notified_payload( + mssql_adapter: MSSQLDialectAdapter, + mssql_channel: str, +) -> None: + """listen() must yield the payload sent by notify().""" + payload = "task-id-listen-test" + received: list[str] = [] + + await mssql_adapter.notify(mssql_channel, payload) + + async def _collect() -> None: + async for p in mssql_adapter.listen(mssql_channel): + received.append(p) + mssql_adapter._stop_event.set() + return + + with anyio.fail_after(10.0): + await _collect() + + assert received == [payload] + + +@pytest.mark.mssql +@pytest.mark.anyio +async def test_listen_stops_on_stop_event( + mssql_engine: AsyncEngine, + mssql_channel: str, +) -> None: + """ + A pre-set _stop_event causes listen() to exit immediately + without yielding anything (empty queue, short timeout). 
+ """ + adapter = MSSQLDialectAdapter(mssql_engine) + await adapter.ensure_queue(mssql_channel) + adapter._stop_event.set() + + collected: list[str] = [] + with anyio.fail_after(5.0): + collected.extend([p async for p in adapter.listen(mssql_channel)]) + + assert collected == [] + + +@pytest.mark.mssql +@pytest.mark.anyio +async def test_worker_shutdown_ends_conversations( + mssql_engine: AsyncEngine, + mssql_channel: str, +) -> None: + """worker_shutdown() must end all cached conversations and clear _conv_handles.""" + adapter = MSSQLDialectAdapter(mssql_engine) + await adapter.ensure_queue(mssql_channel) + await adapter.worker_startup() + + assert len(adapter._conv_handles) >= 1 + + await adapter.worker_shutdown() + + assert adapter._conv_handles == {} diff --git a/tests/result_backend/conftest.py b/tests/result_backend/conftest.py index 1051ce5..8369cc8 100644 --- a/tests/result_backend/conftest.py +++ b/tests/result_backend/conftest.py @@ -31,6 +31,11 @@ id="oracle+oracledb", marks=pytest.mark.oracle, ), + pytest.param( + "mssql+aioodbc://sa:gOxN5hbl7geTwgvS@localhost:1433/taskiq?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes", + id="mssql+aioodbc", + marks=pytest.mark.mssql, + ), ] From ade08e2b9c017276b36ec05598722e409836266f Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 14:30:21 +0530 Subject: [PATCH 19/25] refactor: Standardize credentials across test db setups --- docker/docker-compose.yml | 4 ++-- docker/initdb.d/01-oracle-grants.sql | 2 +- tests/broker/adapters/test_oracle.py | 2 +- tests/broker/adapters/test_postgresql.py | 2 +- tests/broker/conftest.py | 11 ++++++++--- tests/result_backend/conftest.py | 6 +++--- 6 files changed, 16 insertions(+), 11 deletions(-) diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 847c253..7a1ac1b 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -4,7 +4,7 @@ services: restart: always environment: POSTGRES_USER: taskiq_user - 
POSTGRES_PASSWORD: taskiq_pwd + POSTGRES_PASSWORD: "gOxN5hbl7geTwgvS" POSTGRES_DB: taskiq volumes: - postgres_data:/var/lib/postgresql/data/ @@ -23,7 +23,7 @@ services: restart: always environment: ORACLE_DATABASE: taskiq - ORACLE_PASSWORD: "PasswoRd" + ORACLE_PASSWORD: "gOxN5hbl7geTwgvS" volumes: - oracle_data:/opt/oracle/oradata - ./initdb.d/01-oracle-grants.sql:/container-entrypoint-initdb.d/01-oracle-grants.sql:ro diff --git a/docker/initdb.d/01-oracle-grants.sql b/docker/initdb.d/01-oracle-grants.sql index 128321a..af25571 100644 --- a/docker/initdb.d/01-oracle-grants.sql +++ b/docker/initdb.d/01-oracle-grants.sql @@ -2,7 +2,7 @@ ALTER SESSION SET CONTAINER=taskiq; -CREATE USER taskiq_user IDENTIFIED BY taskiq_pwd; +CREATE USER taskiq_user IDENTIFIED BY gOxN5hbl7geTwgvS; -- Basic permissions GRANT CREATE SESSION TO taskiq_user; diff --git a/tests/broker/adapters/test_oracle.py b/tests/broker/adapters/test_oracle.py index 400824f..f170918 100644 --- a/tests/broker/adapters/test_oracle.py +++ b/tests/broker/adapters/test_oracle.py @@ -23,7 +23,7 @@ from taskiq_sqlalchemy.adapters.oracle import OracleDialectAdapter, _oracle_queue_name -_ORA_URL = "oracle+oracledb://taskiq_user:taskiq_pwd@localhost:1521/?service_name=taskiq" +_ORA_URL = "oracle+oracledb://taskiq_user:gOxN5hbl7geTwgvS@localhost:1521/?service_name=taskiq" @pytest.fixture diff --git a/tests/broker/adapters/test_postgresql.py b/tests/broker/adapters/test_postgresql.py index 1bbfc9b..95ea130 100644 --- a/tests/broker/adapters/test_postgresql.py +++ b/tests/broker/adapters/test_postgresql.py @@ -15,7 +15,7 @@ pytestmark = [pytest.mark.anyio, pytest.mark.postgresql] -_PG_URL = "postgresql+asyncpg://taskiq_user:taskiq_pwd@localhost:5432/taskiq" +_PG_URL = "postgresql+asyncpg://taskiq_user:gOxN5hbl7geTwgvS@localhost:5432/taskiq" @pytest.fixture diff --git a/tests/broker/conftest.py b/tests/broker/conftest.py index 08b9f6d..1ec99cd 100644 --- a/tests/broker/conftest.py +++ b/tests/broker/conftest.py @@ 
-53,20 +53,25 @@ def feed(self, task_id: str) -> None: id="sqlite+aiosqlite", ), pytest.param( - "postgresql+asyncpg://taskiq_user:taskiq_pwd@localhost:5432/taskiq", + "postgresql+asyncpg://taskiq_user:gOxN5hbl7geTwgvS@localhost:5432/taskiq", id="postgresql+asyncpg", marks=pytest.mark.postgresql, ), pytest.param( - "postgresql+psycopg://taskiq_user:taskiq_pwd@localhost:5432/taskiq", + "postgresql+psycopg://taskiq_user:gOxN5hbl7geTwgvS@localhost:5432/taskiq", id="postgresql+psycopg", marks=pytest.mark.postgresql, ), pytest.param( - "oracle+oracledb://taskiq_user:taskiq_pwd@localhost:1521/?service_name=taskiq", + "oracle+oracledb://taskiq_user:gOxN5hbl7geTwgvS@localhost:1521/?service_name=taskiq", id="oracle+oracledb", marks=pytest.mark.oracle, ), + pytest.param( + "mssql+aioodbc://sa:gOxN5hbl7geTwgvS@localhost:1433/taskiq?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes", + id="mssql+aioodbc", + marks=pytest.mark.mssql, + ), ] diff --git a/tests/result_backend/conftest.py b/tests/result_backend/conftest.py index 8369cc8..8f3e01a 100644 --- a/tests/result_backend/conftest.py +++ b/tests/result_backend/conftest.py @@ -17,17 +17,17 @@ id="sqlite+aiosqlite", ), pytest.param( - "postgresql+asyncpg://taskiq_user:taskiq_pwd@localhost:5432/taskiq", + "postgresql+asyncpg://taskiq_user:gOxN5hbl7geTwgvS@localhost:5432/taskiq", id="postgresql+asyncpg", marks=pytest.mark.postgresql, ), pytest.param( - "postgresql+psycopg://taskiq_user:taskiq_pwd@localhost:5432/taskiq", + "postgresql+psycopg://taskiq_user:gOxN5hbl7geTwgvS@localhost:5432/taskiq", id="postgresql+psycopg", marks=pytest.mark.postgresql, ), pytest.param( - "oracle+oracledb://taskiq_user:taskiq_pwd@localhost:1521/?service_name=taskiq", + "oracle+oracledb://taskiq_user:gOxN5hbl7geTwgvS@localhost:1521/?service_name=taskiq", id="oracle+oracledb", marks=pytest.mark.oracle, ), From 61e71c5d736f704e2902710b437baf6a9d0800c6 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 14:30:42 +0530 
Subject: [PATCH 20/25] chore: Add pytest to ci --- .github/workflows/ci.yml | 207 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 207 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 126dc21..b39772a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,8 +7,14 @@ on: pull_request: branches: - main + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.event.issue.number || github.ref }}-${{ github.event_name }} + cancel-in-progress: true + jobs: lint: + name: "Lint" runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 @@ -27,3 +33,204 @@ jobs: - name: Run ruff format run: uvx ruff@latest format --check . + + test-generic: + name: "Tests · Generic" + runs-on: ubuntu-latest + needs: lint + steps: + - uses: actions/checkout@v6 + + - name: Install uv + uses: astral-sh/setup-uv@v7 + + - name: Set up Python + run: uv python install 3.12 + + - name: Install dependencies + run: uv sync --dev + + - name: Run generic tests + run: | + uv run pytest tests/ -v \ + -m "not postgresql and not oracle and not mssql" \ + --strict-markers --maxfail=10 \ + --cov=taskiq_sqlalchemy \ + --cov-report=xml:coverage-generic.xml + + # - name: Upload coverage + # uses: codecov/codecov-action@v5 + # with: + # files: coverage-generic.xml + # flags: generic + # fail_ci_if_error: false + + test-postgresql: + name: "Tests · PostgreSQL" + runs-on: ubuntu-latest + needs: lint + + steps: + - uses: actions/checkout@v6 + + - name: Start PostgreSQL service + run: | + cd docker + docker compose up -d postgres + + - name: Install uv + uses: astral-sh/setup-uv@v7 + + - name: Set up Python + run: uv python install 3.12 + + - name: Install dependencies + run: uv sync --dev + + - name: Wait for PostgreSQL to be healthy + run: | + cd docker + echo "Waiting for PostgreSQL service to become healthy..." 
+ for i in $(seq 1 60); do + if [ "$(docker compose ps postgres --format json | jq -r '.Health')" = "healthy" ]; then + echo "PostgreSQL is healthy after ${i}s" + exit 0 + fi + sleep 2 + done + echo "PostgreSQL did not become healthy within 120 seconds" + exit 1 + + - name: Run postgresql tests + run: | + uv run pytest tests/ -v \ + -m "postgresql" \ + --strict-markers --maxfail=10 \ + --cov=taskiq_sqlalchemy \ + --cov-report=xml:coverage-postgresql.xml + + # - name: Upload coverage + # uses: codecov/codecov-action@v5 + # with: + # files: coverage-postgresql.xml + # flags: postgresql + # fail_ci_if_error: false + + test-oracle: + name: "Tests · Oracle" + runs-on: ubuntu-latest + needs: lint + + steps: + - uses: actions/checkout@v6 + + - name: Start Oracle service + run: | + cd docker + docker compose up -d oracle + + - name: Install uv + uses: astral-sh/setup-uv@v7 + + - name: Set up Python + run: uv python install 3.12 + + - name: Install dependencies + run: uv sync --dev + + - name: Wait for Oracle to be healthy + run: | + cd docker + echo "Waiting for Oracle service to become healthy..." 
+ for i in $(seq 1 60); do + if [ "$(docker compose ps oracle --format json | jq -r '.Health')" = "healthy" ]; then + echo "Oracle is healthy after ${i}s" + exit 0 + fi + sleep 2 + done + echo "Oracle did not become healthy within 120 seconds" + exit 1 + + - name: Run oracle tests + run: | + uv run pytest tests/ -v \ + -m "oracle" \ + --strict-markers --maxfail=10 \ + --cov=taskiq_sqlalchemy \ + --cov-report=xml:coverage-oracle.xml + + # - name: Upload coverage + # uses: codecov/codecov-action@v5 + # with: + # files: coverage-oracle.xml + # flags: oracle + # fail_ci_if_error: false + + test-mssql: + name: "Tests · MSSQL" + runs-on: ubuntu-latest + needs: lint + + steps: + - uses: actions/checkout@v6 + + - name: Start MSSQL service + run: | + cd docker + docker compose up -d mssql.configurator + + - name: Install uv + uses: astral-sh/setup-uv@v7 + + - name: Set up Python + run: uv python install 3.12 + + - name: Install dependencies + run: uv sync --dev + + - name: Install MS ODBC driver + run: | + # See: https://learn.microsoft.com/en-us/sql/connect/odbc/linux-mac/installing-the-microsoft-odbc-driver-for-sql-server + if ! [[ "18.04 20.04 22.04 24.04 25.10" == *"$(grep VERSION_ID /etc/os-release | cut -d '"' -f 2)"* ]]; + then + echo "Ubuntu $(grep VERSION_ID /etc/os-release | cut -d '"' -f 2) is not currently supported."; + exit; + fi + + curl -sSL -O https://packages.microsoft.com/config/ubuntu/$(grep VERSION_ID /etc/os-release | cut -d '"' -f 2)/packages-microsoft-prod.deb + sudo dpkg -i packages-microsoft-prod.deb + rm packages-microsoft-prod.deb + + sudo apt-get update + sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18 + echo "$PATH:/opt/mssql-tools18/bin" >> $GITHUB_PATH + + - name: Wait for MSSQL to be healthy + run: | + cd docker + echo "Waiting for MSSQL service to become healthy..." 
+ for i in $(seq 1 60); do + if [ "$(docker compose ps mssql --format json | jq -r '.Health')" = "healthy" ]; then + echo "MSSQL is healthy after ${i}s" + exit 0 + fi + sleep 2 + done + echo "MSSQL did not become healthy within 120 seconds" + exit 1 + + - name: Run mssql tests + run: | + uv run pytest tests/ -v \ + -m "mssql" \ + --strict-markers --maxfail=10 \ + --cov=taskiq_sqlalchemy \ + --cov-report=xml:coverage-mssql.xml + + # - name: Upload coverage + # uses: codecov/codecov-action@v5 + # with: + # files: coverage-mssql.xml + # flags: mssql + # fail_ci_if_error: false From b71d962ccd19f5e90a6319d0130246948f4b4ec3 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 15:23:18 +0530 Subject: [PATCH 21/25] deps: Add aioodbc to dev dependencies --- pyproject.toml | 1 + uv.lock | 2 ++ 2 files changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index e346e7e..d4f2135 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,6 +69,7 @@ dev = [ "asyncpg", # PostgreSQL asyncpg driver (optional in local dev) "psycopg", # PostgreSQL psycopg3 driver (optional in local dev) "oracledb", # OracleDB driver (optional in local dev) + "aioodbc", # SQL Server driver (optional in local dev) ] [tool.setuptools.packages.find] diff --git a/uv.lock b/uv.lock index f0ea300..3436bec 100644 --- a/uv.lock +++ b/uv.lock @@ -1338,6 +1338,7 @@ sqlite-aiosqlite = [ [package.dev-dependencies] dev = [ + { name = "aioodbc" }, { name = "aiosqlite" }, { name = "asyncpg" }, { name = "asyncpg-stubs" }, @@ -1370,6 +1371,7 @@ provides-extras = ["all", "postgresql-asyncpg", "postgresql-psycopg", "sqlite-ai [package.metadata.requires-dev] dev = [ + { name = "aioodbc" }, { name = "aiosqlite" }, { name = "asyncpg" }, { name = "asyncpg-stubs" }, From 860934aabe05ec6d439863334850a258acbc81e1 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 15:36:19 +0530 Subject: [PATCH 22/25] chore: Update README.md --- README.md | 537 
++++++++++++++++++++++++------------------------------ 1 file changed, 240 insertions(+), 297 deletions(-) diff --git a/README.md b/README.md index 32a06eb..4f5be7e 100644 --- a/README.md +++ b/README.md @@ -1,401 +1,344 @@ -# TaskIQ PostgreSQL +# taskiq-sqlalchemy -TaskIQ PostgreSQL is a comprehensive plugin for [TaskIQ](https://taskiq-python.github.io/) that provides PostgreSQL-based broker, result backend, and scheduler source with support for multiple PostgreSQL drivers. +A database-agnostic [TaskIQ](https://taskiq-python.github.io/) broker and result backend +built on top of **SQLAlchemy async engines**. + +Works with any database that has an async SQLAlchemy driver. +First-class support for PostgreSQL, Oracle and MSSQL; everything else falls back to a +polling-based broker automatically. + +--- ## Features -- **🚀 PostgreSQL Broker**: High-performance message broker using PostgreSQL LISTEN/NOTIFY -- **📦 Result Backend**: Persistent task result storage with configurable retention -- **⏰ Scheduler Source**: Cron-like task scheduling with PostgreSQL persistence -- **🔌 Multiple Drivers**: Support for asyncpg, psycopg3, and psqlpy -- **⚡ Async/Await**: Built for high-performance async operations -- **🛠️ Flexible Configuration**: Customizable table names, field types, and connection options -- **🔄 Multiple Serializers**: Support for different serialization methods (Pickle, JSON, etc.) 
-- **🔐 Connection Pooling**: Built-in connection pool management for all drivers +| | | +|---|---| +| **DB-agnostic result backend** | Stores task results in any SQL database — SQLite, PostgreSQL, Oracle, and more | +| **Push-based broker for PostgreSQL** | via `LISTEN / NOTIFY` (asyncpg or psycopg) | +| **Push-based broker for Oracle** | via Oracle Advanced Queuing (AQ) | +| **Push-based broker for MSSQL** |via SQL Server Service Broker | +| **Polling fallback broker** | Works on any async SQLAlchemy engine — SQLite for local dev, MySQL and others | -## Installation +--- -### Basic Installation +## Supported Databases -```bash -pip install taskiq-postgresql -``` +### Broker -### With Driver Dependencies +| Database | Transport | Driver extras | Status | +|---|---|---|---| +| **PostgreSQL** | `LISTEN / NOTIFY` (push) | `postgresql-asyncpg`, `postgresql-psycopg` | ✅ Supported | +| **Oracle** | Advanced Queuing — AQ (push) | `oracle-oracledb` | ✅ Supported | +| **MSSQL** | Service Broker (push) | `mssql-aioodbc` | ✅ Supported | +| **SQLite** | Polling | `sqlite-aiosqlite` | ✅ Supported (dev/test) | +| Any other async dialect | Polling | _(driver of your choice)_ | ✅ Supported via fallback | -Choose your preferred PostgreSQL driver: +### Result Backend -**AsyncPG (Recommended)** -```bash -pip install taskiq-postgresql[asyncpg] -``` +The result backend stores serialised `TaskiqResult` objects in a plain SQL table. +It is **fully database-agnostic** - any dialect that works with SQLAlchemy async will work unchanged. -**Psycopg3** -```bash -pip install taskiq-postgresql[psycopg] -``` +| Database | Status | +|---|---| +| SQLite | ✅ | +| PostgreSQL | ✅ | +| Oracle | ✅ | +| Any async SQLAlchemy dialect | ✅ | -**PSQLPy** -```bash -pip install taskiq-postgresql[psqlpy] -``` +### Scheduler Source +> **Roadmap** — A `SchedulerSource` implementation backed by a SQL table +> (cron expressions, one-off schedules) is planned. 
+--- -### Using Package Managers +## Installation -**Poetry:** ```bash -poetry add taskiq-postgresql[asyncpg] -``` +# SQLite only (zero external services — great for dev/testing) +pip install taskiq-sqlalchemy[sqlite-aiosqlite] -**UV:** -```bash -uv add taskiq-postgresql[asyncpg] +# PostgreSQL with asyncpg +pip install taskiq-sqlalchemy[postgresql-asyncpg] + +# PostgreSQL with psycopg3 +pip install taskiq-sqlalchemy[postgresql-psycopg] + +# Oracle +pip install taskiq-sqlalchemy[oracle-oracledb] + +# SQL Server (Service Broker) +pip install taskiq-sqlalchemy[mssql-aioodbc] + +# Everything +pip install taskiq-sqlalchemy[all] ``` -**Rye:** +With **uv**: + ```bash -rye add taskiq-postgresql[asyncpg] +uv add taskiq-sqlalchemy[postgresql-asyncpg] ``` -> **Note**: Driver extras are required as PostgreSQL drivers are optional dependencies. Without them, the PostgreSQL drivers won't be available. +--- ## Quick Start -### Basic Task Processing +### 1. Configure the Manager -```python -import asyncio -from taskiq_postgresql import PostgresqlBroker, PostgresqlResultBackend +`SQLAlchemyManager` owns the ORM table classes and the engine. +Pass your `DeclarativeBase` subclass so the tables are registered +in your metadata and can be created/migrated alongside your own models. 
-# Configure the result backend -result_backend = PostgresqlResultBackend( - dsn="postgresql://postgres:postgres@localhost:5432/taskiq_db", -) +```python +from sqlalchemy.ext.asyncio import create_async_engine +from sqlalchemy.orm import DeclarativeBase +from taskiq_sqlalchemy.manager import SQLAlchemyManager -# Configure the broker with result backend -broker = PostgresqlBroker( - dsn="postgresql://postgres:postgres@localhost:5432/taskiq_db", -).with_result_backend(result_backend) +class Base(DeclarativeBase): + pass +engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/mydb") -@broker.task -async def calculate_sum(a: int, b: int) -> int: - """Calculate the sum of two numbers.""" - await asyncio.sleep(1) # Simulate some work - return a + b +manager = SQLAlchemyManager(base_classes=(Base,)) +manager.configure(engine=engine) +# Create tables (or use Alembic) +async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) +``` -async def main(): - # Startup the broker - await broker.startup() - - # Send a task - task = await calculate_sum.kiq(10, 20) - - # Wait for result - result = await task.wait_result() - print(f"Result: {result}") # Result: 30 - - # Shutdown the broker - await broker.shutdown() +### 2. Set up the Broker +```python +from taskiq_sqlalchemy.broker import SQLAlchemyBroker -if __name__ == "__main__": - asyncio.run(main()) +broker = SQLAlchemyBroker(manager, channel_name="default") ``` -### Task Scheduling +`SQLAlchemyBroker` automatically selects the right transport adapter for the +connected dialect (see [Broker Internals](#broker-internals) below). + +### 3. 
Set up the Result Backend ```python -from taskiq_postgresql import PostgresqlBroker, PostgresqlSchedulerSource -from taskiq import TaskiqScheduler - -# Initialize broker -broker = PostgresqlBroker( - dsn="postgresql://postgres:postgres@localhost:5432/taskiq_db" -) - -# Initialize scheduler source -scheduler_source = PostgresqlSchedulerSource( - dsn="postgresql://postgres:postgres@localhost:5432/taskiq_db", - table_name="taskiq_schedules", - driver="asyncpg" -) - -# Create scheduler -scheduler = TaskiqScheduler( - broker=broker, - sources=[scheduler_source], -) +from taskiq_sqlalchemy.result_backend import SQLAlchemyResultBackend -@broker.task -async def scheduled_task(): - print("This task runs on schedule!") - -# Schedule task to run every minute -async def setup_schedule(): - await scheduler_source.add_schedule( - schedule_id="task-every-minute", - task_name="scheduled_task", - cron="* * * * *", # Every minute - args=[], - kwargs={} - ) +result_backend = SQLAlchemyResultBackend(manager) +broker = broker.with_result_backend(result_backend) ``` -## Configuration - -### PostgresqlBroker +### 4. 
Define and run tasks -| Parameter | Type | Default | Description | -|-----------|------|---------|-------------| -| `dsn` | `str` | Required | PostgreSQL connection string | -| `queue_name` | `str` | `"taskiq_queue"` | Name of the queue table | -| `field_for_task_id` | `Literal["VarChar", "Text", "Uuid"]` | `"Uuid"` | Field type for task IDs | -| `driver` | `Literal["asyncpg", "psycopg", "psqlpy"]` | `"asyncpg"` | Database driver | -| `**connect_kwargs` | `Any` | - | Additional driver-specific connection parameters | +```python +import asyncio -### PostgresqlResultBackend +@broker.task +async def add(a: int, b: int) -> int: + return a + b -| Parameter | Type | Default | Description | -|-----------|------|---------|-------------| -| `dsn` | `str` | Required | PostgreSQL connection string | -| `keep_results` | `bool` | `True` | Whether to keep results after reading | -| `table_name` | `str` | `"taskiq_results"` | Name of the results table | -| `field_for_task_id` | `Literal["VarChar", "Text", "Uuid"]` | `"Uuid"` | Field type for task IDs | -| `serializer` | `BaseSerializer` | `PickleSerializer()` | Serializer instance | -| `driver` | `Literal["asyncpg", "psycopg", "psqlpy"]` | `"asyncpg"` | Database driver | -| `**connect_kwargs` | `Any` | - | Additional driver-specific connection parameters | +async def main() -> None: + await broker.startup() -### PostgresqlSchedulerSource + task = await add.kiq(1, 2) + result = await task.wait_result(timeout=10) + print(result.return_value) # 3 -| Parameter | Type | Default | Description | -|-----------|------|---------|-------------| -| `dsn` | `str` | Required | PostgreSQL connection string | -| `table_name` | `str` | `"taskiq_schedules"` | Name of the schedules table | -| `driver` | `Literal["asyncpg", "psycopg", "psqlpy"]` | `"asyncpg"` | Database driver | -| `startup_schedule` | `dict` | `None` | Schedule definitions to create on startup | -| `**connect_kwargs` | `Any` | - | Additional driver-specific connection parameters | 
+ await broker.shutdown() -## Database Drivers +asyncio.run(main()) +``` -### AsyncPG (Recommended) -- **Performance**: Fastest PostgreSQL driver for Python -- **Features**: Full asyncio support, prepared statements, connection pooling -- **Use case**: High-performance applications +Run a worker in a separate process: -### Psycopg3 -- **Performance**: Good performance with extensive features -- **Features**: Full PostgreSQL feature support, mature ecosystem -- **Use case**: Feature-rich applications needing advanced PostgreSQL features +```bash +taskiq worker myapp:broker +``` -### PSQLPy -- **Performance**: Rust-based driver with excellent performance -- **Features**: Modern async implementation -- **Use case**: Applications prioritizing performance and modern architecture +--- +## Broker Internals +`SQLAlchemyBroker` separates two concerns: -## Advanced Configuration +| Concern | Component | +|---|---| +| **Persistence** | `kick()` inserts a row into the queue table, then commits | +| **Wakeup signal** | `DialectAdapter.notify()` tells workers a new row is waiting | -### Custom Serializer +The commit happens *before* the notify, so a worker that wakes up immediately +will always find the row in the database. -```python -from taskiq.serializers import JSONSerializer +### DialectAdapter -result_backend = PostgresqlResultBackend( - dsn="postgresql://postgres:postgres@localhost:5432/taskiq_db", - serializer=JSONSerializer(), -) +``` +DialectAdapter (ABC) + ├── PostgresDialectAdapter — LISTEN/NOTIFY via asyncpg + ├── OracleDialectAdapter — Advanced Queuing (AQ) via oracledb Thin + ├── MSSQLDialectAdapter — Service Broker WAITFOR RECEIVE via aioodbc + └── PollingAdapter — periodic SELECT … FOR UPDATE SKIP LOCKED ``` -### Custom Table Names and Field Types +The correct adapter is chosen automatically by `resolve_adapter()` based on +`engine.dialect.name`. 
You can also pass a custom adapter directly: ```python -broker = PostgresqlBroker( - dsn="postgresql://postgres:postgres@localhost:5432/taskiq_db", - queue_name="my_custom_queue", - field_for_task_id="Text", # Use TEXT instead of UUID -) - -result_backend = PostgresqlResultBackend( - dsn="postgresql://postgres:postgres@localhost:5432/taskiq_db", - table_name="my_custom_results", - field_for_task_id="VarChar", # Use VARCHAR instead of UUID -) +from taskiq_sqlalchemy.adapters.polling import PollingAdapter + +adapter = PollingAdapter(engine, queue_cls=manager.queue_cls, poll_interval=1.0) +broker = SQLAlchemyBroker(manager, adapter=adapter) ``` -### Connection Pool Configuration +### PostgreSQL — LISTEN / NOTIFY -```python -# AsyncPG -broker = PostgresqlBroker( - dsn="postgresql://postgres:postgres@localhost:5432/taskiq_db", - driver="asyncpg", - min_size=5, - max_size=20, - max_inactive_connection_lifetime=300, -) - -# Psycopg3 -broker = PostgresqlBroker( - dsn="postgresql://postgres:postgres@localhost:5432/taskiq_db", - driver="psycopg", - min_size=5, - max_size=20, - max_lifetime=3600, -) -``` +PostgreSQL's `LISTEN / NOTIFY` is a native, push-based pub/sub mechanism. +`PostgresDialectAdapter` holds a **dedicated raw asyncpg connection** separate +from the SQLAlchemy pool (the pool connection would be returned after each +statement, breaking the LISTEN subscription). -### Using Environment Variables +Notifications are bridged into the async generator via an +**anyio `MemoryObjectStream`** pair: -```python -import os +``` +asyncpg callback (sync) → send_stream.send_nowait() → recv_stream → broker.listen() +``` -# From environment -dsn = os.getenv("DATABASE_URL", "postgresql://localhost/taskiq") +This is fully backend-agnostic (asyncio and trio) and provides structured +lifecycle: closing `send_stream` on shutdown propagates `EndOfStream` to the +consumer cleanly. 
-# With SSL -dsn = "postgresql://user:pass@localhost:5432/db?sslmode=require" +Workers call `worker_startup()` to acquire the dedicated listener connection +and `worker_shutdown()` to release it. -broker = PostgresqlBroker(dsn=dsn) -``` +### Oracle — Advanced Queuing (AQ) -## Database Schema +Oracle AQ is Oracle's native message-queuing subsystem. +`OracleDialectAdapter` uses **oracledb Thin mode** — no Oracle Instant Client +or native libraries are needed on the worker host. -The library automatically creates the necessary tables: +Each `channel` maps to an AQ queue created via `DBMS_AQADM`. The adapter: -### Queue Table (default: `taskiq_queue`) -```sql -CREATE TABLE taskiq_queue ( - id SERIAL PRIMARY KEY, - task_id UUID NOT NULL, - task_name VARCHAR NOT NULL, - message BYTEA NOT NULL, - labels JSONB, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); -``` +1. Creates the queue table + queue on first use (`ensure_queue()`), idempotently. +2. `notify()` enqueues a RAW message with `ENQ_IMMEDIATE` (committed before the + outer transaction returns, so workers see it right away). +3. `listen()` runs a `deqone()` loop with `DEQ_NO_WAIT` and sleeps 0.5 s between + empty polls. It exits cleanly when `_stop_event` is set. -### Results Table (default: `taskiq_results`) -```sql -CREATE TABLE taskiq_results ( - task_id UUID PRIMARY KEY, - result BYTEA, - is_err BOOLEAN DEFAULT FALSE, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); -``` +> **Thin mode requirement** — `broker_startup()` raises `RuntimeError` if +> `oracledb.is_thin_mode()` returns `False`. Do not call +> `oracledb.init_oracle_client()` in your application when using this adapter. 
-### Schedules Table (default: `taskiq_schedules`) -```sql -CREATE TABLE taskiq_schedules ( - id UUID PRIMARY KEY, - task_name VARCHAR(100) NOT NULL, - schedule JSONB NOT NULL, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); -``` +### MSSQL — Service Broker -## Performance Tips +SQL Server's Service Broker is a built-in, durable, transactional messaging +subsystem. `MSSQLDialectAdapter` uses `aioodbc` (async ODBC) and requires no +external broker process. -1. **Choose the Right Driver**: AsyncPG typically offers the best performance -2. **Connection Pooling**: Configure appropriate pool sizes for your workload -3. **Field Types**: Use UUID for high-performance task IDs, TEXT for debugging -4. **Indexes**: Consider adding indexes on frequently queried columns -5. **Connection Reuse**: Keep broker connections alive during application lifetime +Each `channel` maps to a pair of Service Broker objects created via +`ensure_queue()` (idempotent): -## Troubleshooting +- **Queue** — `taskiq_sb_` +- **Service** — `taskiq_svc_` -### Common Issues +One long-lived **dialog conversation** is opened per channel in +`worker_startup()` and reused for all subsequent `notify()` calls, avoiding +`BEGIN DIALOG` overhead on every `kick()`. -**Connection Errors** -```python -# Ensure your connection string is correct -dsn = "postgresql://username:password@host:port/database" +`listen()` runs a `WAITFOR (RECEIVE TOP(1) …), TIMEOUT 500` loop. A 500 ms +timeout means the worst-case shutdown delay is 500 ms; when `_stop_event` is +set the loop exits immediately on the next timeout. 
-# Check PostgreSQL is running and accessible -import asyncpg -conn = await asyncpg.connect(dsn) -await conn.close() ``` - -**Table Creation Issues** -```python -# Ensure user has CREATE TABLE permissions -# Or manually create tables using provided schemas +mssql+aioodbc://sa:Password1@localhost:1433/taskiq?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes ``` -**Driver Import Errors** -```bash -# Install the appropriate driver extra -pip install taskiq-postgresql[asyncpg] +> **Service Broker must be enabled** on the target database: +> ```sql +> ALTER DATABASE taskiq SET ENABLE_BROKER WITH ROLLBACK IMMEDIATE; +> ``` + +### Polling Fallback + +`PollingAdapter` is used for any dialect that has no native pub/sub (SQLite, etc.). + +``` +while not stop: + SELECT task_id FROM queue + WHERE channel = :channel + ORDER BY id + LIMIT 25 + FOR UPDATE SKIP LOCKED + → yield each task_id + sleep(poll_interval) # default: 1 s ``` -## Requirements +`FOR UPDATE SKIP LOCKED` ensures that multiple workers never claim the same row, +even under concurrent load. On SQLite (which lacks `SKIP LOCKED`) the adapter +falls back to a plain `DELETE … RETURNING` which is atomic at the SQLite WAL level. -- **Python**: 3.9+ -- **TaskIQ**: 0.11.7+ -- **PostgreSQL**: 10+ +`notify()` is a no-op for this adapter — `kick()` still inserts the row; workers +simply discover it on the next poll cycle. 
-### Driver Dependencies +--- -| Driver | Version | Extra | -|--------|---------|-------| -| AsyncPG | 0.30.0+ | `[asyncpg]` | -| Psycopg3 | 3.2.9+ | `[psycopg]` | -| PSQLPy | 0.11.3+ | `[psqlpy]` | +## Configuration Reference -## Development +### `SQLAlchemyManager` -This project uses modern Python development tools: +| Parameter | Type | Default | Description | +|---|---|---|---| +| `base_classes` | `Sequence[type]` | `()` | `DeclarativeBase` subclass(es) for table registration | +| `queue_cls` | `type[TaskiqQueueMixin]` | auto | Bring your own queue ORM class | +| `result_cls` | `type[TaskiqResultMixin]` | auto | Bring your own result ORM class | +| `schedule_cls` | `type[TaskiqScheduleMixin]` | auto | Bring your own schedule ORM class | -- **[UV](https://github.com/astral-sh/uv)**: Fast Python package installer and resolver -- **[Ruff](https://github.com/astral-sh/ruff)**: Extremely fast Python linter and formatter -- **[Pytest](https://pytest.org/)**: Testing framework +### `SQLAlchemyBroker` -### Setup Development Environment +| Parameter | Type | Default | Description | +|---|---|---|---| +| `manager` | `SQLAlchemyManager` | required | Bound manager | +| `channel_name` | `str` | `"taskiq"` | Logical queue channel name | +| `adapter` | `DialectAdapter \| None` | auto | Override the transport adapter | -```bash -# Clone the repository -git clone https://github.com/z22092/taskiq-postgresql.git -cd taskiq-postgresql +### `SQLAlchemyResultBackend` -# Install dependencies with UV -uv sync +| Parameter | Type | Default | Description | +|---|---|---|---| +| `manager` | `SQLAlchemyManager` | required | Bound manager | +| `keep_results` | `bool` | `True` | If `False`, the result row is deleted after the first `get_result()` call | +| `serializer` | `TaskiqSerializer` | `PickleSerializer()` | Swap in any `TaskiqSerializer` implementation | -# Install with all driver extras -uv sync --extra asyncpg --extra psycopg --extra psqlpy +--- -# Run tests -uv run pytest +## 
Running Tests -# Format and lint -uv run ruff format -uv run ruff check -``` +```bash +# Generic tests only (SQLite, no external services) +uv run pytest tests/ -m "not postgresql and not oracle" -## Contributing +# PostgreSQL tests (requires a running Postgres) +docker compose -f docker/docker-compose.yml up -d postgres +uv run pytest tests/ -m "postgresql" -Contributions are welcome! Please: +# Oracle tests (requires a running Oracle XE) +docker compose -f docker/docker-compose.yml up -d oracle +uv run pytest tests/ -m "oracle" -1. Fork the repository -2. Create a feature branch -3. Add tests for new functionality -4. Ensure all tests pass -5. Submit a pull request +# Everything at once +uv run pytest tests/ +``` -## License +--- -This project is licensed under the MIT License. See the LICENSE file for details. +## Requirements + +- Python 3.9+ +- TaskIQ 0.11.7+ +- SQLAlchemy 2.0+ +- anyio 4+ -## Links +--- + +## License -- **[TaskIQ Documentation](https://taskiq-python.github.io/)** - Main TaskIQ framework -- **[AsyncPG Documentation](https://magicstack.github.io/asyncpg/)** - AsyncPG driver docs -- **[Psycopg Documentation](https://www.psycopg.org/psycopg3/)** - Psycopg3 driver docs -- **[PostgreSQL Documentation](https://www.postgresql.org/docs/)** - PostgreSQL database docs -- **[GitHub Repository](https://github.com/z22092/taskiq-postgresql)** - Source code and issues +MIT — see [LICENSE](LICENSE). 
From c0bcf9db202d4a84ae841b75fd744ae3c8d5d37f Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 16:12:14 +0530 Subject: [PATCH 23/25] feat: Add UPSERT specializations for sqlite, postgres and mysql Closes: https://github.com/corridor/taskiq-sqlalchemy/issues/3 See: https://docs.sqlalchemy.org/en/20/orm/queryguide/dml.html#orm-upsert-statements --- taskiq_sqlalchemy/result_backend.py | 71 +++++++++++++++++++++-------- 1 file changed, 51 insertions(+), 20 deletions(-) diff --git a/taskiq_sqlalchemy/result_backend.py b/taskiq_sqlalchemy/result_backend.py index ad56cef..7d68184 100644 --- a/taskiq_sqlalchemy/result_backend.py +++ b/taskiq_sqlalchemy/result_backend.py @@ -8,12 +8,14 @@ import typing as t import sqlalchemy as sa +from sqlalchemy.sql.dml import Insert from taskiq import AsyncResultBackend from taskiq.abc.serializer import TaskiqSerializer from taskiq.result import TaskiqResult from taskiq.serializers.pickle import PickleSerializer from taskiq_sqlalchemy.manager import SQLAlchemyManager +from taskiq_sqlalchemy.models import TaskiqResultMixin _ReturnType = t.TypeVar("_ReturnType") @@ -36,9 +38,44 @@ def __init__( self.keep_results = keep_results self.serializer = serializer or PickleSerializer() - # ------------------------------------------------------------------ - # AsyncResultBackend protocol - # ------------------------------------------------------------------ + @classmethod + async def build_upsert_statement( + cls, + dialect: str, + result_cls: type[TaskiqResultMixin], + values: dict, + update: dict, + ) -> Insert: + if dialect == "postgresql": + from sqlalchemy.dialects.postgresql import insert # noqa: PLC0415 + + return ( + insert(result_cls) + .values(**values) + .on_conflict_do_update( + index_elements=[result_cls.task_id], + set_=update, + ) + ) + + if dialect == "sqlite": + from sqlalchemy.dialects.sqlite import insert # noqa: PLC0415 + + return ( + insert(result_cls) + .values(**values) + .on_conflict_do_update( + 
index_elements=[result_cls.task_id], + set_=update, + ) + ) + + if dialect == "mysql": + from sqlalchemy.dialects.mysql import insert # noqa: PLC0415 + + return insert(result_cls).values(**values).on_duplicate_key_update(**update) + + raise NotImplementedError(f"Upsert not supported for {dialect}") async def set_result(self, task_id: str, result: TaskiqResult[_ReturnType]) -> None: async_engine = self.manager.engine @@ -50,23 +87,17 @@ async def set_result(self, task_id: str, result: TaskiqResult[_ReturnType]) -> N # to a delete-then-insert, which is safe because set_result is # called at most once per task_id. dialect = async_engine.dialect.name - if dialect in ("postgresql",): - from sqlalchemy.dialects.postgresql import insert as pg_insert # noqa: PLC0415 - - stmt = ( - pg_insert(self.manager.result_cls) - .values( - task_id=task_id, - result=serialised, - is_err=result.is_err, - ) - .on_conflict_do_update( - index_elements=["task_id"], - set_={ - "result": serialised, - "is_err": result.is_err, - }, - ) + + result_cls = self.manager.result_cls + if dialect in ("postgresql", "sqlite", "mysql"): + stmt = await self.build_upsert_statement( + dialect, + result_cls, + values={"task_id": task_id, "result": serialised, "is_err": result.is_err}, + update={ + "result": serialised, + "is_err": result.is_err, + }, ) else: # Generic fallback: delete then insert (safe, task results are From c41df796d028f34425e814c97408dfcda20b5d91 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 20:35:19 +0530 Subject: [PATCH 24/25] feat: Add support for MySQL --- docker/docker-compose.yml | 21 ++++++++++++++++++++ pyproject.toml | 8 ++++---- taskiq_sqlalchemy/broker.py | 34 +++++++++++++++++++++++--------- tests/broker/conftest.py | 5 +++++ tests/result_backend/conftest.py | 5 +++++ uv.lock | 32 ++++++++++++++++++++++++++++-- 6 files changed, 90 insertions(+), 15 deletions(-) diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 7a1ac1b..5c20ecf
100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -77,10 +77,31 @@ services: networks: - default + mysql: + image: mysql:latest + restart: always + environment: + MYSQL_ROOT_PASSWORD: "S3cureP@ssw0rd" + MYSQL_DATABASE: taskiq + MYSQL_USER: taskiq_user + MYSQL_PASSWORD: "gOxN5hbl7geTwgvS" + volumes: + - mysql_data:/var/lib/mysql + ports: + - 3306:3306 + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] + interval: 10s + retries: 10 + timeout: 3s + networks: + - default + volumes: postgres_data: oracle_data: mssql_data: + mysql_data: networks: default: driver: bridge diff --git a/pyproject.toml b/pyproject.toml index d4f2135..9a5b3fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,7 +37,7 @@ dependencies = ["taskiq>=0.11.7", 'sqlalchemy>=2', 'anyio>=4'] [project.optional-dependencies] all = [ - "taskiq_sqlalchemy[postgresql-asyncpg,postgresql-psycopg,sqlite-aiosqlite,oracle-oracledb,mssql-aioodbc]", + "taskiq_sqlalchemy[postgresql-asyncpg,postgresql-psycopg,sqlite-aiosqlite,oracle-oracledb,mssql-aioodbc,mysql-aiomysql]", ] postgresql-asyncpg = ["asyncpg"] @@ -45,6 +45,7 @@ postgresql-psycopg = ["psycopg[binary,pool]"] sqlite-aiosqlite = ["aiosqlite"] oracle-oracledb = ["oracledb"] mssql-aioodbc = ["aioodbc"] +mysql-aiomysql = ["aiomysql"] [project.urls] Homepage = "https://github.com/corridor/taskiq-sqlalchemy" @@ -70,6 +71,7 @@ dev = [ "psycopg", # PostgreSQL psycopg3 driver (optional in local dev) "oracledb", # OracleDB driver (optional in local dev) "aioodbc", # SQL Server driver (optional in local dev) + "aiomysql", # MySQL driver (optional in local dev) ] [tool.setuptools.packages.find] @@ -92,6 +94,7 @@ markers = [ "postgresql: tests that require a running PostgreSQL server", "oracle: tests that require a running OracleDB server", "mssql: tests that require a running SQL Server instance", + "mysql: tests that require a running MySQL server", ] [tool.ruff] @@ -152,9 +155,6 @@ ignore = [ 
[tool.ruff.lint.pydocstyle] convention = "google" -[tool.ruff.lint.flake8-bugbear] -extend-immutable-calls = ["taskiq_dependencies.Depends", "taskiq.TaskiqDepends"] - [tool.ruff.lint.flake8-tidy-imports.banned-api] "__future__.annotations".msg = "__future__.annotations is discouraged. See PEP-649 and PEP-749" diff --git a/taskiq_sqlalchemy/broker.py b/taskiq_sqlalchemy/broker.py index 342b531..78ac34d 100644 --- a/taskiq_sqlalchemy/broker.py +++ b/taskiq_sqlalchemy/broker.py @@ -98,15 +98,31 @@ async def _fetch_message(self, task_id: str) -> t.Optional[AckableMessage]: Fetch and deserialise one message, then delete it from the queue. """ async with self.manager.engine.begin() as conn: - result = await conn.execute( - sa.delete(self.manager.queue_cls) - .filter_by(task_id=task_id, channel=self.channel_name) - .returning(self.manager.queue_cls.message), - ) - row = result.first() - if row is None: - # Another worker already claimed it - return None + if self.manager.engine.dialect.name == "mysql": + # MySQL does not support DELETE .. RETURNING + # SELECT FOR UPDATE SKIP LOCKED atomically claims the row. + result = await conn.execute( + sa.select(self.manager.queue_cls) + .filter_by(task_id=task_id, channel=self.channel_name) + .with_for_update(skip_locked=True) + ) + row = result.first() + if row is None: + # Another worker already locked/deleted it + return None + await conn.execute( + sa.delete(self.manager.queue_cls).filter_by(task_id=task_id, channel=self.channel_name) + ) + else: + result = await conn.execute( + sa.delete(self.manager.queue_cls) + .filter_by(task_id=task_id, channel=self.channel_name) + .returning(self.manager.queue_cls.message) + ) + row = result.first() + if row is None: + # Another worker already claimed it + return None async def ack() -> None: # No-op: the row was already deleted when claimed. 
diff --git a/tests/broker/conftest.py b/tests/broker/conftest.py index 1ec99cd..0d683a2 100644 --- a/tests/broker/conftest.py +++ b/tests/broker/conftest.py @@ -72,6 +72,11 @@ def feed(self, task_id: str) -> None: id="mssql+aioodbc", marks=pytest.mark.mssql, ), + pytest.param( + "mysql+aiomysql://taskiq_user:gOxN5hbl7geTwgvS@localhost:3306/taskiq", + id="mysql+aiomysql", + marks=pytest.mark.mysql, + ), ] diff --git a/tests/result_backend/conftest.py b/tests/result_backend/conftest.py index 8f3e01a..4403662 100644 --- a/tests/result_backend/conftest.py +++ b/tests/result_backend/conftest.py @@ -36,6 +36,11 @@ id="mssql+aioodbc", marks=pytest.mark.mssql, ), + pytest.param( + "mysql+aiomysql://taskiq_user:gOxN5hbl7geTwgvS@localhost:3306/taskiq", + id="mysql+aiomysql", + marks=pytest.mark.mysql, + ), ] diff --git a/uv.lock b/uv.lock index 3436bec..1bebbdb 100644 --- a/uv.lock +++ b/uv.lock @@ -10,6 +10,18 @@ required-markers = [ "python_full_version == '3.10.*'", ] +[[package]] +name = "aiomysql" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pymysql" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/e0/302aeffe8d90853556f47f3106b89c16cc2ec2a4d269bdfd82e3f4ae12cc/aiomysql-0.3.2.tar.gz", hash = "sha256:72d15ef5cfc34c03468eb41e1b90adb9fd9347b0b589114bd23ead569a02ac1a", size = 108311, upload-time = "2025-10-22T00:15:21.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/af/aae0153c3e28712adaf462328f6c7a3c196a1c1c27b491de4377dd3e6b52/aiomysql-0.3.2-py3-none-any.whl", hash = "sha256:c82c5ba04137d7afd5c693a258bea8ead2aad77101668044143a991e04632eb2", size = 71834, upload-time = "2025-10-22T00:15:15.905Z" }, +] + [[package]] name = "aioodbc" version = "0.5.0" @@ -951,6 +963,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = 
"sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pymysql" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/ae/1fe3fcd9f959efa0ebe200b8de88b5a5ce3e767e38c7ac32fb179f16a388/pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03", size = 48258, upload-time = "2025-08-24T12:55:55.146Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9", size = 45300, upload-time = "2025-08-24T12:55:53.394Z" }, +] + [[package]] name = "pyodbc" version = "5.3.0" @@ -1314,6 +1335,7 @@ dependencies = [ [package.optional-dependencies] all = [ + { name = "aiomysql" }, { name = "aioodbc" }, { name = "aiosqlite" }, { name = "asyncpg" }, @@ -1323,6 +1345,9 @@ all = [ mssql-aioodbc = [ { name = "aioodbc" }, ] +mysql-aiomysql = [ + { name = "aiomysql" }, +] oracle-oracledb = [ { name = "oracledb" }, ] @@ -1338,6 +1363,7 @@ sqlite-aiosqlite = [ [package.dev-dependencies] dev = [ + { name = "aiomysql" }, { name = "aioodbc" }, { name = "aiosqlite" }, { name = "asyncpg" }, @@ -1357,6 +1383,7 @@ dev = [ [package.metadata] requires-dist = [ + { name = "aiomysql", marker = "extra == 'mysql-aiomysql'" }, { name = "aioodbc", marker = "extra == 'mssql-aioodbc'" }, { name = "aiosqlite", marker = "extra == 'sqlite-aiosqlite'" }, { name = "anyio", specifier = ">=4" }, @@ -1365,12 +1392,13 @@ requires-dist = [ { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'postgresql-psycopg'" }, { name = "sqlalchemy", specifier = ">=2" }, { name = "taskiq", specifier = ">=0.11.7" }, - { name = "taskiq-sqlalchemy", extras = ["postgresql-asyncpg", "postgresql-psycopg", 
"sqlite-aiosqlite", "oracle-oracledb", "mssql-aioodbc"], marker = "extra == 'all'" }, + { name = "taskiq-sqlalchemy", extras = ["postgresql-asyncpg", "postgresql-psycopg", "sqlite-aiosqlite", "oracle-oracledb", "mssql-aioodbc", "mysql-aiomysql"], marker = "extra == 'all'" }, ] -provides-extras = ["all", "postgresql-asyncpg", "postgresql-psycopg", "sqlite-aiosqlite", "oracle-oracledb", "mssql-aioodbc"] +provides-extras = ["all", "postgresql-asyncpg", "postgresql-psycopg", "sqlite-aiosqlite", "oracle-oracledb", "mssql-aioodbc", "mysql-aiomysql"] [package.metadata.requires-dev] dev = [ + { name = "aiomysql" }, { name = "aioodbc" }, { name = "aiosqlite" }, { name = "asyncpg" }, From 83bc409d308495be00d14939ae4bcd7900435cd4 Mon Sep 17 00:00:00 2001 From: Ashwin A Nayar Date: Mon, 30 Mar 2026 20:38:04 +0530 Subject: [PATCH 25/25] chore: Add tests in CI for mysql --- .github/workflows/ci.yml | 51 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b39772a..9f70da4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -234,3 +234,54 @@ jobs: # files: coverage-mssql.xml # flags: mssql # fail_ci_if_error: false + + test-mysql: + name: "Tests · MySQL" + runs-on: ubuntu-latest + needs: lint + + steps: + - uses: actions/checkout@v6 + + - name: Start MySQL service + run: | + cd docker + docker compose up -d mysql + + - name: Install uv + uses: astral-sh/setup-uv@v7 + + - name: Set up Python + run: uv python install 3.12 + + - name: Install dependencies + run: uv sync --dev + + - name: Wait for MySQL to be healthy + run: | + cd docker + echo "Waiting for MySQL service to become healthy..." 
+ for i in $(seq 1 60); do + if [ "$(docker compose ps mysql --format json | jq -r '.Health')" = "healthy" ]; then + echo "MySQL is healthy after ${i}s" + exit 0 + fi + sleep 2 + done + echo "MySQL did not become healthy within 120 seconds" + exit 1 + + - name: Run mysql tests + run: | + uv run pytest tests/ -v \ + -m "mysql" \ + --strict-markers --maxfail=10 \ + --cov=taskiq_sqlalchemy \ + --cov-report=xml:coverage-mysql.xml + + # - name: Upload coverage + # uses: codecov/codecov-action@v5 + # with: + # files: coverage-mysql.xml + # flags: mysql + # fail_ci_if_error: false