diff --git a/agent/src/testflinger_agent/client.py b/agent/src/testflinger_agent/client.py
index 7242d5be9..8880deaf4 100644
--- a/agent/src/testflinger_agent/client.py
+++ b/agent/src/testflinger_agent/client.py
@@ -316,22 +316,49 @@ def post_log(
     ):
         """Post log data to the testflinger server for this job.
 
-        :param job_id - id for the job
-        :param log_input - Dataclass with all of the keys for the log endpoint
-        :param log_type - Enum of different log types the server accepts
+        :param job_id: id for the job
+        :param log_input: Dataclass with all of the keys for the log endpoint
+        :param log_type: Enum of different log types the server accepts
         """
         endpoint = urljoin(self.server, f"/v1/result/{job_id}/log/{log_type}")
+
+        # Track the response from the new endpoint (None means it failed)
+        request = None
+
+        # TODO: Remove legacy endpoint support in future versions
+        # Track the response from the legacy endpoint
+        legacy_request = None
+
+        # Enum value is "serial"; the legacy endpoint expects "serial_output"
+        suffix = (
+            "serial_output"
+            if log_type == LogType.SERIAL_OUTPUT
+            else log_type.value
+        )
+        legacy_endpoint = urljoin(self.server, f"/v1/result/{job_id}/{suffix}")
+        # Prioritize writing to the legacy endpoint
        try:
-            job_request = self.session.post(
+            legacy_request = self.session.post(
+                legacy_endpoint,
+                data=log_input.log_data.encode("utf-8"),
+                timeout=60,
+            )
+        except requests.exceptions.RequestException as exc:
+            logger.error(exc)
+            logger.info("Fallback to new log endpoint")
+
+        # Write logs to the new endpoint
+        try:
+            request = self.session.post(
                 endpoint, json=asdict(log_input), timeout=60
             )
         except requests.exceptions.RequestException as exc:
             logger.error(exc)
-            return False
-        return bool(job_request)
+
+        # Return True if either request was successful
+        return any(
+            req is not None and req.ok for req in (legacy_request, request)
+        )
 
     def post_advertised_queues(self):
         """Post the list of advertised queues to testflinger server."""
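For reference, the dual-write above reports success when at least one of the two responses exists and returned 2xx. A minimal, self-contained sketch of that return logic (FakeResponse is an illustrative stand-in for requests.Response, not part of the codebase):

    # Stand-in for requests.Response; only the .ok attribute matters here.
    class FakeResponse:
        def __init__(self, ok: bool):
            self.ok = ok

    def post_succeeded(legacy_request, request) -> bool:
        # True if at least one request completed (not None) and returned 2xx.
        return any(
            req is not None and req.ok for req in (legacy_request, request)
        )

    assert post_succeeded(FakeResponse(True), None) is True    # legacy only
    assert post_succeeded(None, FakeResponse(True)) is True    # new only
    assert post_succeeded(FakeResponse(False), None) is False  # 4xx/5xx
    assert post_succeeded(None, None) is False                 # both raised

Both endpoints are attempted on every call; the legacy write only comes first in ordering, so older servers keep receiving logs while the structured endpoint is rolled out.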
diff --git a/agent/uv.lock b/agent/uv.lock
index b8314b869..db56ea8ab 100644
--- a/agent/uv.lock
+++ b/agent/uv.lock
@@ -1653,7 +1653,7 @@ test = [{ name = "testflinger-device-connectors", directory = "../device-connect
 
 [[package]]
 name = "testflinger-common"
-version = "1.1.3"
+version = "1.1.4"
 source = { directory = "../common" }
 dependencies = [
     { name = "strenum" },
diff --git a/cli/testflinger_cli/__init__.py b/cli/testflinger_cli/__init__.py
index 58eac398c..592583ee4 100644
--- a/cli/testflinger_cli/__init__.py
+++ b/cli/testflinger_cli/__init__.py
@@ -1089,6 +1089,7 @@ def do_poll(
         if job_state == "waiting":
             print("This job is waiting on a node to become available.")
         cur_fragment = start_fragment
+        consecutive_empty_polls = 0
         while True:
             try:
                 job_state = self.get_job_state(job_id)
@@ -1117,8 +1118,16 @@ def do_poll(
                     f"of it in the queue are complete"
                 )
             elif last_fragment_number < 0:
-                print("Waiting on output...")
+                consecutive_empty_polls += 1
+                # Only show the message after ~90 seconds of no output,
+                # so the user knows the CLI is still actively polling
+                if consecutive_empty_polls == 9:
+                    print("Waiting on output...", file=sys.stderr)
             else:
+                # Reset the counter when we get data
+                consecutive_empty_polls = 0
+
+                # Print the retrieved log data
                 print(log_data, end="", flush=True)
             cur_fragment = last_fragment_number + 1
             time.sleep(10)
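The threshold of 9 pairs with the loop's fixed 10-second sleep: the message appears only after roughly 90 seconds without output, and comparing with == (rather than >=) prints it once per dry spell instead of on every subsequent poll. A small simulation of the counter behaviour (polls_until_message is illustrative, not CLI code):

    POLL_INTERVAL_SECONDS = 10
    EMPTY_POLLS_BEFORE_MESSAGE = 9  # 9 * 10 s ~= 90 s of silence

    def polls_until_message(poll_results):
        """Return the poll index that would trigger the message, else None."""
        consecutive_empty_polls = 0
        for i, has_data in enumerate(poll_results, start=1):
            if has_data:
                consecutive_empty_polls = 0  # reset as soon as data arrives
            else:
                consecutive_empty_polls += 1
                if consecutive_empty_polls == EMPTY_POLLS_BEFORE_MESSAGE:
                    return i
        return None

    assert polls_until_message([False] * 9) == 9
    # Any data in between resets the counter, so no message is shown:
    assert polls_until_message([False] * 5 + [True] + [False] * 8) is None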
diff --git a/cli/tests/test_cli.py b/cli/tests/test_cli.py
index 21ed7ce2f..c170935e2 100644
--- a/cli/tests/test_cli.py
+++ b/cli/tests/test_cli.py
@@ -22,6 +22,7 @@
 import re
 import sys
 import tarfile
+import time
 import uuid
 from http import HTTPStatus
 from pathlib import Path
@@ -33,8 +34,8 @@
 import testflinger_cli
 from testflinger_cli.client import HTTPError
-from testflinger_cli.errors import AuthorizationError, NetworkError
 from testflinger_cli.enums import LogType
+from testflinger_cli.errors import NetworkError
 
 URL = "https://testflinger.canonical.com"
@@ -1193,8 +1194,6 @@ def test_live_polling_with_fragments_progression(
     capsys, requests_mock, monkeypatch
 ):
     """Test live polling uses cur_fragment and progresses through fragments."""
-    import time
-
     job_id = str(uuid.uuid1())
 
     # Mock time.sleep
@@ -1275,3 +1274,51 @@ def combined_log_wrapper_2(
 
     # Should have slept between iterations
     assert len(sleep_calls) >= 2
+
+def test_live_polling_with_empty_poll(capsys, requests_mock, monkeypatch):
+    """Test that live output handles empty polls correctly."""
+    job_id = str(uuid.uuid1())
+
+    sleep_calls = []
+    monkeypatch.setattr(
+        time, "sleep", lambda duration: sleep_calls.append(duration)
+    )
+
+    # Mock job status
+    requests_mock.get(
+        f"{URL}/v1/result/{job_id}",
+        10 * [{"json": {"job_state": "active"}}]
+        + [{"json": {"job_state": "complete"}}],
+    )
+
+    # Mock log output with 10 empty responses
+    requests_mock.get(
+        f"{URL}/v1/result/{job_id}/log/output",
+        10
+        * [
+            {
+                "json": {
+                    "output": {
+                        "test": {"last_fragment_number": -1, "log_data": ""}
+                    }
+                }
+            }
+        ]
+        + [
+            {
+                "json": {
+                    "output": {
+                        "test": {"last_fragment_number": 0, "log_data": "data"}
+                    }
+                }
+            }
+        ],
+    )
+
+    sys.argv = ["", "poll", job_id]
+    tfcli = testflinger_cli.TestflingerCli()
+    tfcli.run()
+
+    captured = capsys.readouterr()
+    assert "Waiting on output..." in captured.err
+    assert len(sleep_calls) >= 9
diff --git a/common/pyproject.toml b/common/pyproject.toml
index dba617cd3..3c4f94b53 100644
--- a/common/pyproject.toml
+++ b/common/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "uv_build"
 name = "testflinger-common"
 description = "Testflinger common modules"
 readme = "README.md"
-version = "1.1.3"
+version = "1.1.4"
 requires-python = ">=3.10"
 dependencies = ["strenum>=0.4.15"]
diff --git a/common/uv.lock b/common/uv.lock
index 053fbcb1f..24fe6194d 100644
--- a/common/uv.lock
+++ b/common/uv.lock
@@ -114,7 +114,7 @@ wheels = [
 
 [[package]]
 name = "testflinger-common"
-version = "1.1.3"
+version = "1.1.4"
 source = { editable = "." }
 dependencies = [
     { name = "strenum" },
diff --git a/server/src/testflinger/api/schemas.py b/server/src/testflinger/api/schemas.py
index 5597d2826..aea933265 100644
--- a/server/src/testflinger/api/schemas.py
+++ b/server/src/testflinger/api/schemas.py
@@ -597,3 +597,71 @@ class SecretIn(Schema):
     """Secret input schema."""
 
     value = fields.String(required=True)
+
+
+class ResultLegacy(Schema):
+    """Legacy Result Post schema for backwards compatibility."""
+
+    # TODO: Remove this schema after deprecating legacy endpoints
+    setup_status = fields.Integer(required=False)
+    setup_output = fields.String(required=False)
+    setup_serial = fields.String(required=False)
+    provision_status = fields.Integer(required=False)
+    provision_output = fields.String(required=False)
+    provision_serial = fields.String(required=False)
+    firmware_update_status = fields.Integer(required=False)
+    firmware_update_output = fields.String(required=False)
+    firmware_update_serial = fields.String(required=False)
+    test_status = fields.Integer(required=False)
+    test_output = fields.String(required=False)
+    test_serial = fields.String(required=False)
+    allocate_status = fields.Integer(required=False)
+    allocate_output = fields.String(required=False)
+    allocate_serial = fields.String(required=False)
+    reserve_status = fields.Integer(required=False)
+    reserve_output = fields.String(required=False)
+    reserve_serial = fields.String(required=False)
+    cleanup_status = fields.Integer(required=False)
+    cleanup_output = fields.String(required=False)
+    cleanup_serial = fields.String(required=False)
+    device_info = fields.Dict(required=False)
+    job_state = fields.String(required=False)
+
+
+class ResultSchema(OneOfSchema):
+    """Polymorphic schema for posting results in new and legacy formats."""
+
+    type_schemas = {
+        "new": ResultPost,
+        "legacy": ResultLegacy,
+    }
+
+    def get_obj_type(self, obj):
+        """Get object type depending on which schema is correctly parsed."""
+        return self.get_data_type(obj)
+
+    def get_data_type(self, data):
+        """Get schema type depending on which schema is correctly parsed."""
+        # Try legacy first
+        try:
+            ResultLegacy().load(data)
+            return "legacy"
+        except ValidationError:
+            # If legacy fails, try new format
+            try:
+                ResultPost().load(data)
+                return "new"
+            except ValidationError as err:
+                # Re-raise the last validation error with more context
+                raise ValidationError(
+                    "Invalid result data schema. "
+                    f"Data does not match either legacy or new format: {err}"
+                ) from err
+
+    def _dump(self, obj, **kwargs):
+        result = super()._dump(obj, **kwargs)
+        # Parent dump injects the type field:
+        #     result[self.type_field] = self.get_obj_type(obj)
+        # So we need to remove it
+        result.pop(self.type_field)
+        return result
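The dispatch order in get_data_type matters: the legacy schema is tried first, so a payload made of per-phase keys (or an empty payload, where every optional field is simply absent) is classified as legacy. A self-contained miniature of the same try-legacy-then-new pattern using plain marshmallow (LegacyMini and NewMini are illustrative schemas, not the real ones):

    from marshmallow import Schema, ValidationError, fields

    class LegacyMini(Schema):
        # Flat per-phase keys, everything optional (like ResultLegacy)
        setup_status = fields.Integer(required=False)
        setup_output = fields.String(required=False)

    class NewMini(Schema):
        # Structured format keyed by phase (stand-in for ResultPost)
        setup = fields.Dict(required=True)

    def data_type(data: dict) -> str:
        try:
            LegacyMini().load(data)  # unknown fields raise by default
            return "legacy"
        except ValidationError:
            NewMini().load(data)  # raises again if neither format matches
            return "new"

    assert data_type({"setup_status": 0}) == "legacy"
    assert data_type({"setup": {"status": 0}}) == "new"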
diff --git a/server/src/testflinger/api/v1.py b/server/src/testflinger/api/v1.py
index a401ea7e0..508e3b6c5 100644
--- a/server/src/testflinger/api/v1.py
+++ b/server/src/testflinger/api/v1.py
@@ -403,15 +403,21 @@ def to_url(self, obj):
 @v1.get("/result/<job_id>/log/<log_type>")
 @v1.output(schemas.LogGet)
 def log_get(job_id: str, log_type: LogType):
-    """Get logs for a specified job_id."""
+    """Get logs for a specified job_id.
+
+    :param job_id: UUID as a string for the job
+    :param log_type: LogType enum value for the type of log requested
+    :raises HTTPError: If the job_id is not a valid UUID or query is invalid
+    :return: Dictionary with log data
+    """
     args = request.args
     if not check_valid_uuid(job_id):
-        abort(400, message="Invalid job id\n")
+        abort(HTTPStatus.BAD_REQUEST, message="Invalid job id\n")
     query_schema = schemas.LogQueryParams()
     try:
         query_params = query_schema.load(args)
     except ValidationError as err:
-        abort(400, message=err.messages)
+        abort(HTTPStatus.BAD_REQUEST, message=err.messages)
     start_fragment = query_params.get("start_fragment", 0)
     start_timestamp = query_params.get("start_timestamp")
     phase = query_params.get("phase")
@@ -439,10 +445,16 @@ def log_get(job_id: str, log_type: LogType):
 
 @v1.post("/result/<job_id>/log/<log_type>")
 @v1.input(schemas.LogPost, location="json")
-def log_post(job_id: str, log_type: LogType, json_data: dict):
-    """Post logs for a specified job ID."""
+def log_post(job_id: str, log_type: LogType, json_data: dict) -> str:
+    """Post logs for a specified job ID.
+
+    :param job_id: UUID as a string for the job
+    :param log_type: LogType enum value for the type of log being posted
+    :param json_data: Dictionary with log data
+    :raises HTTPError: If the job_id is not a valid UUID
+    """
     if not check_valid_uuid(job_id):
-        abort(400, message="Invalid job_id specified")
+        abort(HTTPStatus.BAD_REQUEST, message="Invalid job_id specified")
     log_fragment = LogFragment(
         job_id,
         log_type,
@@ -457,15 +469,21 @@ def log_post(job_id: str, log_type: LogType, json_data: dict) -> str:
 
 @v1.post("/result/<job_id>")
-@v1.input(schemas.ResultPost, location="json")
-def result_post(job_id, json_data):
+@v1.input(schemas.ResultSchema, location="json")
+def result_post(job_id: str, json_data: dict) -> str:
     """Post a result for a specified job_id.
 
-    :param job_id:
-        UUID as a string for the job
+    :param job_id: UUID as a string for the job
+    :raises HTTPError: If the job_id is not a valid UUID
     """
     if not check_valid_uuid(job_id):
-        abort(400, message="Invalid job_id specified")
+        abort(HTTPStatus.BAD_REQUEST, message="Invalid job_id specified")
+
+    # Fail if the input payload is larger than the BSON size limit
+    # https://www.mongodb.com/docs/manual/reference/limits/
+    content_length = request.content_length
+    if content_length and content_length >= 16 * 1024 * 1024:
+        abort(HTTPStatus.REQUEST_ENTITY_TOO_LARGE, message="Payload too large")
 
     database.add_job_results(job_id, json_data)
     return "OK"
@@ -473,19 +491,26 @@ def result_post(job_id: str, json_data: dict) -> str:
 
 @v1.get("/result/<job_id>")
 @v1.output(schemas.ResultGet)
-def result_get(job_id):
+def result_get(job_id: str):
     """Return results for a specified job_id.
 
-    :param job_id:
-        UUID as a string for the job
+    :param job_id: UUID as a string for the job
+    :raises HTTPError: If the job_id is not a valid UUID
     """
     if not check_valid_uuid(job_id):
-        abort(400, message="Invalid job_id specified")
+        abort(HTTPStatus.BAD_REQUEST, message="Invalid job_id specified")
     response = database.get_job_results(job_id)
     if not response or not (result_data := response.get("result_data")):
-        return "", 204
+        return "", HTTPStatus.NO_CONTENT
+
+    if any(key.endswith(("_output", "_serial")) for key in result_data.keys()):
+        # Legacy result format detected; return it as-is
+        # TODO: Remove this path after deprecating legacy endpoints
+        return result_data
+
+    # Reconstruct the result format with logs and phase statuses
     log_handler = MongoLogHandler(database.mongo)
     result_logs = {
         phase + "_" + log_type: log_data
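The 16 * 1024 * 1024 figure in result_post is MongoDB's maximum BSON document size (16 MiB), so oversized results are refused up front with 413 instead of failing inside the database layer. A sketch of the gate's edge cases (payload_too_large is illustrative, not server code):

    BSON_MAX_BYTES = 16 * 1024 * 1024  # 16,777,216 bytes

    def payload_too_large(content_length) -> bool:
        # Mirrors the check above: a missing Content-Length (None) passes
        # through, matching the `content_length and ...` short-circuit.
        return bool(content_length and content_length >= BSON_MAX_BYTES)

    assert payload_too_large(None) is False
    assert payload_too_large(BSON_MAX_BYTES - 1) is False
    assert payload_too_large(BSON_MAX_BYTES) is True
    assert payload_too_large(17 * 1024 * 1024) is True  # as in the test below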
@@ -1219,3 +1244,89 @@ def secrets_delete(client_id, path):
         abort(HTTPStatus.INTERNAL_SERVER_ERROR, message=str(error))
 
     return "OK"
+
+
+@v1.get("/result/<job_id>/output")
+def legacy_output_get(job_id: str) -> str:
+    """Legacy endpoint to get job output for a specified job_id.
+
+    TODO: Remove after CLI/agent migration completes.
+
+    :param job_id: UUID as a string for the job
+    :raises HTTPError: BAD_REQUEST when job_id is invalid
+    :return: Plain text output
+    """
+    if not check_valid_uuid(job_id):
+        abort(HTTPStatus.BAD_REQUEST, message="Invalid job_id specified")
+    response = database.mongo.db.output.find_one_and_delete(
+        {"job_id": job_id}, {"_id": False}
+    )
+    output = response.get("output", []) if response else None
+    if output:
+        return "\n".join(output)
+    return "", HTTPStatus.NO_CONTENT
+
+
+@v1.post("/result/<job_id>/output")
+def legacy_output_post(job_id: str) -> str:
+    """Legacy endpoint to post output for a specified job_id.
+
+    TODO: Remove after CLI/agent migration completes.
+
+    :param job_id: UUID as a string for the job
+    :raises HTTPError: BAD_REQUEST when job_id is invalid
+    :return: "OK" on success
+    """
+    if not check_valid_uuid(job_id):
+        abort(HTTPStatus.BAD_REQUEST, message="Invalid job_id specified")
+    data = request.get_data().decode("utf-8")
+    timestamp = datetime.now(timezone.utc)
+    database.mongo.db.output.update_one(
+        {"job_id": job_id},
+        {"$set": {"updated_at": timestamp}, "$push": {"output": data}},
+        upsert=True,
+    )
+    return "OK"
+
+
+@v1.get("/result/<job_id>/serial_output")
+def legacy_serial_output_get(job_id: str) -> str:
+    """Legacy endpoint to get latest serial output for a specified job ID.
+
+    TODO: Remove after CLI/agent migration completes.
+
+    :param job_id: UUID as a string for the job
+    :raises HTTPError: BAD_REQUEST when job_id is invalid
+    :return: Plain text serial output
+    """
+    if not check_valid_uuid(job_id):
+        abort(HTTPStatus.BAD_REQUEST, message="Invalid job_id specified")
+    response = database.mongo.db.serial_output.find_one_and_delete(
+        {"job_id": job_id}, {"_id": False}
+    )
+    output = response.get("serial_output", []) if response else None
+    if output:
+        return "\n".join(output)
+    return "", HTTPStatus.NO_CONTENT
+
+
+@v1.post("/result/<job_id>/serial_output")
+def legacy_serial_output_post(job_id: str) -> str:
+    """Legacy endpoint to post serial output for a specified job ID.
+
+    TODO: Remove after CLI/agent migration completes.
+
+    :param job_id: UUID as a string for the job
+    :raises HTTPError: BAD_REQUEST when job_id is invalid
+    :return: "OK" on success
+    """
+    if not check_valid_uuid(job_id):
+        abort(HTTPStatus.BAD_REQUEST, message="Invalid job_id specified")
+    data = request.get_data().decode("utf-8")
+    timestamp = datetime.now(timezone.utc)
+    database.mongo.db.serial_output.update_one(
+        {"job_id": job_id},
+        {"$set": {"updated_at": timestamp}, "$push": {"serial_output": data}},
+        upsert=True,
+    )
+    return "OK"
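The legacy output endpoints implement read-once semantics: each POST appends a fragment to a per-job document ($push with upsert=True), and a GET drains whatever has accumulated (find_one_and_delete), so the same text is never served twice. In miniature (a dict standing in for the Mongo collection; not server code):

    store: dict[str, list[str]] = {}  # stands in for the "output" collection

    def legacy_post(job_id: str, data: str) -> None:
        store.setdefault(job_id, []).append(data)  # $push + upsert

    def legacy_get(job_id: str) -> str | None:
        doc = store.pop(job_id, None)  # find_one_and_delete
        return "\n".join(doc) if doc else None

    legacy_post("job-1", "line one")
    legacy_post("job-1", "line two")
    assert legacy_get("job-1") == "line one\nline two"
    assert legacy_get("job-1") is None  # already drained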
diff --git a/server/src/testflinger/database.py b/server/src/testflinger/database.py
index 434863636..f3f383133 100644
--- a/server/src/testflinger/database.py
+++ b/server/src/testflinger/database.py
@@ -90,6 +90,11 @@ def create_indexes():
         "updated_at", expireAfterSeconds=OUTPUT_EXPIRATION
     )
 
+    # Remove logs after 7 days
+    mongo.db.logs.create_index(
+        "updated_at", expireAfterSeconds=DEFAULT_EXPIRATION
+    )
+
     # Remove artifacts after 7 days
     mongo.db.fs.chunks.create_index(
         "uploadDate", expireAfterSeconds=DEFAULT_EXPIRATION
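The new index makes MongoDB's TTL monitor delete any logs document whose updated_at datetime is older than the expiry, with no application-side cleanup needed. A pymongo sketch (the URI and database name are placeholders, and SEVEN_DAYS is an assumption matching the "7 days" comment; the real value lives in DEFAULT_EXPIRATION):

    from pymongo import MongoClient

    SEVEN_DAYS = 7 * 24 * 60 * 60  # 604800 seconds; assumed DEFAULT_EXPIRATION

    client = MongoClient("mongodb://localhost:27017")  # placeholder URI
    db = client["testflinger"]  # illustrative database name
    # TTL index: a document expires once now() - updated_at exceeds the limit
    db.logs.create_index("updated_at", expireAfterSeconds=SEVEN_DAYS)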
diff --git a/server/tests/test_results.py b/server/tests/test_results.py
index 04c2b6531..695c4a13c 100644
--- a/server/tests/test_results.py
+++ b/server/tests/test_results.py
@@ -16,8 +16,10 @@
 """Unit tests for Testflinger v1 API results endpoint."""
 
 from datetime import datetime, timezone
+from http import HTTPStatus
 from io import BytesIO
 
+import pytest
 from testflinger_common.enums import LogType, TestPhase
@@ -135,3 +137,126 @@ def test_job_get_result_no_data(mongo_app):
     output = app.get(job_url)
     assert 204 == output.status_code
     assert "" == output.text
+
+
+def test_result_post_legacy_format(mongo_app):
+    """Test posting results in legacy format succeeds."""
+    app, _ = mongo_app
+    newjob = app.post("/v1/job", json={"job_queue": "test"})
+    job_id = newjob.json.get("job_id")
+
+    # Legacy format uses individual fields for each test phase
+    legacy_data = {
+        "setup_status": 0,
+        "setup_output": "Setup completed successfully.",
+        "test_status": 0,
+        "test_output": "All tests passed.",
+        "device_info": {"agent_name": "test", "device_ip": "1.1.1.1"},
+    }
+    response = app.post(f"/v1/result/{job_id}", json=legacy_data)
+    assert "OK" == response.text
+
+
+def test_result_post_invalid_format(mongo_app):
+    """Test posting results with invalid format fails due to ResultSchema."""
+    app, _ = mongo_app
+    newjob = app.post("/v1/job", json={"job_queue": "test"})
+    job_id = newjob.json.get("job_id")
+
+    # Use an invalid field to fail validation
+    invalid_data = {
+        "invalid_field": 123,
+    }
+    response = app.post(f"/v1/result/{job_id}", json=invalid_data)
+    assert "Validation error" in response.text
+    assert HTTPStatus.UNPROCESSABLE_ENTITY == response.status_code
+
+
+def test_result_get_legacy_format(mongo_app):
+    """Test getting results posted in legacy format."""
+    app, _ = mongo_app
+    newjob = app.post("/v1/job", json={"job_queue": "test"})
+    job_id = newjob.json.get("job_id")
+
+    legacy_data = {
+        "setup_status": 0,
+        "setup_output": "Setup completed successfully.",
+        "test_status": 0,
+        "test_output": "All tests passed.",
+        "device_info": {"agent_name": "test", "device_ip": "1.1.1.1"},
+    }
+
+    # Post legacy data to the job results first
+    app.post(f"/v1/result/{job_id}", json=legacy_data)
+
+    # Retrieve the results and verify they match what was posted
+    response = app.get(f"/v1/result/{job_id}")
+    result_data = response.json
+    for key, value in legacy_data.items():
+        assert result_data[key] == value
+
+
+def test_result_post_payload_too_large(mongo_app, monkeypatch):
+    """Test posting large payloads to the results endpoint fails."""
+    app, _ = mongo_app
+    newjob = app.post("/v1/job", json={"job_queue": "test"})
+    job_id = newjob.json.get("job_id")
+
+    large_data = {
+        "setup_status": 0,
+        "setup_output": "x" * (17 * 1024 * 1024),
+        "device_info": {"test": "device"},
+    }
+
+    response = app.post(
+        f"/v1/result/{job_id}",
+        json=large_data,
+    )
+    assert "Payload too large" in response.text
+    assert HTTPStatus.REQUEST_ENTITY_TOO_LARGE == response.status_code
+
+
+@pytest.mark.parametrize(
+    "endpoint",
+    [
+        "/v1/result/INVALID_UUID/log/output",
+        "/v1/result/INVALID_UUID/output",
+        "/v1/result/INVALID_UUID/serial_output",
+    ],
+)
+def test_invalid_uuid_get_endpoints(mongo_app, endpoint):
+    """Test that GET endpoints reject invalid UUIDs."""
+    app, _ = mongo_app
+    response = app.get(endpoint)
+
+    assert response.status_code == HTTPStatus.BAD_REQUEST
+    assert "Invalid job" in response.text
+
+
+@pytest.mark.parametrize(
+    "endpoint,data",
+    [
+        (
+            "/v1/result/INVALID_UUID/log/output",
+            {
+                "fragment_number": 1,
+                "timestamp": "2023-01-01T00:00:00Z",
+                "phase": "test",
+                "log_data": "test",
+            },
+        ),
+        ("/v1/result/INVALID_UUID/output", None),
+        ("/v1/result/INVALID_UUID/serial_output", None),
+    ],
+)
+def test_invalid_uuid_post_endpoints(mongo_app, endpoint, data):
+    """Test that POST endpoints reject invalid UUIDs."""
+    app, _ = mongo_app
+
+    if data:
+        response = app.post(endpoint, json=data)
+    else:
+        response = app.post(endpoint, data="test")
+
+    assert response.status_code == HTTPStatus.BAD_REQUEST
+    assert "Invalid job" in response.text or "Invalid job_id" in response.text
diff --git a/server/uv.lock b/server/uv.lock
index a704c6cd9..ad50db747 100644
--- a/server/uv.lock
+++ b/server/uv.lock
@@ -836,6 +836,8 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d", size = 584358, upload-time = "2025-08-07T13:18:23.708Z" },
     { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5", size = 1113550, upload-time = "2025-08-07T13:42:37.467Z" },
     { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f", size = 1137126, upload-time = "2025-08-07T13:18:20.239Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7", size = 1544904, upload-time = "2025-11-04T12:42:04.763Z" },
+    { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8", size = 1611228, upload-time = "2025-11-04T12:42:08.423Z" },
     { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c", size = 298654, upload-time = "2025-08-07T13:50:00.469Z" },
     { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" },
     { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" },
"sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c", size = 298654, upload-time = "2025-08-07T13:50:00.469Z" }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, @@ -845,6 +847,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, @@ -854,6 +858,8 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, @@ -863,6 +869,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, @@ -870,6 +878,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681", size = 1680760, upload-time = "2025-11-04T12:42:25.341Z" }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, ] @@ -1986,7 +1996,7 @@ dev = 
 
 [[package]]
 name = "testflinger-common"
-version = "1.1.3"
+version = "1.1.4"
 source = { editable = "../common" }
 dependencies = [
     { name = "strenum" },