From cc20641dedccf0a01fb277494fb3c9a225ac650c Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Wed, 6 Aug 2025 00:34:49 +0000 Subject: [PATCH 01/15] change tests from respx mocks to pytest-httpserver --- pyproject.toml | 1 + tests/integration/__init__.py | 0 tests/unit/__init__.py | 0 tests/unit/conftest.py | 31 ++ tests/unit/logging_old.py | 437 ++++++++++++++++++++++++ tests/unit/test_client_errors.py | 32 +- tests/unit/test_client_request_queue.py | 46 +-- tests/unit/test_client_timeouts.py | 95 ++++-- tests/unit/test_logging.py | 280 +++++++-------- uv.lock | 28 +- 10 files changed, 732 insertions(+), 218 deletions(-) create mode 100644 tests/integration/__init__.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/conftest.py create mode 100644 tests/unit/logging_old.py diff --git a/pyproject.toml b/pyproject.toml index 9d429449..5309aa78 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,7 @@ dev = [ "pytest-timeout>=2.4.0", "pytest-xdist~=3.8.0", "pytest~=8.4.0", + "pytest-httpserver>=1.1.3", "redbaron~=0.9.0", "respx~=0.22.0", "ruff~=0.12.0", diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 00000000..aea69820 --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1,31 @@ +from collections.abc import Iterable, Iterator +from logging import getLogger + +import pytest +from pytest_httpserver import HTTPServer + + +@pytest.fixture(scope='session') +def make_httpserver() -> Iterable[HTTPServer]: + werkzeug_logger = getLogger('werkzeug') + werkzeug_logger.disabled = True + + server = HTTPServer(threaded=True) + server.start() + yield server + server.clear() # type: ignore[no-untyped-call] + if server.is_running(): + server.stop() # type: ignore[no-untyped-call] + + +@pytest.fixture(scope='session') +def httpserver(make_httpserver: HTTPServer) -> HTTPServer: + return make_httpserver + + +@pytest.fixture +def patch_basic_url(httpserver: HTTPServer, monkeypatch: pytest.MonkeyPatch) -> Iterator[None]: + server_url = httpserver.url_for('/').removesuffix('/') + monkeypatch.setattr('apify_client.client.DEFAULT_API_URL', server_url) + yield + monkeypatch.undo() diff --git a/tests/unit/logging_old.py b/tests/unit/logging_old.py new file mode 100644 index 00000000..19dd6ede --- /dev/null +++ b/tests/unit/logging_old.py @@ -0,0 +1,437 @@ +import asyncio +import json +import logging +import threading +import time +from collections.abc import AsyncIterator, Generator, Iterator +from datetime import datetime, timedelta +from unittest.mock import patch + +import httpx +import pytest +import respx +from _pytest.logging import LogCaptureFixture +from apify_shared.consts import ActorJobStatus + +from apify_client import ApifyClient, ApifyClientAsync +from apify_client._logging import RedirectLogFormatter +from apify_client.clients.resource_clients.log import StatusMessageWatcher, StreamedLog + +_MOCKED_API_URL = '/logging' +_MOCKED_RUN_ID = 'mocked_run_id' +_MOCKED_ACTOR_NAME = 'mocked_actor_name' +_MOCKED_ACTOR_ID = 'mocked_actor_id' +_MOCKED_ACTOR_LOGS = ( + b'2025-05-13T07:24:12.588Z ACTOR: Pulling Docker image of build.\n' + b'2025-05-13T07:24:12.686Z ACTOR: Creating Docker container.\n' + b'2025-05-13T07:24:12.745Z ACTOR: Starting Docker container.\n', # Several logs merged into one chunk + 
b'2025-05-13T07:26:14.132Z [apify] DEBUG \xc3', # Chunked log split in the middle of the multibyte character + b'\xa1\n', # part 2 + b'2025-05-13T07:24:14.132Z [apify] INFO multiline \n log\n', + b'2025-05-13T07:25:14.132Z [apify] WARNING some warning\n', + b'2025-05-13T07:26:14.132Z [apify] DEBUG c\n', + b'2025-05-13T0', # Chunked log that got split in the marker + b'7:26:14.132Z [apify] DEBUG d\n' # part 2 + b'2025-05-13T07:27:14.132Z [apify] DEB', # Chunked log that got split outside of marker + b'UG e\n', # part 2 + # Already redirected message + b'2025-05-13T07:28:14.132Z [apify.redirect-logger runId:4U1oAnKau6jpzjUuA] -> 2025-05-13T07:27:14.132Z ACTOR:...\n', +) +_EXISTING_LOGS_BEFORE_REDIRECT_ATTACH = 3 + +_EXPECTED_MESSAGES_AND_LEVELS = ( + ('2025-05-13T07:24:12.588Z ACTOR: Pulling Docker image of build.', logging.INFO), + ('2025-05-13T07:24:12.686Z ACTOR: Creating Docker container.', logging.INFO), + ('2025-05-13T07:24:12.745Z ACTOR: Starting Docker container.', logging.INFO), + ('2025-05-13T07:26:14.132Z [apify] DEBUG á', logging.DEBUG), + ('2025-05-13T07:24:14.132Z [apify] INFO multiline \n log', logging.INFO), + ('2025-05-13T07:25:14.132Z [apify] WARNING some warning', logging.WARNING), + ('2025-05-13T07:26:14.132Z [apify] DEBUG c', logging.DEBUG), + ('2025-05-13T07:26:14.132Z [apify] DEBUG d', logging.DEBUG), + ('2025-05-13T07:27:14.132Z [apify] DEBUG e', logging.DEBUG), + ( + '2025-05-13T07:28:14.132Z [apify.redirect-logger runId:4U1oAnKau6jpzjUuA] -> ' + '2025-05-13T07:27:14.132Z ACTOR:...', + logging.INFO, + ), +) + +_EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES = ( + ('Status: RUNNING, Message: Initial message', logging.INFO), + *_EXPECTED_MESSAGES_AND_LEVELS, + ('Status: RUNNING, Message: Another message', logging.INFO), + ('Status: SUCCEEDED, Message: Final message', logging.INFO), +) + + +@pytest.fixture +def mock_api() -> None: + test_server_lock = threading.Lock() + + def get_responses() -> Generator[httpx.Response, None, None]: + """Simulate actor run that changes status 3 times.""" + for _ in range(5): + yield httpx.Response( + content=json.dumps( + { + 'data': { + 'id': _MOCKED_RUN_ID, + 'actId': _MOCKED_ACTOR_ID, + 'status': ActorJobStatus.RUNNING, + 'statusMessage': 'Initial message', + 'isStatusMessageTerminal': False, + } + } + ), + status_code=200, + ) + for _ in range(5): + yield httpx.Response( + content=json.dumps( + { + 'data': { + 'id': _MOCKED_RUN_ID, + 'actId': _MOCKED_ACTOR_ID, + 'status': ActorJobStatus.RUNNING, + 'statusMessage': 'Another message', + 'isStatusMessageTerminal': False, + } + } + ), + status_code=200, + ) + while True: + yield httpx.Response( + content=json.dumps( + { + 'data': { + 'id': _MOCKED_RUN_ID, + 'actId': _MOCKED_ACTOR_ID, + 'status': ActorJobStatus.SUCCEEDED, + 'statusMessage': 'Final message', + 'isStatusMessageTerminal': True, + } + } + ), + status_code=200, + ) + + responses = get_responses() + + def actor_runs_side_effect(_: httpx.Request) -> httpx.Response: + test_server_lock.acquire() + # To avoid multiple threads accessing at the same time and causing `ValueError: generator already executing` + response = next(responses) + test_server_lock.release_lock() + return response + + respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}').mock(side_effect=actor_runs_side_effect) + + respx.get(url=f'{_MOCKED_API_URL}/v2/acts/{_MOCKED_ACTOR_ID}').mock( + return_value=httpx.Response(content=json.dumps({'data': {'name': _MOCKED_ACTOR_NAME}}), status_code=200) + ) + + 
respx.post(url=f'{_MOCKED_API_URL}/v2/acts/{_MOCKED_ACTOR_ID}/runs').mock( + return_value=httpx.Response(content=json.dumps({'data': {'id': _MOCKED_RUN_ID}}), status_code=200) + ) + + +@pytest.fixture +def mock_api_async(mock_api: None) -> None: # noqa: ARG001, fixture + class AsyncByteStream(httpx._types.AsyncByteStream): + async def __aiter__(self) -> AsyncIterator[bytes]: + for i in _MOCKED_ACTOR_LOGS: + yield i + await asyncio.sleep(0.01) + + async def aclose(self) -> None: + pass + + respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}/log?stream=1&raw=1').mock( + return_value=httpx.Response(stream=AsyncByteStream(), status_code=200) + ) + + +@pytest.fixture +def mock_api_sync(mock_api: None) -> None: # noqa: ARG001, fixture + class SyncByteStream(httpx._types.SyncByteStream): + def __iter__(self) -> Iterator[bytes]: + for i in _MOCKED_ACTOR_LOGS: + yield i + time.sleep(0.01) + + def close(self) -> None: + pass + + respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}/log?stream=1&raw=1').mock( + return_value=httpx.Response(stream=SyncByteStream(), status_code=200) + ) + + +@pytest.fixture +def propagate_stream_logs() -> None: + # Enable propagation of logs to the caplog fixture + StreamedLog._force_propagate = True + StatusMessageWatcher._force_propagate = True + logging.getLogger(f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}').setLevel(logging.DEBUG) + + +@pytest.fixture +def reduce_final_timeout_for_status_message_redirector() -> None: + """Reduce timeout used by the `StatusMessageWatcher` + + This timeout makes sense on the platform, but in tests it is better to reduce it to speed up the tests. + """ + StatusMessageWatcher._final_sleep_time_s = 2 + + +@pytest.mark.parametrize( + ('log_from_start', 'expected_log_count'), + [ + (True, len(_EXPECTED_MESSAGES_AND_LEVELS)), + (False, len(_EXPECTED_MESSAGES_AND_LEVELS) - _EXISTING_LOGS_BEFORE_REDIRECT_ATTACH), + ], +) +@respx.mock +async def test_redirected_logs_async( + *, + caplog: LogCaptureFixture, + mock_api_async: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + log_from_start: bool, + expected_log_count: int, +) -> None: + """Test that redirected logs are formatted correctly.""" + + run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + + with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: + # Mock `now()` so that it has timestamp bigger than the first 3 logs + mocked_datetime.now.return_value = datetime.fromisoformat('2025-05-13T07:24:14.132+00:00') + streamed_log = await run_client.get_streamed_log(from_start=log_from_start) + + # Set `propagate=True` during the tests, so that caplog can see the logs.. + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + + with caplog.at_level(logging.DEBUG, logger=logger_name): + async with streamed_log: + # Do stuff while the log from the other Actor is being redirected to the logs. 
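+                # Note: the mocked stream fixtures yield their chunks with 0.01 s pauses,
+                # so sleeping 2 s comfortably lets the whole log arrive before the
+                # assertions below run.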
+ await asyncio.sleep(2) + + # Ensure logs are propagated + assert {(record.message, record.levelno) for record in caplog.records} == set( + _EXPECTED_MESSAGES_AND_LEVELS[-expected_log_count:] + ) + + +@pytest.mark.parametrize( + ('log_from_start', 'expected_log_count'), + [ + (True, len(_EXPECTED_MESSAGES_AND_LEVELS)), + (False, len(_EXPECTED_MESSAGES_AND_LEVELS) - _EXISTING_LOGS_BEFORE_REDIRECT_ATTACH), + ], +) +@respx.mock +def test_redirected_logs_sync( + *, + caplog: LogCaptureFixture, + mock_api_sync: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + log_from_start: bool, + expected_log_count: int, +) -> None: + """Test that redirected logs are formatted correctly.""" + + run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + + with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: + # Mock `now()` so that it has timestamp bigger than the first 3 logs + mocked_datetime.now.return_value = datetime.fromisoformat('2025-05-13T07:24:14.132+00:00') + streamed_log = run_client.get_streamed_log(from_start=log_from_start) + + # Set `propagate=True` during the tests, so that caplog can see the logs.. + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + + with caplog.at_level(logging.DEBUG, logger=logger_name), streamed_log: + # Do stuff while the log from the other Actor is being redirected to the logs. + time.sleep(2) + + # Ensure logs are propagated + assert {(record.message, record.levelno) for record in caplog.records} == set( + _EXPECTED_MESSAGES_AND_LEVELS[-expected_log_count:] + ) + + +@respx.mock +async def test_actor_call_redirect_logs_to_default_logger_async( + caplog: LogCaptureFixture, + mock_api_async: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture +) -> None: + """Test that logs are redirected correctly to the default logger. + + Caplog contains logs before formatting, so formatting is not included in the test expectations.""" + logger_name = f'apify.{_MOCKED_ACTOR_NAME} runId:{_MOCKED_RUN_ID}' + logger = logging.getLogger(logger_name) + actor_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + await actor_client.call() + + # Ensure expected handler and formater + assert isinstance(logger.handlers[0].formatter, RedirectLogFormatter) + assert isinstance(logger.handlers[0], logging.StreamHandler) + + # Ensure logs are propagated + assert {(record.message, record.levelno) for record in caplog.records} == set( + _EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES + ) + + +@respx.mock +def test_actor_call_redirect_logs_to_default_logger_sync( + caplog: LogCaptureFixture, + mock_api_sync: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture +) -> None: + """Test that logs are redirected correctly to the default logger. 
+ + Caplog contains logs before formatting, so formatting is not included in the test expectations.""" + logger_name = f'apify.{_MOCKED_ACTOR_NAME} runId:{_MOCKED_RUN_ID}' + logger = logging.getLogger(logger_name) + actor_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + actor_client.call() + + # Ensure expected handler and formater + assert isinstance(logger.handlers[0].formatter, RedirectLogFormatter) + assert isinstance(logger.handlers[0], logging.StreamHandler) + + # Ensure logs are propagated + assert {(record.message, record.levelno) for record in caplog.records} == set( + _EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES + ) + + +@respx.mock +async def test_actor_call_no_redirect_logs_async( + caplog: LogCaptureFixture, + mock_api_async: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture +) -> None: + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + actor_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + await actor_client.call(logger=None) + + assert len(caplog.records) == 0 + + +@respx.mock +def test_actor_call_no_redirect_logs_sync( + caplog: LogCaptureFixture, + mock_api_sync: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture +) -> None: + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + actor_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + actor_client.call(logger=None) + + assert len(caplog.records) == 0 + + +@respx.mock +async def test_actor_call_redirect_logs_to_custom_logger_async( + caplog: LogCaptureFixture, + mock_api_async: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture +) -> None: + """Test that logs are redirected correctly to the custom logger.""" + logger_name = 'custom_logger' + logger = logging.getLogger(logger_name) + actor_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + await actor_client.call(logger=logger) + + # Ensure logs are propagated + assert {(record.message, record.levelno) for record in caplog.records} == set( + _EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES + ) + + +@respx.mock +def test_actor_call_redirect_logs_to_custom_logger_sync( + caplog: LogCaptureFixture, + mock_api_sync: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture +) -> None: + """Test that logs are redirected correctly to the custom logger.""" + logger_name = 'custom_logger' + logger = logging.getLogger(logger_name) + actor_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + actor_client.call(logger=logger) + + # Ensure logs are propagated + assert {(record.message, record.levelno) for record in caplog.records} == set( + _EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES + ) + + +@respx.mock +async def test_redirect_status_message_async( + *, + caplog: LogCaptureFixture, + 
mock_api: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture +) -> None: + """Test redirected status and status messages.""" + + run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + + status_message_redirector = await run_client.get_status_message_watcher(check_period=timedelta(seconds=0)) + with caplog.at_level(logging.DEBUG, logger=logger_name): + async with status_message_redirector: + # Do stuff while the status from the other Actor is being redirected to the logs. + await asyncio.sleep(3) + + assert caplog.records[0].message == 'Status: RUNNING, Message: Initial message' + assert caplog.records[1].message == 'Status: RUNNING, Message: Another message' + assert caplog.records[2].message == 'Status: SUCCEEDED, Message: Final message' + + +@respx.mock +def test_redirect_status_message_sync( + *, + caplog: LogCaptureFixture, + mock_api: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture +) -> None: + """Test redirected status and status messages.""" + + run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + + status_message_redirector = run_client.get_status_message_watcher(check_period=timedelta(seconds=0)) + with caplog.at_level(logging.DEBUG, logger=logger_name), status_message_redirector: + # Do stuff while the status from the other Actor is being redirected to the logs. + time.sleep(3) + + assert caplog.records[0].message == 'Status: RUNNING, Message: Initial message' + assert caplog.records[1].message == 'Status: RUNNING, Message: Another message' + assert caplog.records[2].message == 'Status: SUCCEEDED, Message: Final message' diff --git a/tests/unit/test_client_errors.py b/tests/unit/test_client_errors.py index 71d0e4ea..fe127509 100644 --- a/tests/unit/test_client_errors.py +++ b/tests/unit/test_client_errors.py @@ -1,14 +1,16 @@ -import json -from collections.abc import Generator +from __future__ import annotations + +from typing import TYPE_CHECKING -import httpx import pytest -import respx from apify_client._errors import ApifyApiError from apify_client._http_client import HTTPClient, HTTPClientAsync -_TEST_URL = 'http://example.com' +if TYPE_CHECKING: + from pytest_httpserver import HTTPServer + +_TEST_PATH = '/errors' _EXPECTED_MESSAGE = 'some_message' _EXPECTED_TYPE = 'some_type' _EXPECTED_DATA = { @@ -16,34 +18,32 @@ } -@pytest.fixture(autouse=True) -def mocked_response() -> Generator[respx.MockRouter]: - response_content = json.dumps( - {'error': {'message': _EXPECTED_MESSAGE, 'type': _EXPECTED_TYPE, 'data': _EXPECTED_DATA}} +@pytest.fixture +def test_endpoint(httpserver: HTTPServer) -> str: + httpserver.expect_request(_TEST_PATH).respond_with_json( + {'error': {'message': _EXPECTED_MESSAGE, 'type': _EXPECTED_TYPE, 'data': _EXPECTED_DATA}}, status=400 ) - with respx.mock() as respx_mock: - respx_mock.get(_TEST_URL).mock(return_value=httpx.Response(400, content=response_content)) - yield respx_mock + return str(httpserver.url_for(_TEST_PATH)) -def test_client_apify_api_error_with_data() -> None: +def test_client_apify_api_error_with_data(test_endpoint: str) -> None: """Test that client correctly throws ApifyApiError 
with error data from response.""" client = HTTPClient() with pytest.raises(ApifyApiError) as e: - client.call(method='GET', url=_TEST_URL) + client.call(method='GET', url=test_endpoint) assert e.value.message == _EXPECTED_MESSAGE assert e.value.type == _EXPECTED_TYPE assert e.value.data == _EXPECTED_DATA -async def test_async_client_apify_api_error_with_data() -> None: +async def test_async_client_apify_api_error_with_data(test_endpoint: str) -> None: """Test that async client correctly throws ApifyApiError with error data from response.""" client = HTTPClientAsync() with pytest.raises(ApifyApiError) as e: - await client.call(method='GET', url=_TEST_URL) + await client.call(method='GET', url=test_endpoint) assert e.value.message == _EXPECTED_MESSAGE assert e.value.type == _EXPECTED_TYPE diff --git a/tests/unit/test_client_request_queue.py b/tests/unit/test_client_request_queue.py index 8e339305..ec6bf606 100644 --- a/tests/unit/test_client_request_queue.py +++ b/tests/unit/test_client_request_queue.py @@ -1,9 +1,16 @@ +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + import pytest -import respx import apify_client from apify_client import ApifyClient, ApifyClientAsync +if TYPE_CHECKING: + from pytest_httpserver import HTTPServer + _PARTIALLY_ADDED_BATCH_RESPONSE_CONTENT = """{ "data": { "processedRequests": [ @@ -25,12 +32,11 @@ }""" -@respx.mock -async def test_batch_not_processed_raises_exception_async() -> None: +@pytest.mark.usefixtures('patch_basic_url') +async def test_batch_not_processed_raises_exception_async(httpserver: HTTPServer) -> None: """Test that client exceptions are not silently ignored""" - client = ApifyClientAsync(token='') - - respx.route(method='POST', host='api.apify.com').mock(return_value=respx.MockResponse(401)) + client = ApifyClientAsync(token='placeholder_token') + httpserver.expect_oneshot_request(re.compile(r'.*'), method='POST').respond_with_data(status=401) requests = [ {'uniqueKey': 'http://example.com/1', 'url': 'http://example.com/1', 'method': 'GET'}, {'uniqueKey': 'http://example.com/2', 'url': 'http://example.com/2', 'method': 'GET'}, @@ -41,12 +47,12 @@ async def test_batch_not_processed_raises_exception_async() -> None: await rq_client.batch_add_requests(requests=requests) -@respx.mock -async def test_batch_processed_partially_async() -> None: - client = ApifyClientAsync(token='') +@pytest.mark.usefixtures('patch_basic_url') +async def test_batch_processed_partially_async(httpserver: HTTPServer) -> None: + client = ApifyClientAsync(token='placeholder_token') - respx.route(method='POST', host='api.apify.com').mock( - return_value=respx.MockResponse(200, content=_PARTIALLY_ADDED_BATCH_RESPONSE_CONTENT) + httpserver.expect_oneshot_request(re.compile(r'.*'), method='POST').respond_with_data( + status=200, response_data=_PARTIALLY_ADDED_BATCH_RESPONSE_CONTENT ) requests = [ {'uniqueKey': 'http://example.com/1', 'url': 'http://example.com/1', 'method': 'GET'}, @@ -59,12 +65,12 @@ async def test_batch_processed_partially_async() -> None: assert response['unprocessedRequests'] == [requests[1]] -@respx.mock -def test_batch_not_processed_raises_exception_sync() -> None: +@pytest.mark.usefixtures('patch_basic_url') +def test_batch_not_processed_raises_exception_sync(httpserver: HTTPServer) -> None: """Test that client exceptions are not silently ignored""" - client = ApifyClient(token='') + client = ApifyClient(token='placeholder_token') - respx.route(method='POST', 
host='api.apify.com').mock(return_value=respx.MockResponse(401))
+    httpserver.expect_oneshot_request(re.compile(r'.*'), method='POST').respond_with_data(status=401)
     requests = [
         {'uniqueKey': 'http://example.com/1', 'url': 'http://example.com/1', 'method': 'GET'},
         {'uniqueKey': 'http://example.com/2', 'url': 'http://example.com/2', 'method': 'GET'},
@@ -75,12 +81,12 @@ def test_batch_not_processed_raises_exception_sync() -> None:
         rq_client.batch_add_requests(requests=requests)
 
 
-@respx.mock
-async def test_batch_processed_partially_sync() -> None:
-    client = ApifyClient(token='')
+@pytest.mark.usefixtures('patch_basic_url')
+async def test_batch_processed_partially_sync(httpserver: HTTPServer) -> None:
+    client = ApifyClient(token='placeholder_token')
 
-    respx.route(method='POST', host='api.apify.com').mock(
-        return_value=respx.MockResponse(200, content=_PARTIALLY_ADDED_BATCH_RESPONSE_CONTENT)
+    httpserver.expect_oneshot_request(re.compile(r'.*'), method='POST').respond_with_data(
+        status=200, response_data=_PARTIALLY_ADDED_BATCH_RESPONSE_CONTENT
     )
     requests = [
         {'uniqueKey': 'http://example.com/1', 'url': 'http://example.com/1', 'method': 'GET'},
diff --git a/tests/unit/test_client_timeouts.py b/tests/unit/test_client_timeouts.py
index 82362644..3259c977 100644
--- a/tests/unit/test_client_timeouts.py
+++ b/tests/unit/test_client_timeouts.py
@@ -1,10 +1,13 @@
 from __future__ import annotations
 
+import time
 from functools import partial
+from typing import TYPE_CHECKING
+from unittest.mock import Mock
 
 import pytest
 import respx
-from httpx import Request, Response, TimeoutException
+from werkzeug import Response as WerkzeugResponse
 
 from apify_client import ApifyClient
 from apify_client._http_client import HTTPClient, HTTPClientAsync
@@ -13,13 +16,17 @@
 from apify_client.clients.resource_clients import dataset, request_queue
 from apify_client.clients.resource_clients import key_value_store as kvs
 
+if TYPE_CHECKING:
+    from httpx import Request, Response
+    from pytest_httpserver import HTTPServer
+    from werkzeug import Request as WerkzeugRequest
+
 
 class EndOfTestError(Exception):
     """Custom exception that is raised after the relevant part of the code is executed to stop the test."""
 
 
-@respx.mock
-async def test_dynamic_timeout_async_client() -> None:
+async def test_dynamic_timeout_async_client(httpserver: HTTPServer) -> None:
     """Tests timeout values for request with retriable errors.
 
     Values should increase with each attempt, starting from initial call value and bounded by the client timeout value.
@@ -28,27 +35,35 @@
     call_timeout = 1
     client_timeout = 5
     expected_timeouts = iter((call_timeout, 2, 4, client_timeout))
+    retry_counter_mock = Mock()
+
+    def slow_handler(_request: WerkzeugRequest) -> WerkzeugResponse:
+        timeout = next(expected_timeouts)
+        should_raise = next(should_raise_error)
+        # Counter for retries
+        retry_counter_mock()
+
+        if should_raise:
+            # Sleep longer than the client is willing to wait. This will cause a timeout on the client side.
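+            # The client increases its timeout on each retry (here 1 s -> 2 s -> 4 s,
+            # capped by `client_timeout`), so overshooting the expected per-attempt
+            # timeout by a small margin is enough to trigger a retriable read timeout.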
+            time.sleep(timeout + 0.02)
 
-    def check_timeout(request: Request) -> Response:
-        expected_timeout = next(expected_timeouts)
-        assert request.extensions['timeout'] == {
-            'connect': expected_timeout,
-            'pool': expected_timeout,
-            'read': expected_timeout,
-            'write': expected_timeout,
-        }
-        if next(should_raise_error):
-            raise TimeoutException('This error can be retried')
-        return Response(200)
-
-    respx.get('https://example.com').mock(side_effect=check_timeout)
-    await HTTPClientAsync(timeout_secs=client_timeout).call(
-        method='GET', url='https://example.com', timeout_secs=call_timeout
+        return WerkzeugResponse('200 OK')
+
+    httpserver.expect_request('/async_timeout', method='GET').respond_with_handler(slow_handler)
+
+    server_url = str(httpserver.url_for('/async_timeout'))
+    response = await HTTPClientAsync(timeout_secs=client_timeout).call(
+        method='GET', url=server_url, timeout_secs=call_timeout
     )
+    # Check that the retry counter was called the expected number of times
+    # (4 times: 3 retries + 1 final successful call)
+    assert retry_counter_mock.call_count == 4
+    # Check that the response is successful
+    assert response.status_code == 200
 
 
-@respx.mock
-def test_dynamic_timeout_sync_client() -> None:
+
+def test_dynamic_timeout_sync_client(httpserver: HTTPServer) -> None:
     """Tests timeout values for request with retriable errors.
 
     Values should increase with each attempt, starting from initial call value and bounded by the client timeout value.
@@ -57,21 +72,31 @@ def test_dynamic_timeout_sync_client() -> None:
     call_timeout = 1
     client_timeout = 5
     expected_timeouts = iter((call_timeout, 2, 4, client_timeout))
+    retry_counter_mock = Mock()
+
+    def slow_handler(_request: WerkzeugRequest) -> WerkzeugResponse:
+        timeout = next(expected_timeouts)
+        should_raise = next(should_raise_error)
+        # Counter for retries
+        retry_counter_mock()
+
+        if should_raise:
+            # Sleep longer than the client is willing to wait. This will cause a timeout on the client side.
+            time.sleep(timeout + 0.02)
+
+        return WerkzeugResponse('200 OK')
+
+    httpserver.expect_request('/sync_timeout', method='GET').respond_with_handler(slow_handler)
+
+    server_url = str(httpserver.url_for('/sync_timeout'))
 
-    def check_timeout(request: Request) -> Response:
-        expected_timeout = next(expected_timeouts)
-        assert request.extensions['timeout'] == {
-            'connect': expected_timeout,
-            'pool': expected_timeout,
-            'read': expected_timeout,
-            'write': expected_timeout,
-        }
-        if next(should_raise_error):
-            raise TimeoutException('This error can be retired')
-        return Response(200)
+    response = HTTPClient(timeout_secs=client_timeout).call(method='GET', url=server_url, timeout_secs=call_timeout)
 
-    respx.get('https://example.com').mock(side_effect=check_timeout)
-    HTTPClient(timeout_secs=client_timeout).call(method='GET', url='https://example.com', timeout_secs=call_timeout)
+    # Check that the retry counter was called the expected number of times
+    # (4 times: 3 retries + 1 final successful call)
+    assert retry_counter_mock.call_count == 4
+    # Check that the response is successful
+    assert response.status_code == 200
 
 
 def assert_timeout(expected_timeout: int, request: Request) -> Response:
@@ -122,6 +147,8 @@ def assert_timeout(expected_timeout: int, request: Request) -> Response:
 ]
 
 
+# This test will probably need to be reworked or skipped when switching to `impit`.
+# Without the mock library, it's difficult to reproduce, maybe with monkeypatch?
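+# A possible sketch (untested): monkeypatch `HTTPClient.call` / `HTTPClientAsync.call` to
+# record the `timeout_secs` argument each resource-client method passes in, and assert on
+# the recorded values instead of inspecting `request.extensions`.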
@pytest.mark.parametrize( ('client_type', 'method', 'expected_timeout', 'kwargs'), _timeout_params, @@ -139,6 +166,8 @@ def test_specific_timeouts_for_specific_endpoints_sync( getattr(client, method)(**kwargs) +# This test will probably need to be reworked or skipped when switching to `impit`. +# Without the mock library, it's difficult to reproduce, maybe with monkeypatch? @pytest.mark.parametrize( ('client_type', 'method', 'expected_timeout', 'kwargs'), _timeout_params, diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 636f0fc4..52d05f06 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -1,23 +1,27 @@ +from __future__ import annotations + import asyncio import json import logging -import threading import time -from collections.abc import AsyncIterator, Generator, Iterator from datetime import datetime, timedelta +from typing import TYPE_CHECKING from unittest.mock import patch -import httpx import pytest -import respx -from _pytest.logging import LogCaptureFixture from apify_shared.consts import ActorJobStatus +from werkzeug import Request, Response from apify_client import ApifyClient, ApifyClientAsync from apify_client._logging import RedirectLogFormatter from apify_client.clients.resource_clients.log import StatusMessageWatcher, StreamedLog -_MOCKED_API_URL = 'https://example.com' +if TYPE_CHECKING: + from collections.abc import Iterator + + from _pytest.logging import LogCaptureFixture + from pytest_httpserver import HTTPServer + _MOCKED_RUN_ID = 'mocked_run_id' _MOCKED_ACTOR_NAME = 'mocked_actor_name' _MOCKED_ACTOR_ID = 'mocked_actor_id' @@ -64,113 +68,90 @@ ) -@pytest.fixture -def mock_api() -> None: - test_server_lock = threading.Lock() - - def get_responses() -> Generator[httpx.Response, None, None]: - """Simulate actor run that changes status 3 times.""" - for _ in range(5): - yield httpx.Response( - content=json.dumps( - { - 'data': { - 'id': _MOCKED_RUN_ID, - 'actId': _MOCKED_ACTOR_ID, - 'status': ActorJobStatus.RUNNING, - 'statusMessage': 'Initial message', - 'isStatusMessageTerminal': False, - } - } - ), - status_code=200, - ) - for _ in range(5): - yield httpx.Response( - content=json.dumps( - { - 'data': { - 'id': _MOCKED_RUN_ID, - 'actId': _MOCKED_ACTOR_ID, - 'status': ActorJobStatus.RUNNING, - 'statusMessage': 'Another message', - 'isStatusMessageTerminal': False, - } - } - ), - status_code=200, - ) - while True: - yield httpx.Response( - content=json.dumps( - { - 'data': { - 'id': _MOCKED_RUN_ID, - 'actId': _MOCKED_ACTOR_ID, - 'status': ActorJobStatus.SUCCEEDED, - 'statusMessage': 'Final message', - 'isStatusMessageTerminal': True, - } - } - ), - status_code=200, - ) - - responses = get_responses() - - def actor_runs_side_effect(_: httpx.Request) -> httpx.Response: - test_server_lock.acquire() - # To avoid multiple threads accessing at the same time and causing `ValueError: generator already executing` - response = next(responses) - test_server_lock.release_lock() - return response - - respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}').mock(side_effect=actor_runs_side_effect) - - respx.get(url=f'{_MOCKED_API_URL}/v2/acts/{_MOCKED_ACTOR_ID}').mock( - return_value=httpx.Response(content=json.dumps({'data': {'name': _MOCKED_ACTOR_NAME}}), status_code=200) - ) +class StatusResponseGenerator: + """Generator for actor run status responses to simulate changing status over time.""" - respx.post(url=f'{_MOCKED_API_URL}/v2/acts/{_MOCKED_ACTOR_ID}/runs').mock( - 
return_value=httpx.Response(content=json.dumps({'data': {'id': _MOCKED_RUN_ID}}), status_code=200) - ) + def __init__(self) -> None: + self.current_status_index = 0 + self.requests_for_current_status = 0 + self.min_requests_per_status = 5 + + self.statuses = [ + ('Initial message', ActorJobStatus.RUNNING, False), + ('Another message', ActorJobStatus.RUNNING, False), + ('Final message', ActorJobStatus.SUCCEEDED, True), + ] + + def get_response(self, _request: Request) -> Response: + if self.current_status_index < len(self.statuses): + message, status, is_terminal = self.statuses[self.current_status_index] + else: + message, status, is_terminal = self.statuses[-1] + + self.requests_for_current_status += 1 + + if ( + self.requests_for_current_status >= self.min_requests_per_status + and self.current_status_index < len(self.statuses) - 1 + and not is_terminal + ): + self.current_status_index += 1 + self.requests_for_current_status = 0 + + status_data = { + 'data': { + 'id': _MOCKED_RUN_ID, + 'actId': _MOCKED_ACTOR_ID, + 'status': status, + 'statusMessage': message, + 'isStatusMessageTerminal': is_terminal, + } + } + + return Response(response=json.dumps(status_data), status=200, mimetype='application/json') + + +def _streaming_log_handler(_request: Request) -> Response: + """Handler for streaming log requests.""" + + def generate_logs() -> Iterator[bytes]: + for chunk in _MOCKED_ACTOR_LOGS: + yield chunk + time.sleep(0.01) + + return Response(response=generate_logs(), status=200, mimetype='application/octet-stream') @pytest.fixture -def mock_api_async(mock_api: None) -> None: # noqa: ARG001, fixture - class AsyncByteStream(httpx._types.AsyncByteStream): - async def __aiter__(self) -> AsyncIterator[bytes]: - for i in _MOCKED_ACTOR_LOGS: - yield i - await asyncio.sleep(0.01) - - async def aclose(self) -> None: - pass - - respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}/log?stream=1&raw=1').mock( - return_value=httpx.Response(stream=AsyncByteStream(), status_code=200) +def mock_api(httpserver: HTTPServer) -> None: + """Set up HTTP server with mocked API endpoints.""" + httpserver.clear() # type: ignore[no-untyped-call] + + status_generator = StatusResponseGenerator() + + # Add actor run status endpoint + httpserver.expect_request(f'/v2/actor-runs/{_MOCKED_RUN_ID}', method='GET').respond_with_handler( + status_generator.get_response ) + # Add actor info endpoint + httpserver.expect_request(f'/v2/acts/{_MOCKED_ACTOR_ID}', method='GET').respond_with_json( + {'data': {'name': _MOCKED_ACTOR_NAME}} + ) -@pytest.fixture -def mock_api_sync(mock_api: None) -> None: # noqa: ARG001, fixture - class SyncByteStream(httpx._types.SyncByteStream): - def __iter__(self) -> Iterator[bytes]: - for i in _MOCKED_ACTOR_LOGS: - yield i - time.sleep(0.01) - - def close(self) -> None: - pass - - respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}/log?stream=1&raw=1').mock( - return_value=httpx.Response(stream=SyncByteStream(), status_code=200) + # Add actor run creation endpoint + httpserver.expect_request(f'/v2/acts/{_MOCKED_ACTOR_ID}/runs', method='POST').respond_with_json( + {'data': {'id': _MOCKED_RUN_ID}} ) + httpserver.expect_request( + f'/v2/actor-runs/{_MOCKED_RUN_ID}/log', method='GET', query_string='stream=1&raw=1' + ).respond_with_handler(_streaming_log_handler) + @pytest.fixture def propagate_stream_logs() -> None: - # Enable propagation of logs to the caplog fixture + """Enable propagation of logs to the caplog fixture.""" StreamedLog._force_propagate = True 
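+    # Propagation must be forced on both classes; caplog only captures records from
+    # loggers that propagate.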
StatusMessageWatcher._force_propagate = True logging.getLogger(f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}').setLevel(logging.DEBUG) @@ -178,7 +159,7 @@ def propagate_stream_logs() -> None: @pytest.fixture def reduce_final_timeout_for_status_message_redirector() -> None: - """Reduce timeout used by the `StatusMessageWatcher` + """Reduce timeout used by the `StatusMessageWatcher`. This timeout makes sense on the platform, but in tests it is better to reduce it to speed up the tests. """ @@ -192,18 +173,19 @@ def reduce_final_timeout_for_status_message_redirector() -> None: (False, len(_EXPECTED_MESSAGES_AND_LEVELS) - _EXISTING_LOGS_BEFORE_REDIRECT_ATTACH), ], ) -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs') async def test_redirected_logs_async( *, caplog: LogCaptureFixture, - mock_api_async: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture log_from_start: bool, expected_log_count: int, + httpserver: HTTPServer, ) -> None: """Test that redirected logs are formatted correctly.""" - run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + api_url = httpserver.url_for('/').removesuffix('/') + + run_client = ApifyClientAsync(token='mocked_token', api_url=api_url).run(run_id=_MOCKED_RUN_ID) with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: # Mock `now()` so that it has timestamp bigger than the first 3 logs @@ -231,18 +213,19 @@ async def test_redirected_logs_async( (False, len(_EXPECTED_MESSAGES_AND_LEVELS) - _EXISTING_LOGS_BEFORE_REDIRECT_ATTACH), ], ) -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs') def test_redirected_logs_sync( *, caplog: LogCaptureFixture, - mock_api_sync: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture log_from_start: bool, expected_log_count: int, + httpserver: HTTPServer, ) -> None: """Test that redirected logs are formatted correctly.""" - run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + api_url = httpserver.url_for('/').removesuffix('/') + + run_client = ApifyClient(token='mocked_token', api_url=api_url).run(run_id=_MOCKED_RUN_ID) with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: # Mock `now()` so that it has timestamp bigger than the first 3 logs @@ -262,19 +245,19 @@ def test_redirected_logs_sync( ) -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs', 'reduce_final_timeout_for_status_message_redirector') async def test_actor_call_redirect_logs_to_default_logger_async( caplog: LogCaptureFixture, - mock_api_async: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture + httpserver: HTTPServer, ) -> None: """Test that logs are redirected correctly to the default logger. 
Caplog contains logs before formatting, so formatting is not included in the test expectations.""" + api_url = httpserver.url_for('/').removesuffix('/') + logger_name = f'apify.{_MOCKED_ACTOR_NAME} runId:{_MOCKED_RUN_ID}' logger = logging.getLogger(logger_name) - actor_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + actor_client = ApifyClientAsync(token='mocked_token', api_url=api_url).actor(actor_id=_MOCKED_ACTOR_ID) with caplog.at_level(logging.DEBUG, logger=logger_name): await actor_client.call() @@ -289,19 +272,19 @@ async def test_actor_call_redirect_logs_to_default_logger_async( ) -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs', 'reduce_final_timeout_for_status_message_redirector') def test_actor_call_redirect_logs_to_default_logger_sync( caplog: LogCaptureFixture, - mock_api_sync: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture + httpserver: HTTPServer, ) -> None: """Test that logs are redirected correctly to the default logger. Caplog contains logs before formatting, so formatting is not included in the test expectations.""" + api_url = httpserver.url_for('/').removesuffix('/') + logger_name = f'apify.{_MOCKED_ACTOR_NAME} runId:{_MOCKED_RUN_ID}' logger = logging.getLogger(logger_name) - actor_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + actor_client = ApifyClient(token='mocked_token', api_url=api_url).actor(actor_id=_MOCKED_ACTOR_ID) with caplog.at_level(logging.DEBUG, logger=logger_name): actor_client.call() @@ -316,14 +299,15 @@ def test_actor_call_redirect_logs_to_default_logger_sync( ) -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs') async def test_actor_call_no_redirect_logs_async( caplog: LogCaptureFixture, - mock_api_async: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture + httpserver: HTTPServer, ) -> None: + api_url = httpserver.url_for('/').removesuffix('/') + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' - actor_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + actor_client = ApifyClientAsync(token='mocked_token', api_url=api_url).actor(actor_id=_MOCKED_ACTOR_ID) with caplog.at_level(logging.DEBUG, logger=logger_name): await actor_client.call(logger=None) @@ -331,14 +315,15 @@ async def test_actor_call_no_redirect_logs_async( assert len(caplog.records) == 0 -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs') def test_actor_call_no_redirect_logs_sync( caplog: LogCaptureFixture, - mock_api_sync: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture + httpserver: HTTPServer, ) -> None: + api_url = httpserver.url_for('/').removesuffix('/') + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' - actor_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + actor_client = ApifyClient(token='mocked_token', api_url=api_url).actor(actor_id=_MOCKED_ACTOR_ID) with caplog.at_level(logging.DEBUG, logger=logger_name): actor_client.call(logger=None) @@ -346,17 +331,17 @@ def test_actor_call_no_redirect_logs_sync( assert len(caplog.records) == 0 -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs', 'reduce_final_timeout_for_status_message_redirector') async def 
test_actor_call_redirect_logs_to_custom_logger_async( caplog: LogCaptureFixture, - mock_api_async: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture + httpserver: HTTPServer, ) -> None: """Test that logs are redirected correctly to the custom logger.""" + api_url = httpserver.url_for('/').removesuffix('/') + logger_name = 'custom_logger' logger = logging.getLogger(logger_name) - actor_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + actor_client = ApifyClientAsync(token='mocked_token', api_url=api_url).actor(actor_id=_MOCKED_ACTOR_ID) with caplog.at_level(logging.DEBUG, logger=logger_name): await actor_client.call(logger=logger) @@ -367,17 +352,17 @@ async def test_actor_call_redirect_logs_to_custom_logger_async( ) -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs', 'reduce_final_timeout_for_status_message_redirector') def test_actor_call_redirect_logs_to_custom_logger_sync( caplog: LogCaptureFixture, - mock_api_sync: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture + httpserver: HTTPServer, ) -> None: """Test that logs are redirected correctly to the custom logger.""" + api_url = httpserver.url_for('/').removesuffix('/') + logger_name = 'custom_logger' logger = logging.getLogger(logger_name) - actor_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + actor_client = ApifyClient(token='mocked_token', api_url=api_url).actor(actor_id=_MOCKED_ACTOR_ID) with caplog.at_level(logging.DEBUG, logger=logger_name): actor_client.call(logger=logger) @@ -388,17 +373,16 @@ def test_actor_call_redirect_logs_to_custom_logger_sync( ) -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs', 'reduce_final_timeout_for_status_message_redirector') async def test_redirect_status_message_async( *, caplog: LogCaptureFixture, - mock_api: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture + httpserver: HTTPServer, ) -> None: """Test redirected status and status messages.""" + api_url = httpserver.url_for('/').removesuffix('/') - run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + run_client = ApifyClientAsync(token='mocked_token', api_url=api_url).run(run_id=_MOCKED_RUN_ID) logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' @@ -413,17 +397,17 @@ async def test_redirect_status_message_async( assert caplog.records[2].message == 'Status: SUCCEEDED, Message: Final message' -@respx.mock +@pytest.mark.usefixtures('mock_api', 'propagate_stream_logs', 'reduce_final_timeout_for_status_message_redirector') def test_redirect_status_message_sync( *, caplog: LogCaptureFixture, - mock_api: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture + httpserver: HTTPServer, ) -> None: """Test redirected status and status messages.""" - run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + api_url = httpserver.url_for('/').removesuffix('/') + + run_client = ApifyClient(token='mocked_token', 
api_url=api_url).run(run_id=_MOCKED_RUN_ID) logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' diff --git a/uv.lock b/uv.lock index f00a008e..f0e88fa6 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.9" resolution-markers = [ "python_full_version >= '3.10'", @@ -42,6 +42,7 @@ dev = [ { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, + { name = "pytest-httpserver" }, { name = "pytest-timeout" }, { name = "pytest-xdist" }, { name = "redbaron" }, @@ -69,6 +70,7 @@ dev = [ { name = "pytest", specifier = "~=8.4.0" }, { name = "pytest-asyncio", specifier = "~=1.1.0" }, { name = "pytest-cov", specifier = "~=6.2.0" }, + { name = "pytest-httpserver", specifier = ">=1.1.3" }, { name = "pytest-timeout", specifier = ">=2.4.0" }, { name = "pytest-xdist", specifier = "~=3.8.0" }, { name = "redbaron", specifier = "~=0.9.0" }, @@ -892,6 +894,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, ] +[[package]] +name = "pytest-httpserver" +version = "1.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/d8/def15ba33bd696dd72dd4562a5287c0cba4d18a591eeb82e0b08ab385afc/pytest_httpserver-1.1.3.tar.gz", hash = "sha256:af819d6b533f84b4680b9416a5b3f67f1df3701f1da54924afd4d6e4ba5917ec", size = 68870, upload-time = "2025-04-10T08:17:15.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/d2/dfc2f25f3905921c2743c300a48d9494d29032f1389fc142e718d6978fb2/pytest_httpserver-1.1.3-py3-none-any.whl", hash = "sha256:5f84757810233e19e2bb5287f3826a71c97a3740abe3a363af9155c0f82fdbb9", size = 21000, upload-time = "2025-04-10T08:17:13.906Z" }, +] + [[package]] name = "pytest-timeout" version = "2.4.0" @@ -1202,6 +1216,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] +[[package]] +name = "werkzeug" +version = "3.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2471d621ab06f6ec6b3324685a2ce0f9777fd4a8b71e/werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746", size = 806925, upload-time = "2024-11-08T15:52:18.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498, upload-time = "2024-11-08T15:52:16.132Z" }, +] + [[package]] name = "wrapt" version = "1.17.2" From 919d39e5c0b225978a92448462bc75ed586199e3 Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Wed, 6 Aug 2025 01:07:29 +0000 Subject: [PATCH 02/15] test --- tests/unit/logging_old.py | 437 ------------------------------------- tests/unit/test_logging.py | 11 +- 2 files changed, 9 insertions(+), 439 deletions(-) delete mode 100644 
tests/unit/logging_old.py diff --git a/tests/unit/logging_old.py b/tests/unit/logging_old.py deleted file mode 100644 index 19dd6ede..00000000 --- a/tests/unit/logging_old.py +++ /dev/null @@ -1,437 +0,0 @@ -import asyncio -import json -import logging -import threading -import time -from collections.abc import AsyncIterator, Generator, Iterator -from datetime import datetime, timedelta -from unittest.mock import patch - -import httpx -import pytest -import respx -from _pytest.logging import LogCaptureFixture -from apify_shared.consts import ActorJobStatus - -from apify_client import ApifyClient, ApifyClientAsync -from apify_client._logging import RedirectLogFormatter -from apify_client.clients.resource_clients.log import StatusMessageWatcher, StreamedLog - -_MOCKED_API_URL = '/logging' -_MOCKED_RUN_ID = 'mocked_run_id' -_MOCKED_ACTOR_NAME = 'mocked_actor_name' -_MOCKED_ACTOR_ID = 'mocked_actor_id' -_MOCKED_ACTOR_LOGS = ( - b'2025-05-13T07:24:12.588Z ACTOR: Pulling Docker image of build.\n' - b'2025-05-13T07:24:12.686Z ACTOR: Creating Docker container.\n' - b'2025-05-13T07:24:12.745Z ACTOR: Starting Docker container.\n', # Several logs merged into one chunk - b'2025-05-13T07:26:14.132Z [apify] DEBUG \xc3', # Chunked log split in the middle of the multibyte character - b'\xa1\n', # part 2 - b'2025-05-13T07:24:14.132Z [apify] INFO multiline \n log\n', - b'2025-05-13T07:25:14.132Z [apify] WARNING some warning\n', - b'2025-05-13T07:26:14.132Z [apify] DEBUG c\n', - b'2025-05-13T0', # Chunked log that got split in the marker - b'7:26:14.132Z [apify] DEBUG d\n' # part 2 - b'2025-05-13T07:27:14.132Z [apify] DEB', # Chunked log that got split outside of marker - b'UG e\n', # part 2 - # Already redirected message - b'2025-05-13T07:28:14.132Z [apify.redirect-logger runId:4U1oAnKau6jpzjUuA] -> 2025-05-13T07:27:14.132Z ACTOR:...\n', -) -_EXISTING_LOGS_BEFORE_REDIRECT_ATTACH = 3 - -_EXPECTED_MESSAGES_AND_LEVELS = ( - ('2025-05-13T07:24:12.588Z ACTOR: Pulling Docker image of build.', logging.INFO), - ('2025-05-13T07:24:12.686Z ACTOR: Creating Docker container.', logging.INFO), - ('2025-05-13T07:24:12.745Z ACTOR: Starting Docker container.', logging.INFO), - ('2025-05-13T07:26:14.132Z [apify] DEBUG á', logging.DEBUG), - ('2025-05-13T07:24:14.132Z [apify] INFO multiline \n log', logging.INFO), - ('2025-05-13T07:25:14.132Z [apify] WARNING some warning', logging.WARNING), - ('2025-05-13T07:26:14.132Z [apify] DEBUG c', logging.DEBUG), - ('2025-05-13T07:26:14.132Z [apify] DEBUG d', logging.DEBUG), - ('2025-05-13T07:27:14.132Z [apify] DEBUG e', logging.DEBUG), - ( - '2025-05-13T07:28:14.132Z [apify.redirect-logger runId:4U1oAnKau6jpzjUuA] -> ' - '2025-05-13T07:27:14.132Z ACTOR:...', - logging.INFO, - ), -) - -_EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES = ( - ('Status: RUNNING, Message: Initial message', logging.INFO), - *_EXPECTED_MESSAGES_AND_LEVELS, - ('Status: RUNNING, Message: Another message', logging.INFO), - ('Status: SUCCEEDED, Message: Final message', logging.INFO), -) - - -@pytest.fixture -def mock_api() -> None: - test_server_lock = threading.Lock() - - def get_responses() -> Generator[httpx.Response, None, None]: - """Simulate actor run that changes status 3 times.""" - for _ in range(5): - yield httpx.Response( - content=json.dumps( - { - 'data': { - 'id': _MOCKED_RUN_ID, - 'actId': _MOCKED_ACTOR_ID, - 'status': ActorJobStatus.RUNNING, - 'statusMessage': 'Initial message', - 'isStatusMessageTerminal': False, - } - } - ), - status_code=200, - ) - for _ in range(5): - yield httpx.Response( - 
content=json.dumps( - { - 'data': { - 'id': _MOCKED_RUN_ID, - 'actId': _MOCKED_ACTOR_ID, - 'status': ActorJobStatus.RUNNING, - 'statusMessage': 'Another message', - 'isStatusMessageTerminal': False, - } - } - ), - status_code=200, - ) - while True: - yield httpx.Response( - content=json.dumps( - { - 'data': { - 'id': _MOCKED_RUN_ID, - 'actId': _MOCKED_ACTOR_ID, - 'status': ActorJobStatus.SUCCEEDED, - 'statusMessage': 'Final message', - 'isStatusMessageTerminal': True, - } - } - ), - status_code=200, - ) - - responses = get_responses() - - def actor_runs_side_effect(_: httpx.Request) -> httpx.Response: - test_server_lock.acquire() - # To avoid multiple threads accessing at the same time and causing `ValueError: generator already executing` - response = next(responses) - test_server_lock.release_lock() - return response - - respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}').mock(side_effect=actor_runs_side_effect) - - respx.get(url=f'{_MOCKED_API_URL}/v2/acts/{_MOCKED_ACTOR_ID}').mock( - return_value=httpx.Response(content=json.dumps({'data': {'name': _MOCKED_ACTOR_NAME}}), status_code=200) - ) - - respx.post(url=f'{_MOCKED_API_URL}/v2/acts/{_MOCKED_ACTOR_ID}/runs').mock( - return_value=httpx.Response(content=json.dumps({'data': {'id': _MOCKED_RUN_ID}}), status_code=200) - ) - - -@pytest.fixture -def mock_api_async(mock_api: None) -> None: # noqa: ARG001, fixture - class AsyncByteStream(httpx._types.AsyncByteStream): - async def __aiter__(self) -> AsyncIterator[bytes]: - for i in _MOCKED_ACTOR_LOGS: - yield i - await asyncio.sleep(0.01) - - async def aclose(self) -> None: - pass - - respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}/log?stream=1&raw=1').mock( - return_value=httpx.Response(stream=AsyncByteStream(), status_code=200) - ) - - -@pytest.fixture -def mock_api_sync(mock_api: None) -> None: # noqa: ARG001, fixture - class SyncByteStream(httpx._types.SyncByteStream): - def __iter__(self) -> Iterator[bytes]: - for i in _MOCKED_ACTOR_LOGS: - yield i - time.sleep(0.01) - - def close(self) -> None: - pass - - respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}/log?stream=1&raw=1').mock( - return_value=httpx.Response(stream=SyncByteStream(), status_code=200) - ) - - -@pytest.fixture -def propagate_stream_logs() -> None: - # Enable propagation of logs to the caplog fixture - StreamedLog._force_propagate = True - StatusMessageWatcher._force_propagate = True - logging.getLogger(f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}').setLevel(logging.DEBUG) - - -@pytest.fixture -def reduce_final_timeout_for_status_message_redirector() -> None: - """Reduce timeout used by the `StatusMessageWatcher` - - This timeout makes sense on the platform, but in tests it is better to reduce it to speed up the tests. 
- """ - StatusMessageWatcher._final_sleep_time_s = 2 - - -@pytest.mark.parametrize( - ('log_from_start', 'expected_log_count'), - [ - (True, len(_EXPECTED_MESSAGES_AND_LEVELS)), - (False, len(_EXPECTED_MESSAGES_AND_LEVELS) - _EXISTING_LOGS_BEFORE_REDIRECT_ATTACH), - ], -) -@respx.mock -async def test_redirected_logs_async( - *, - caplog: LogCaptureFixture, - mock_api_async: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - log_from_start: bool, - expected_log_count: int, -) -> None: - """Test that redirected logs are formatted correctly.""" - - run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) - - with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: - # Mock `now()` so that it has timestamp bigger than the first 3 logs - mocked_datetime.now.return_value = datetime.fromisoformat('2025-05-13T07:24:14.132+00:00') - streamed_log = await run_client.get_streamed_log(from_start=log_from_start) - - # Set `propagate=True` during the tests, so that caplog can see the logs.. - logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' - - with caplog.at_level(logging.DEBUG, logger=logger_name): - async with streamed_log: - # Do stuff while the log from the other Actor is being redirected to the logs. - await asyncio.sleep(2) - - # Ensure logs are propagated - assert {(record.message, record.levelno) for record in caplog.records} == set( - _EXPECTED_MESSAGES_AND_LEVELS[-expected_log_count:] - ) - - -@pytest.mark.parametrize( - ('log_from_start', 'expected_log_count'), - [ - (True, len(_EXPECTED_MESSAGES_AND_LEVELS)), - (False, len(_EXPECTED_MESSAGES_AND_LEVELS) - _EXISTING_LOGS_BEFORE_REDIRECT_ATTACH), - ], -) -@respx.mock -def test_redirected_logs_sync( - *, - caplog: LogCaptureFixture, - mock_api_sync: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - log_from_start: bool, - expected_log_count: int, -) -> None: - """Test that redirected logs are formatted correctly.""" - - run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) - - with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: - # Mock `now()` so that it has timestamp bigger than the first 3 logs - mocked_datetime.now.return_value = datetime.fromisoformat('2025-05-13T07:24:14.132+00:00') - streamed_log = run_client.get_streamed_log(from_start=log_from_start) - - # Set `propagate=True` during the tests, so that caplog can see the logs.. - logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' - - with caplog.at_level(logging.DEBUG, logger=logger_name), streamed_log: - # Do stuff while the log from the other Actor is being redirected to the logs. - time.sleep(2) - - # Ensure logs are propagated - assert {(record.message, record.levelno) for record in caplog.records} == set( - _EXPECTED_MESSAGES_AND_LEVELS[-expected_log_count:] - ) - - -@respx.mock -async def test_actor_call_redirect_logs_to_default_logger_async( - caplog: LogCaptureFixture, - mock_api_async: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture -) -> None: - """Test that logs are redirected correctly to the default logger. 
- - Caplog contains logs before formatting, so formatting is not included in the test expectations.""" - logger_name = f'apify.{_MOCKED_ACTOR_NAME} runId:{_MOCKED_RUN_ID}' - logger = logging.getLogger(logger_name) - actor_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) - - with caplog.at_level(logging.DEBUG, logger=logger_name): - await actor_client.call() - - # Ensure expected handler and formater - assert isinstance(logger.handlers[0].formatter, RedirectLogFormatter) - assert isinstance(logger.handlers[0], logging.StreamHandler) - - # Ensure logs are propagated - assert {(record.message, record.levelno) for record in caplog.records} == set( - _EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES - ) - - -@respx.mock -def test_actor_call_redirect_logs_to_default_logger_sync( - caplog: LogCaptureFixture, - mock_api_sync: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture -) -> None: - """Test that logs are redirected correctly to the default logger. - - Caplog contains logs before formatting, so formatting is not included in the test expectations.""" - logger_name = f'apify.{_MOCKED_ACTOR_NAME} runId:{_MOCKED_RUN_ID}' - logger = logging.getLogger(logger_name) - actor_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) - - with caplog.at_level(logging.DEBUG, logger=logger_name): - actor_client.call() - - # Ensure expected handler and formater - assert isinstance(logger.handlers[0].formatter, RedirectLogFormatter) - assert isinstance(logger.handlers[0], logging.StreamHandler) - - # Ensure logs are propagated - assert {(record.message, record.levelno) for record in caplog.records} == set( - _EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES - ) - - -@respx.mock -async def test_actor_call_no_redirect_logs_async( - caplog: LogCaptureFixture, - mock_api_async: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture -) -> None: - logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' - actor_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) - - with caplog.at_level(logging.DEBUG, logger=logger_name): - await actor_client.call(logger=None) - - assert len(caplog.records) == 0 - - -@respx.mock -def test_actor_call_no_redirect_logs_sync( - caplog: LogCaptureFixture, - mock_api_sync: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture -) -> None: - logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' - actor_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) - - with caplog.at_level(logging.DEBUG, logger=logger_name): - actor_client.call(logger=None) - - assert len(caplog.records) == 0 - - -@respx.mock -async def test_actor_call_redirect_logs_to_custom_logger_async( - caplog: LogCaptureFixture, - mock_api_async: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture -) -> None: - """Test that logs are redirected correctly to the custom logger.""" - logger_name = 'custom_logger' - logger = logging.getLogger(logger_name) - actor_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) - - with caplog.at_level(logging.DEBUG, logger=logger_name): - 
await actor_client.call(logger=logger) - - # Ensure logs are propagated - assert {(record.message, record.levelno) for record in caplog.records} == set( - _EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES - ) - - -@respx.mock -def test_actor_call_redirect_logs_to_custom_logger_sync( - caplog: LogCaptureFixture, - mock_api_sync: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture -) -> None: - """Test that logs are redirected correctly to the custom logger.""" - logger_name = 'custom_logger' - logger = logging.getLogger(logger_name) - actor_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) - - with caplog.at_level(logging.DEBUG, logger=logger_name): - actor_client.call(logger=logger) - - # Ensure logs are propagated - assert {(record.message, record.levelno) for record in caplog.records} == set( - _EXPECTED_MESSAGES_AND_LEVELS_WITH_STATUS_MESSAGES - ) - - -@respx.mock -async def test_redirect_status_message_async( - *, - caplog: LogCaptureFixture, - mock_api: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture -) -> None: - """Test redirected status and status messages.""" - - run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) - - logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' - - status_message_redirector = await run_client.get_status_message_watcher(check_period=timedelta(seconds=0)) - with caplog.at_level(logging.DEBUG, logger=logger_name): - async with status_message_redirector: - # Do stuff while the status from the other Actor is being redirected to the logs. - await asyncio.sleep(3) - - assert caplog.records[0].message == 'Status: RUNNING, Message: Initial message' - assert caplog.records[1].message == 'Status: RUNNING, Message: Another message' - assert caplog.records[2].message == 'Status: SUCCEEDED, Message: Final message' - - -@respx.mock -def test_redirect_status_message_sync( - *, - caplog: LogCaptureFixture, - mock_api: None, # noqa: ARG001, fixture - propagate_stream_logs: None, # noqa: ARG001, fixture - reduce_final_timeout_for_status_message_redirector: None, # noqa: ARG001, fixture -) -> None: - """Test redirected status and status messages.""" - - run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) - - logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' - - status_message_redirector = run_client.get_status_message_watcher(check_period=timedelta(seconds=0)) - with caplog.at_level(logging.DEBUG, logger=logger_name), status_message_redirector: - # Do stuff while the status from the other Actor is being redirected to the logs. 
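For reference, the redirection these tests exercise is available directly on the client. A minimal usage sketch, assuming a valid token and Actor ID (both are placeholders here, not values from this patch):

    import logging

    from apify_client import ApifyClient

    client = ApifyClient(token='<YOUR_API_TOKEN>')

    # Any standard logger can receive the redirected run logs.
    custom_logger = logging.getLogger('my_actor_runs')
    custom_logger.setLevel(logging.DEBUG)
    custom_logger.addHandler(logging.StreamHandler())

    # call() waits for the run to finish; logger=... redirects the run's
    # log stream into the given logger, and logger=None disables redirection.
    run = client.actor(actor_id='<ACTOR_ID>').call(logger=custom_logger)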
- time.sleep(3) - - assert caplog.records[0].message == 'Status: RUNNING, Message: Initial message' - assert caplog.records[1].message == 'Status: RUNNING, Message: Another message' - assert caplog.records[2].message == 'Status: SUCCEEDED, Message: Final message' diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 52d05f06..b6aff7c5 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -117,9 +117,16 @@ def _streaming_log_handler(_request: Request) -> Response: def generate_logs() -> Iterator[bytes]: for chunk in _MOCKED_ACTOR_LOGS: yield chunk - time.sleep(0.01) + time.sleep(0.05) - return Response(response=generate_logs(), status=200, mimetype='application/octet-stream') + total_size = sum(len(chunk) for chunk in _MOCKED_ACTOR_LOGS) + + return Response( + response=generate_logs(), + status=200, + mimetype='application/octet-stream', + headers={'Content-Length': str(total_size)}, + ) @pytest.fixture From e06f2c2fc7b2909c289e0c3553ff6e23b4bb1abf Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Wed, 6 Aug 2025 01:31:29 +0000 Subject: [PATCH 03/15] wincheck --- tests/unit/test_logging.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index b6aff7c5..97694b54 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -3,6 +3,7 @@ import asyncio import json import logging +import sys import time from datetime import datetime, timedelta from typing import TYPE_CHECKING @@ -68,6 +69,11 @@ ) +def _get_windows_sleep_time() -> float: + """Get adjusted sleep time for Windows systems.""" + return 5.0 if sys.platform == 'win32' else 2.0 + + class StatusResponseGenerator: """Generator for actor run status responses to simulate changing status over time.""" @@ -244,7 +250,7 @@ def test_redirected_logs_sync( with caplog.at_level(logging.DEBUG, logger=logger_name), streamed_log: # Do stuff while the log from the other Actor is being redirected to the logs. - time.sleep(2) + time.sleep(_get_windows_sleep_time()) # Ensure logs are propagated assert {(record.message, record.levelno) for record in caplog.records} == set( @@ -421,7 +427,7 @@ def test_redirect_status_message_sync( status_message_redirector = run_client.get_status_message_watcher(check_period=timedelta(seconds=0)) with caplog.at_level(logging.DEBUG, logger=logger_name), status_message_redirector: # Do stuff while the status from the other Actor is being redirected to the logs. 
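The Content-Length header added above is computed up front from the known chunks, so the client can tell a finished stream from a dropped connection. The same handler pattern in isolation (chunk contents are illustrative):

    import time
    from collections.abc import Iterator

    from werkzeug import Request, Response

    def streaming_log_handler(_request: Request) -> Response:
        chunks = (b'first chunk\n', b'second chunk\n')

        def generate() -> Iterator[bytes]:
            for chunk in chunks:
                yield chunk
                time.sleep(0.01)  # simulate logs arriving over time

        # Werkzeug cannot infer the length of a generator body,
        # so declare it explicitly from the known chunk sizes.
        total_size = sum(len(chunk) for chunk in chunks)
        return Response(
            response=generate(),
            status=200,
            mimetype='application/octet-stream',
            headers={'Content-Length': str(total_size)},
        )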
- time.sleep(3) + time.sleep(_get_windows_sleep_time()) assert caplog.records[0].message == 'Status: RUNNING, Message: Initial message' assert caplog.records[1].message == 'Status: RUNNING, Message: Another message' From 1b8358465a2b51fe1f3ca3fabe477f74140c2892 Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Wed, 6 Aug 2025 01:34:39 +0000 Subject: [PATCH 04/15] wincheck --- tests/unit/test_logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 97694b54..42df378d 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -71,7 +71,7 @@ def _get_windows_sleep_time() -> float: """Get adjusted sleep time for Windows systems.""" - return 5.0 if sys.platform == 'win32' else 2.0 + return 10.0 if sys.platform == 'win32' else 2.0 class StatusResponseGenerator: From 09dabbfeb9701abee644d312d324b3dba3bc1309 Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Wed, 6 Aug 2025 01:53:16 +0000 Subject: [PATCH 05/15] wincheck --- tests/unit/test_logging.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 42df378d..0f5b7dd3 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -69,11 +69,6 @@ ) -def _get_windows_sleep_time() -> float: - """Get adjusted sleep time for Windows systems.""" - return 10.0 if sys.platform == 'win32' else 2.0 - - class StatusResponseGenerator: """Generator for actor run status responses to simulate changing status over time.""" @@ -123,7 +118,7 @@ def _streaming_log_handler(_request: Request) -> Response: def generate_logs() -> Iterator[bytes]: for chunk in _MOCKED_ACTOR_LOGS: yield chunk - time.sleep(0.05) + time.sleep(0.01) total_size = sum(len(chunk) for chunk in _MOCKED_ACTOR_LOGS) @@ -211,7 +206,7 @@ async def test_redirected_logs_async( with caplog.at_level(logging.DEBUG, logger=logger_name): async with streamed_log: # Do stuff while the log from the other Actor is being redirected to the logs. - await asyncio.sleep(2) + await asyncio.sleep(4.0 if sys.platform == 'win32' else 2.0) # Ensure logs are propagated assert {(record.message, record.levelno) for record in caplog.records} == set( @@ -250,7 +245,7 @@ def test_redirected_logs_sync( with caplog.at_level(logging.DEBUG, logger=logger_name), streamed_log: # Do stuff while the log from the other Actor is being redirected to the logs. - time.sleep(_get_windows_sleep_time()) + time.sleep(4.0 if sys.platform == 'win32' else 2.0) # Ensure logs are propagated assert {(record.message, record.levelno) for record in caplog.records} == set( @@ -427,7 +422,7 @@ def test_redirect_status_message_sync( status_message_redirector = run_client.get_status_message_watcher(check_period=timedelta(seconds=0)) with caplog.at_level(logging.DEBUG, logger=logger_name), status_message_redirector: # Do stuff while the status from the other Actor is being redirected to the logs. 
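The platform-specific sleeps above trade speed for reliability on slower Windows CI runners. An alternative that avoids fixed sleep times entirely is to poll for the expected condition with a deadline; a generic sketch, not part of this patch:

    import time
    from collections.abc import Callable

    def wait_until(predicate: Callable[[], bool], timeout: float = 10.0, interval: float = 0.05) -> None:
        """Poll `predicate` until it returns True or `timeout` seconds elapse."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if predicate():
                return
            time.sleep(interval)
        raise TimeoutError(f'Condition not met within {timeout} seconds')

    # For example, instead of a fixed sleep:
    # wait_until(lambda: len(caplog.records) >= expected_log_count)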
- time.sleep(_get_windows_sleep_time()) + time.sleep(3) assert caplog.records[0].message == 'Status: RUNNING, Message: Initial message' assert caplog.records[1].message == 'Status: RUNNING, Message: Another message' From 3f15d023b27d2873c4b6a6f538d514879ef6550a Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Wed, 6 Aug 2025 14:17:40 +0000 Subject: [PATCH 06/15] without localhost --- tests/unit/conftest.py | 10 ++++++---- tests/unit/test_logging.py | 11 ++++------- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index aea69820..747a99bd 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -10,7 +10,7 @@ def make_httpserver() -> Iterable[HTTPServer]: werkzeug_logger = getLogger('werkzeug') werkzeug_logger.disabled = True - server = HTTPServer(threaded=True) + server = HTTPServer(threaded=True, host='127.0.0.1') server.start() yield server server.clear() # type: ignore[no-untyped-call] @@ -18,9 +18,11 @@ def make_httpserver() -> Iterable[HTTPServer]: server.stop() # type: ignore[no-untyped-call] -@pytest.fixture(scope='session') -def httpserver(make_httpserver: HTTPServer) -> HTTPServer: - return make_httpserver +@pytest.fixture +def httpserver(make_httpserver: HTTPServer) -> Iterable[HTTPServer]: + server = make_httpserver + yield server + server.clear() # type: ignore[no-untyped-call] @pytest.fixture diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 0f5b7dd3..c518935b 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -3,7 +3,6 @@ import asyncio import json import logging -import sys import time from datetime import datetime, timedelta from typing import TYPE_CHECKING @@ -133,8 +132,6 @@ def generate_logs() -> Iterator[bytes]: @pytest.fixture def mock_api(httpserver: HTTPServer) -> None: """Set up HTTP server with mocked API endpoints.""" - httpserver.clear() # type: ignore[no-untyped-call] - status_generator = StatusResponseGenerator() # Add actor run status endpoint @@ -206,7 +203,7 @@ async def test_redirected_logs_async( with caplog.at_level(logging.DEBUG, logger=logger_name): async with streamed_log: # Do stuff while the log from the other Actor is being redirected to the logs. - await asyncio.sleep(4.0 if sys.platform == 'win32' else 2.0) + await asyncio.sleep(1) # Ensure logs are propagated assert {(record.message, record.levelno) for record in caplog.records} == set( @@ -245,7 +242,7 @@ def test_redirected_logs_sync( with caplog.at_level(logging.DEBUG, logger=logger_name), streamed_log: # Do stuff while the log from the other Actor is being redirected to the logs. - time.sleep(4.0 if sys.platform == 'win32' else 2.0) + time.sleep(1) # Ensure logs are propagated assert {(record.message, record.levelno) for record in caplog.records} == set( @@ -398,7 +395,7 @@ async def test_redirect_status_message_async( with caplog.at_level(logging.DEBUG, logger=logger_name): async with status_message_redirector: # Do stuff while the status from the other Actor is being redirected to the logs. 
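The conftest change above keeps a single HTTPServer for the whole session but hands it to tests through a function-scoped wrapper that clears registered handlers afterwards, so expectations cannot leak between tests. The pattern on its own, mirroring the fixtures above:

    from collections.abc import Iterable

    import pytest
    from pytest_httpserver import HTTPServer

    @pytest.fixture(scope='session')
    def make_httpserver() -> Iterable[HTTPServer]:
        # Bind to 127.0.0.1 explicitly to avoid 'localhost' resolution quirks.
        server = HTTPServer(threaded=True, host='127.0.0.1')
        server.start()
        yield server
        if server.is_running():
            server.stop()  # type: ignore[no-untyped-call]

    @pytest.fixture
    def httpserver(make_httpserver: HTTPServer) -> Iterable[HTTPServer]:
        yield make_httpserver
        # Per-test cleanup: drop handlers registered by the finished test.
        make_httpserver.clear()  # type: ignore[no-untyped-call]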
- await asyncio.sleep(3) + await asyncio.sleep(1) assert caplog.records[0].message == 'Status: RUNNING, Message: Initial message' assert caplog.records[1].message == 'Status: RUNNING, Message: Another message' @@ -422,7 +419,7 @@ def test_redirect_status_message_sync( status_message_redirector = run_client.get_status_message_watcher(check_period=timedelta(seconds=0)) with caplog.at_level(logging.DEBUG, logger=logger_name), status_message_redirector: # Do stuff while the status from the other Actor is being redirected to the logs. - time.sleep(3) + time.sleep(1) assert caplog.records[0].message == 'Status: RUNNING, Message: Initial message' assert caplog.records[1].message == 'Status: RUNNING, Message: Another message' From bfdf114ed9dc4527599aca93be0995faf0b533b9 Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Wed, 6 Aug 2025 20:19:28 +0000 Subject: [PATCH 07/15] add werkzeug in dev dependency --- pyproject.toml | 1 + uv.lock | 8 +++++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5309aa78..8ae54b6a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,6 +58,7 @@ dev = [ "ruff~=0.12.0", "setuptools", # setuptools are used by pytest but not explicitly required "types-colorama~=0.4.15.20240106", + "werkzeug~=3.0.0", # Werkzeug is used by pytest-httpserver ] [tool.hatch.build.targets.wheel] diff --git a/uv.lock b/uv.lock index f0e88fa6..cddc278d 100644 --- a/uv.lock +++ b/uv.lock @@ -50,6 +50,7 @@ dev = [ { name = "ruff" }, { name = "setuptools" }, { name = "types-colorama" }, + { name = "werkzeug" }, ] [package.metadata] @@ -78,6 +79,7 @@ dev = [ { name = "ruff", specifier = "~=0.12.0" }, { name = "setuptools" }, { name = "types-colorama", specifier = "~=0.4.15.20240106" }, + { name = "werkzeug", specifier = "~=3.0.0" }, ] [[package]] @@ -1218,14 +1220,14 @@ wheels = [ [[package]] name = "werkzeug" -version = "3.1.3" +version = "3.0.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2471d621ab06f6ec6b3324685a2ce0f9777fd4a8b71e/werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746", size = 806925, upload-time = "2024-11-08T15:52:18.093Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/f9/0ba83eaa0df9b9e9d1efeb2ea351d0677c37d41ee5d0f91e98423c7281c9/werkzeug-3.0.6.tar.gz", hash = "sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d", size = 805170, upload-time = "2024-10-25T18:52:31.688Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498, upload-time = "2024-11-08T15:52:16.132Z" }, + { url = "https://files.pythonhosted.org/packages/6c/69/05837f91dfe42109203ffa3e488214ff86a6d68b2ed6c167da6cdc42349b/werkzeug-3.0.6-py3-none-any.whl", hash = "sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17", size = 227979, upload-time = "2024-10-25T18:52:30.129Z" }, ] [[package]] From 202e79ed4d929a2b2bb1745a55e578baf9f13a7b Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Thu, 7 Aug 2025 01:09:20 +0000 Subject: [PATCH 08/15] replace httpx with impit --- docs/02_concepts/09_streaming.mdx | 2 +- docs/02_concepts/code/01_async_support.py | 4 +- docs/02_concepts/code/09_streaming_async.py | 4 +- 
docs/02_concepts/code/09_streaming_sync.py | 4 +- pyproject.toml | 1 + src/apify_client/_errors.py | 19 +-- src/apify_client/_http_client.py | 96 +++++++------- .../clients/base/actor_job_base_client.py | 9 +- .../clients/base/resource_client.py | 10 +- .../base/resource_collection_client.py | 13 +- .../clients/resource_clients/actor.py | 13 +- .../clients/resource_clients/build.py | 5 +- .../clients/resource_clients/dataset.py | 15 +-- .../resource_clients/key_value_store.py | 5 +- .../clients/resource_clients/log.py | 6 +- .../clients/resource_clients/request_queue.py | 41 +++--- .../clients/resource_clients/run.py | 13 +- .../clients/resource_clients/schedule.py | 5 +- .../clients/resource_clients/task.py | 13 +- .../clients/resource_clients/user.py | 9 +- .../clients/resource_clients/webhook.py | 5 +- tests/integration/conftest.py | 2 +- tests/unit/test_client_timeouts.py | 119 ++++++++++-------- tests/unit/test_logging.py | 5 +- uv.lock | 51 ++++++++ 25 files changed, 276 insertions(+), 193 deletions(-) diff --git a/docs/02_concepts/09_streaming.mdx b/docs/02_concepts/09_streaming.mdx index b365e34a..c62da01e 100644 --- a/docs/02_concepts/09_streaming.mdx +++ b/docs/02_concepts/09_streaming.mdx @@ -18,7 +18,7 @@ Supported streaming methods: - [`KeyValueStoreClient.stream_record`](/reference/class/KeyValueStoreClient#stream_record) - Stream key-value store records as raw data. - [`LogClient.stream`](/reference/class/LogClient#stream) - Stream logs in real time. -These methods return a raw, context-managed `httpx.Response` object. The response must be consumed within a with block to ensure that the connection is closed automatically, preventing memory leaks or unclosed connections. +These methods return a raw, context-managed `impit.Response` object. The response must be consumed within a with block to ensure that the connection is closed automatically, preventing memory leaks or unclosed connections. 
The following example demonstrates how to stream the logs of an Actor run incrementally: diff --git a/docs/02_concepts/code/01_async_support.py b/docs/02_concepts/code/01_async_support.py index 28186519..22cc390e 100644 --- a/docs/02_concepts/code/01_async_support.py +++ b/docs/02_concepts/code/01_async_support.py @@ -17,8 +17,8 @@ async def main() -> None: # Stream the logs async with log_client.stream() as async_log_stream: if async_log_stream: - async for line in async_log_stream.aiter_lines(): - print(line) + async for bytes_chunk in async_log_stream.aiter_bytes(): + print(bytes_chunk) if __name__ == '__main__': diff --git a/docs/02_concepts/code/09_streaming_async.py b/docs/02_concepts/code/09_streaming_async.py index 6ff097a8..5459784e 100644 --- a/docs/02_concepts/code/09_streaming_async.py +++ b/docs/02_concepts/code/09_streaming_async.py @@ -10,5 +10,5 @@ async def main() -> None: async with log_client.stream() as log_stream: if log_stream: - for line in log_stream.iter_lines(): - print(line) + async for bytes_chunk in log_stream.aiter_bytes(): + print(bytes_chunk) diff --git a/docs/02_concepts/code/09_streaming_sync.py b/docs/02_concepts/code/09_streaming_sync.py index 4eb0093d..e7617ab3 100644 --- a/docs/02_concepts/code/09_streaming_sync.py +++ b/docs/02_concepts/code/09_streaming_sync.py @@ -10,5 +10,5 @@ def main() -> None: with log_client.stream() as log_stream: if log_stream: - for line in log_stream.iter_lines(): - print(line) + for bytes_chunk in log_stream.iter_bytes(): + print(bytes_chunk) diff --git a/pyproject.toml b/pyproject.toml index 8ae54b6a..57ade99a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,6 +28,7 @@ dependencies = [ "apify-shared<2.0.0", "colorama>=0.4.0", "httpx>=0.25", + "impit>=0.5.1", "more_itertools>=10.0.0", ] diff --git a/src/apify_client/_errors.py b/src/apify_client/_errors.py index 87a91f2b..6b3d8db2 100644 --- a/src/apify_client/_errors.py +++ b/src/apify_client/_errors.py @@ -1,6 +1,8 @@ from __future__ import annotations -import httpx +import json as jsonlib + +import impit from apify_shared.utils import ignore_docs @@ -17,12 +19,13 @@ class ApifyApiError(ApifyClientError): """ @ignore_docs - def __init__(self, response: httpx.Response, attempt: int) -> None: + def __init__(self, response: impit.Response, attempt: int, method: str = 'GET') -> None: """Initialize a new instance. Args: response: The response to the failed API call. attempt: Which attempt was the request that failed. + method: The HTTP method used for the request. """ self.message: str | None = None self.type: str | None = None @@ -30,7 +33,7 @@ def __init__(self, response: httpx.Response, attempt: int) -> None: self.message = f'Unexpected error: {response.text}' try: - response_data = response.json() + response_data = jsonlib.loads(response.text) if 'error' in response_data: self.message = response_data['error']['message'] self.type = response_data['error']['type'] @@ -44,7 +47,7 @@ def __init__(self, response: httpx.Response, attempt: int) -> None: self.name = 'ApifyApiError' self.status_code = response.status_code self.attempt = attempt - self.http_method = response.request.method + self.http_method = method # TODO: self.client_method # noqa: TD003 # TODO: self.original_stack # noqa: TD003 @@ -61,7 +64,7 @@ class InvalidResponseBodyError(ApifyClientError): """ @ignore_docs - def __init__(self, response: httpx.Response) -> None: + def __init__(self, response: impit.Response) -> None: """Initialize a new instance. 
Args: @@ -80,8 +83,8 @@ def is_retryable_error(exc: Exception) -> bool: exc, ( InvalidResponseBodyError, - httpx.NetworkError, - httpx.TimeoutException, - httpx.RemoteProtocolError, + impit.NetworkError, + impit.TimeoutException, + impit.RemoteProtocolError, ), ) diff --git a/src/apify_client/_http_client.py b/src/apify_client/_http_client.py index 4ba304af..42e6577c 100644 --- a/src/apify_client/_http_client.py +++ b/src/apify_client/_http_client.py @@ -8,8 +8,9 @@ from http import HTTPStatus from importlib import metadata from typing import TYPE_CHECKING, Any +from urllib.parse import urlencode -import httpx +import impit from apify_shared.utils import ignore_docs, is_content_type_json, is_content_type_text, is_content_type_xml from apify_client._errors import ApifyApiError, InvalidResponseBodyError, is_retryable_error @@ -59,13 +60,13 @@ def __init__( if token is not None: headers['Authorization'] = f'Bearer {token}' - self.httpx_client = httpx.Client(headers=headers, follow_redirects=True, timeout=timeout_secs) - self.httpx_async_client = httpx.AsyncClient(headers=headers, follow_redirects=True, timeout=timeout_secs) + self.impit_client = impit.Client(headers=headers, follow_redirects=True, timeout=timeout_secs) + self.impit_async_client = impit.AsyncClient(headers=headers, follow_redirects=True, timeout=timeout_secs) self.stats = stats or Statistics() @staticmethod - def _maybe_parse_response(response: httpx.Response) -> Any: + def _maybe_parse_response(response: impit.Response) -> Any: if response.status_code == HTTPStatus.NO_CONTENT: return None @@ -75,7 +76,7 @@ def _maybe_parse_response(response: httpx.Response) -> Any: try: if is_content_type_json(content_type): - return response.json() + return jsonlib.loads(response.text) elif is_content_type_xml(content_type) or is_content_type_text(content_type): # noqa: RET505 return response.text else: @@ -131,6 +132,21 @@ def _prepare_request_call( data, ) + def _build_url_with_params(self, url: str, params: dict | None = None) -> str: + if not params: + return url + + param_pairs: list[tuple[str, str]] = [] + for key, value in params.items(): + if isinstance(value, list): + param_pairs.extend((key, str(v)) for v in value) + else: + param_pairs.append((key, str(value))) + + query_string = urlencode(param_pairs) + + return f'{url}?{query_string}' + class HTTPClient(_BaseHTTPClient): def call( @@ -145,7 +161,7 @@ def call( stream: bool | None = None, parse_response: bool | None = True, timeout_secs: int | None = None, - ) -> httpx.Response: + ) -> impit.Response: log_context.method.set(method) log_context.url.set(url) @@ -156,41 +172,34 @@ def call( headers, params, content = self._prepare_request_call(headers, params, data, json) - httpx_client = self.httpx_client + impit_client = self.impit_client - def _make_request(stop_retrying: Callable, attempt: int) -> httpx.Response: + def _make_request(stop_retrying: Callable, attempt: int) -> impit.Response: log_context.attempt.set(attempt) logger.debug('Sending request') self.stats.requests += 1 try: - request = httpx_client.build_request( + # Increase timeout with each attempt. Max timeout is bounded by the client timeout. + timeout = min(self.timeout_secs, (timeout_secs or self.timeout_secs) * 2 ** (attempt - 1)) + + url_with_params = self._build_url_with_params(url, params) + + response = impit_client.request( method=method, - url=url, + url=url_with_params, headers=headers, - params=params, content=content, - ) - - # Increase timeout with each attempt. 
Max timeout is bounded by the client timeout. - timeout = min(self.timeout_secs, (timeout_secs or self.timeout_secs) * 2 ** (attempt - 1)) - request.extensions['timeout'] = { - 'connect': timeout, - 'pool': timeout, - 'read': timeout, - 'write': timeout, - } - - response = httpx_client.send( - request=request, + timeout=timeout, stream=stream or False, ) # If response status is < 300, the request was successful, and we can return the result if response.status_code < 300: # noqa: PLR2004 logger.debug('Request successful', extra={'status_code': response.status_code}) - if not stream: + # TODO: Impit does not support setting custom attributes on the response object, so the parsed body is not attached to the response here yet. + if not stream and response.content == b'A unique condition for checking types. ABRACADABRA': _maybe_parsed_body = ( self._maybe_parse_response(response) if parse_response else response.content ) @@ -214,7 +223,7 @@ def _make_request(stop_retrying: Callable, attempt: int) -> httpx.Response: if response.status_code < 500 and response.status_code != HTTPStatus.TOO_MANY_REQUESTS: # noqa: PLR2004 logger.debug('Status code is not retryable', extra={'status_code': response.status_code}) stop_retrying() - raise ApifyApiError(response, attempt) + raise ApifyApiError(response, attempt, method=method) return retry_with_exp_backoff( _make_request, @@ -238,7 +247,7 @@ async def call( stream: bool | None = None, parse_response: bool | None = True, timeout_secs: int | None = None, - ) -> httpx.Response: + ) -> impit.Response: log_context.method.set(method) log_context.url.set(url) @@ -249,38 +258,31 @@ async def call( headers, params, content = self._prepare_request_call(headers, params, data, json) - httpx_async_client = self.httpx_async_client + impit_async_client = self.impit_async_client - async def _make_request(stop_retrying: Callable, attempt: int) -> httpx.Response: + async def _make_request(stop_retrying: Callable, attempt: int) -> impit.Response: log_context.attempt.set(attempt) logger.debug('Sending request') try: - request = httpx_async_client.build_request( + # Increase timeout with each attempt. Max timeout is bounded by the client timeout. + timeout = min(self.timeout_secs, (timeout_secs or self.timeout_secs) * 2 ** (attempt - 1)) + + url_with_params = self._build_url_with_params(url, params) + + response = await impit_async_client.request( method=method, - url=url, + url=url_with_params, headers=headers, - params=params, content=content, - ) - - # Increase timeout with each attempt. Max timeout is bounded by the client timeout. - timeout = min(self.timeout_secs, (timeout_secs or self.timeout_secs) * 2 ** (attempt - 1)) - request.extensions['timeout'] = { - 'connect': timeout, - 'pool': timeout, - 'read': timeout, - 'write': timeout, - } - - response = await httpx_async_client.send( - request=request, + timeout=timeout, stream=stream or False, ) # If response status is < 300, the request was successful, and we can return the result if response.status_code < 300: # noqa: PLR2004 logger.debug('Request successful', extra={'status_code': response.status_code}) - if not stream: + # TODO: Impit does not support setting custom attributes on the response object, so the parsed body is not attached to the response here yet. + if not stream and response.content == b'A unique condition for checking types.
ABRACADABRA': _maybe_parsed_body = ( self._maybe_parse_response(response) if parse_response else response.content ) @@ -304,7 +306,7 @@ async def _make_request(stop_retrying: Callable, attempt: int) -> httpx.Response if response.status_code < 500 and response.status_code != HTTPStatus.TOO_MANY_REQUESTS: # noqa: PLR2004 logger.debug('Status code is not retryable', extra={'status_code': response.status_code}) stop_retrying() - raise ApifyApiError(response, attempt) + raise ApifyApiError(response, attempt, method=method) return await retry_with_exp_backoff_async( _make_request, diff --git a/src/apify_client/clients/base/actor_job_base_client.py b/src/apify_client/clients/base/actor_job_base_client.py index 2b63c834..13c23c40 100644 --- a/src/apify_client/clients/base/actor_job_base_client.py +++ b/src/apify_client/clients/base/actor_job_base_client.py @@ -1,6 +1,7 @@ from __future__ import annotations import asyncio +import json as jsonlib import math import time from datetime import datetime, timezone @@ -39,7 +40,7 @@ def _wait_for_finish(self, wait_secs: int | None = None) -> dict | None: method='GET', params=self._params(waitForFinish=wait_for_finish), ) - job = parse_date_fields(pluck_data(response.json())) + job = parse_date_fields(pluck_data(jsonlib.loads(response.text))) seconds_elapsed = math.floor((datetime.now(timezone.utc) - started_at).total_seconds()) if ActorJobStatus(job['status']).is_terminal or ( @@ -70,7 +71,7 @@ def _abort(self, *, gracefully: bool | None = None) -> dict: method='POST', params=self._params(gracefully=gracefully), ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) @ignore_docs @@ -94,7 +95,7 @@ async def _wait_for_finish(self, wait_secs: int | None = None) -> dict | None: method='GET', params=self._params(waitForFinish=wait_for_finish), ) - job = parse_date_fields(pluck_data(response.json())) + job = parse_date_fields(pluck_data(jsonlib.loads(response.text))) seconds_elapsed = math.floor((datetime.now(timezone.utc) - started_at).total_seconds()) if ActorJobStatus(job['status']).is_terminal or ( @@ -125,4 +126,4 @@ async def _abort(self, *, gracefully: bool | None = None) -> dict: method='POST', params=self._params(gracefully=gracefully), ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) diff --git a/src/apify_client/clients/base/resource_client.py b/src/apify_client/clients/base/resource_client.py index 37e2d3b0..8a8ef279 100644 --- a/src/apify_client/clients/base/resource_client.py +++ b/src/apify_client/clients/base/resource_client.py @@ -1,5 +1,7 @@ from __future__ import annotations +import json as jsonlib + from apify_shared.utils import ignore_docs, parse_date_fields from apify_client._errors import ApifyApiError @@ -20,7 +22,7 @@ def _get(self, timeout_secs: int | None = None) -> dict | None: timeout_secs=timeout_secs, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -36,7 +38,7 @@ def _update(self, updated_fields: dict, timeout_secs: int | None = None) -> dict timeout_secs=timeout_secs, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def _delete(self, timeout_secs: int | None = None) -> None: try: @@ -64,7 +66,7 @@ async def _get(self, timeout_secs: int | None = None) -> dict | None: 
timeout_secs=timeout_secs, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -80,7 +82,7 @@ async def _update(self, updated_fields: dict, timeout_secs: int | None = None) - timeout_secs=timeout_secs, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def _delete(self, timeout_secs: int | None = None) -> None: try: diff --git a/src/apify_client/clients/base/resource_collection_client.py b/src/apify_client/clients/base/resource_collection_client.py index e4984fa9..f007c596 100644 --- a/src/apify_client/clients/base/resource_collection_client.py +++ b/src/apify_client/clients/base/resource_collection_client.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json as jsonlib from typing import Any, Generic, TypeVar from apify_shared.utils import ignore_docs, parse_date_fields @@ -53,7 +54,7 @@ def _list(self, **kwargs: Any) -> ListPage: params=self._params(**kwargs), ) - return ListPage(parse_date_fields(pluck_data(response.json()))) + return ListPage(parse_date_fields(pluck_data(jsonlib.loads(response.text)))) def _create(self, resource: dict) -> dict: response = self.http_client.call( @@ -63,7 +64,7 @@ def _create(self, resource: dict) -> dict: json=resource, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def _get_or_create(self, name: str | None = None, resource: dict | None = None) -> dict: response = self.http_client.call( @@ -73,7 +74,7 @@ def _get_or_create(self, name: str | None = None, resource: dict | None = None) json=resource, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) @ignore_docs @@ -87,7 +88,7 @@ async def _list(self, **kwargs: Any) -> ListPage: params=self._params(**kwargs), ) - return ListPage(parse_date_fields(pluck_data(response.json()))) + return ListPage(parse_date_fields(pluck_data(jsonlib.loads(response.text)))) async def _create(self, resource: dict) -> dict: response = await self.http_client.call( @@ -97,7 +98,7 @@ async def _create(self, resource: dict) -> dict: json=resource, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def _get_or_create( self, @@ -111,4 +112,4 @@ async def _get_or_create( json=resource, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) diff --git a/src/apify_client/clients/resource_clients/actor.py b/src/apify_client/clients/resource_clients/actor.py index a2473699..9b64795b 100644 --- a/src/apify_client/clients/resource_clients/actor.py +++ b/src/apify_client/clients/resource_clients/actor.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json as jsonlib from typing import TYPE_CHECKING, Any, Literal from apify_shared.utils import ( @@ -276,7 +277,7 @@ def start( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def call( self, @@ -386,7 +387,7 @@ def build( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def builds(self) -> BuildCollectionClient: """Retrieve a client for the builds of 
this Actor.""" @@ -417,7 +418,7 @@ async def default_build( ) response = self.http_client.call(url=self._url('builds/default'), method='GET', params=request_params) - data = pluck_data(response.json()) + data = pluck_data(jsonlib.loads(response.text)) return BuildClient( base_url=self.base_url, @@ -681,7 +682,7 @@ async def start( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def call( self, @@ -795,7 +796,7 @@ async def build( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def builds(self) -> BuildCollectionClientAsync: """Retrieve a client for the builds of this Actor.""" @@ -830,7 +831,7 @@ async def default_build( method='GET', params=request_params, ) - data = pluck_data(response.json()) + data = pluck_data(jsonlib.loads(response.text)) return BuildClientAsync( base_url=self.base_url, diff --git a/src/apify_client/clients/resource_clients/build.py b/src/apify_client/clients/resource_clients/build.py index bbc91f59..b427fcfa 100644 --- a/src/apify_client/clients/resource_clients/build.py +++ b/src/apify_client/clients/resource_clients/build.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json as jsonlib from typing import Any from apify_shared.utils import ignore_docs @@ -56,7 +57,7 @@ def get_open_api_definition(self) -> dict | None: method='GET', ) - response_data: dict = response.json() + response_data: dict = jsonlib.loads(response.text) return response_data @@ -133,7 +134,7 @@ async def get_open_api_definition(self) -> dict | None: method='GET', ) - response_data: dict = response.json() + response_data: dict = jsonlib.loads(response.text) return response_data diff --git a/src/apify_client/clients/resource_clients/dataset.py b/src/apify_client/clients/resource_clients/dataset.py index 368eef0b..19e98bda 100644 --- a/src/apify_client/clients/resource_clients/dataset.py +++ b/src/apify_client/clients/resource_clients/dataset.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json as jsonlib import warnings from contextlib import asynccontextmanager, contextmanager from typing import TYPE_CHECKING, Any @@ -14,7 +15,7 @@ if TYPE_CHECKING: from collections.abc import AsyncIterator, Iterator - import httpx + import impit from apify_shared.consts import StorageGeneralAccess from apify_shared.types import JSONSerializable @@ -137,7 +138,7 @@ def list_items( params=request_params, ) - data = response.json() + data = jsonlib.loads(response.text) return ListPage( { @@ -446,7 +447,7 @@ def stream_items( skip_hidden: bool | None = None, xml_root: str | None = None, xml_row: str | None = None, - ) -> Iterator[httpx.Response]: + ) -> Iterator[impit.Response]: """Retrieve the items in the dataset as a stream. 
https://docs.apify.com/api/v2#/reference/datasets/item-collection/get-items @@ -565,7 +566,7 @@ def get_statistics(self) -> dict | None: params=self._params(), timeout_secs=_SMALL_TIMEOUT, ) - return pluck_data(response.json()) + return pluck_data(jsonlib.loads(response.text)) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -687,7 +688,7 @@ async def list_items( params=request_params, ) - data = response.json() + data = jsonlib.loads(response.text) return ListPage( { @@ -903,7 +904,7 @@ async def stream_items( skip_hidden: bool | None = None, xml_root: str | None = None, xml_row: str | None = None, - ) -> AsyncIterator[httpx.Response]: + ) -> AsyncIterator[impit.Response]: """Retrieve the items in the dataset as a stream. https://docs.apify.com/api/v2#/reference/datasets/item-collection/get-items @@ -1022,7 +1023,7 @@ async def get_statistics(self) -> dict | None: params=self._params(), timeout_secs=_SMALL_TIMEOUT, ) - return pluck_data(response.json()) + return pluck_data(jsonlib.loads(response.text)) except ApifyApiError as exc: catch_not_found_or_throw(exc) diff --git a/src/apify_client/clients/resource_clients/key_value_store.py b/src/apify_client/clients/resource_clients/key_value_store.py index 7100d475..ccc254f8 100644 --- a/src/apify_client/clients/resource_clients/key_value_store.py +++ b/src/apify_client/clients/resource_clients/key_value_store.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json as jsonlib import warnings from contextlib import asynccontextmanager, contextmanager from http import HTTPStatus @@ -99,7 +100,7 @@ def list_keys( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def get_record(self, key: str, *, as_bytes: bool = False, as_file: bool = False) -> dict | None: """Retrieve the given record from the key-value store. @@ -367,7 +368,7 @@ async def list_keys( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def get_record(self, key: str) -> dict | None: """Retrieve the given record from the key-value store. diff --git a/src/apify_client/clients/resource_clients/log.py b/src/apify_client/clients/resource_clients/log.py index bca8b07a..e007f667 100644 --- a/src/apify_client/clients/resource_clients/log.py +++ b/src/apify_client/clients/resource_clients/log.py @@ -21,7 +21,7 @@ from collections.abc import AsyncIterator, Iterator from types import TracebackType - import httpx + import impit from typing_extensions import Self from apify_client.clients import RunClient, RunClientAsync @@ -87,7 +87,7 @@ def get_as_bytes(self, *, raw: bool = False) -> bytes | None: return None @contextmanager - def stream(self, *, raw: bool = False) -> Iterator[httpx.Response | None]: + def stream(self, *, raw: bool = False) -> Iterator[impit.Response | None]: """Retrieve the log as a stream. https://docs.apify.com/api/v2#/reference/logs/log/get-log @@ -177,7 +177,7 @@ async def get_as_bytes(self, *, raw: bool = False) -> bytes | None: return None @asynccontextmanager - async def stream(self, *, raw: bool = False) -> AsyncIterator[httpx.Response | None]: + async def stream(self, *, raw: bool = False) -> AsyncIterator[impit.Response | None]: """Retrieve the log as a stream. 
https://docs.apify.com/api/v2#/reference/logs/log/get-log diff --git a/src/apify_client/clients/resource_clients/request_queue.py b/src/apify_client/clients/resource_clients/request_queue.py index 7470c84f..d6716b33 100644 --- a/src/apify_client/clients/resource_clients/request_queue.py +++ b/src/apify_client/clients/resource_clients/request_queue.py @@ -1,6 +1,7 @@ from __future__ import annotations import asyncio +import json as jsonlib import logging import math from collections.abc import Iterable @@ -116,7 +117,7 @@ def list_head(self, *, limit: int | None = None) -> dict: timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> dict: """Retrieve a given number of unlocked requests from the beginning of the queue and lock them for a given time. @@ -139,7 +140,7 @@ def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> dic timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def add_request(self, request: dict, *, forefront: bool | None = None) -> dict: """Add a request to the queue. @@ -163,7 +164,7 @@ def add_request(self, request: dict, *, forefront: bool | None = None) -> dict: timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def get_request(self, request_id: str) -> dict | None: """Retrieve a request from the queue. @@ -183,7 +184,7 @@ def get_request(self, request_id: str) -> dict | None: params=self._params(), timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -214,7 +215,7 @@ def update_request(self, request: dict, *, forefront: bool | None = None) -> dic timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def delete_request(self, request_id: str) -> None: """Delete a request from the queue. @@ -260,7 +261,7 @@ def prolong_request_lock( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def delete_request_lock(self, request_id: str, *, forefront: bool | None = None) -> None: """Delete the lock on a request. 
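Every hunk in this file applies the same mechanical change: impit responses are parsed with `json.loads(response.text)` instead of an httpx-style `.json()` helper. The shared pipeline, factored out as a sketch (the helper names are hypothetical; only a `.text` attribute on the response is assumed):

    import json as jsonlib
    from typing import Any

    def pluck_data(parsed: dict) -> dict:
        """Minimal stand-in for the client's envelope unwrapping."""
        if 'data' in parsed:
            return parsed['data']
        raise ValueError('The response does not contain the "data" property.')

    def parse_json_response(response: Any) -> dict:
        # impit exposes the raw body as `.text`; parse it manually rather
        # than relying on a `.json()` convenience method.
        return pluck_data(jsonlib.loads(response.text))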
@@ -349,7 +350,7 @@ def batch_add_requests( timeout_secs=_MEDIUM_TIMEOUT, ) - response_parsed = parse_date_fields(pluck_data(response.json())) + response_parsed = parse_date_fields(pluck_data(jsonlib.loads(response.text))) processed_requests.extend(response_parsed.get('processedRequests', [])) unprocessed_requests.extend(response_parsed.get('unprocessedRequests', [])) @@ -376,7 +377,7 @@ def batch_delete_requests(self, requests: list[dict]) -> dict: timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def list_requests( self, @@ -401,7 +402,7 @@ def list_requests( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def unlock_requests(self: RequestQueueClient) -> dict: """Unlock all requests in the queue, which were locked by the same clientKey or from the same Actor run. @@ -419,7 +420,7 @@ def unlock_requests(self: RequestQueueClient) -> dict: params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) class RequestQueueClientAsync(ResourceClientAsync): @@ -497,7 +498,7 @@ async def list_head(self, *, limit: int | None = None) -> dict: timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) -> dict: """Retrieve a given number of unlocked requests from the beginning of the queue and lock them for a given time. @@ -520,7 +521,7 @@ async def list_and_lock_head(self, *, lock_secs: int, limit: int | None = None) timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def add_request(self, request: dict, *, forefront: bool | None = None) -> dict: """Add a request to the queue. @@ -544,7 +545,7 @@ async def add_request(self, request: dict, *, forefront: bool | None = None) -> timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def get_request(self, request_id: str) -> dict | None: """Retrieve a request from the queue. @@ -564,7 +565,7 @@ async def get_request(self, request_id: str) -> dict | None: params=self._params(), timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -595,7 +596,7 @@ async def update_request(self, request: dict, *, forefront: bool | None = None) timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def delete_request(self, request_id: str) -> None: """Delete a request from the queue. 
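batch_add_requests above posts requests in chunks and accumulates `processedRequests` and `unprocessedRequests` across the per-batch responses. The aggregation pattern on its own, with the API call stubbed out and the batch size chosen arbitrarily:

    from collections.abc import Iterable
    from itertools import islice

    def chunked(items: Iterable[dict], size: int) -> Iterable[list[dict]]:
        iterator = iter(items)
        while batch := list(islice(iterator, size)):
            yield batch

    def batch_add_requests(requests: list[dict], *, batch_size: int = 25) -> dict:
        processed: list[dict] = []
        unprocessed: list[dict] = []

        for batch in chunked(requests, batch_size):
            # Stand-in for the real API call, which POSTs each batch
            # and parses the JSON envelope of the response.
            response_parsed = {'processedRequests': batch, 'unprocessedRequests': []}
            processed.extend(response_parsed.get('processedRequests', []))
            unprocessed.extend(response_parsed.get('unprocessedRequests', []))

        return {'processedRequests': processed, 'unprocessedRequests': unprocessed}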
@@ -639,7 +640,7 @@ async def prolong_request_lock( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def delete_request_lock( self, @@ -695,7 +696,7 @@ async def _batch_add_requests_worker( timeout_secs=_MEDIUM_TIMEOUT, ) - response_parsed = parse_date_fields(pluck_data(response.json())) + response_parsed = parse_date_fields(pluck_data(jsonlib.loads(response.text))) processed_requests.extend(response_parsed.get('processedRequests', [])) unprocessed_requests.extend(response_parsed.get('unprocessedRequests', [])) @@ -805,7 +806,7 @@ async def batch_delete_requests(self, requests: list[dict]) -> dict: json=requests, timeout_secs=_SMALL_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def list_requests( self, @@ -830,7 +831,7 @@ async def list_requests( timeout_secs=_MEDIUM_TIMEOUT, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def unlock_requests(self: RequestQueueClientAsync) -> dict: """Unlock all requests in the queue, which were locked by the same clientKey or from the same Actor run. @@ -848,4 +849,4 @@ async def unlock_requests(self: RequestQueueClientAsync) -> dict: params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) diff --git a/src/apify_client/clients/resource_clients/run.py b/src/apify_client/clients/resource_clients/run.py index 2b51aaf7..90158763 100644 --- a/src/apify_client/clients/resource_clients/run.py +++ b/src/apify_client/clients/resource_clients/run.py @@ -1,6 +1,7 @@ from __future__ import annotations import json +import json as jsonlib import logging import random import string @@ -148,7 +149,7 @@ def metamorph( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def resurrect( self, @@ -195,7 +196,7 @@ def resurrect( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def reboot(self) -> dict: """Reboot an Actor run. Only runs that are running, i.e. runs with status RUNNING can be rebooted. @@ -209,7 +210,7 @@ def reboot(self) -> dict: url=self._url('reboot'), method='POST', ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def dataset(self) -> DatasetClient: """Get the client for the default dataset of the Actor run. @@ -470,7 +471,7 @@ async def metamorph( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def resurrect( self, @@ -517,7 +518,7 @@ async def resurrect( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def reboot(self) -> dict: """Reboot an Actor run. Only runs that are running, i.e. runs with status RUNNING can be rebooted. 
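Because only runs with status RUNNING can be rebooted, callers usually check the run state first. A hypothetical usage sketch (token and run ID are placeholders):

    from apify_client import ApifyClient

    client = ApifyClient(token='<YOUR_API_TOKEN>')
    run_client = client.run(run_id='<RUN_ID>')

    run = run_client.get()
    # Reboot is only valid while the run is still in the RUNNING state.
    if run is not None and run.get('status') == 'RUNNING':
        rebooted = run_client.reboot()
        print(rebooted['status'])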
@@ -531,7 +532,7 @@ async def reboot(self) -> dict: url=self._url('reboot'), method='POST', ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def dataset(self) -> DatasetClientAsync: """Get the client for the default dataset of the Actor run. diff --git a/src/apify_client/clients/resource_clients/schedule.py b/src/apify_client/clients/resource_clients/schedule.py index 315c5b83..360e819e 100644 --- a/src/apify_client/clients/resource_clients/schedule.py +++ b/src/apify_client/clients/resource_clients/schedule.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json as jsonlib from typing import Any from apify_shared.utils import filter_out_none_values_recursively, ignore_docs @@ -115,7 +116,7 @@ def get_log(self) -> list | None: method='GET', params=self._params(), ) - return pluck_data_as_list(response.json()) + return pluck_data_as_list(jsonlib.loads(response.text)) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -205,7 +206,7 @@ async def get_log(self) -> list | None: method='GET', params=self._params(), ) - return pluck_data_as_list(response.json()) + return pluck_data_as_list(jsonlib.loads(response.text)) except ApifyApiError as exc: catch_not_found_or_throw(exc) diff --git a/src/apify_client/clients/resource_clients/task.py b/src/apify_client/clients/resource_clients/task.py index e791f46b..7276650e 100644 --- a/src/apify_client/clients/resource_clients/task.py +++ b/src/apify_client/clients/resource_clients/task.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json as jsonlib from typing import TYPE_CHECKING, Any, cast from apify_shared.utils import ( @@ -201,7 +202,7 @@ def start( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) def call( self, @@ -264,7 +265,7 @@ def get_input(self) -> dict | None: method='GET', params=self._params(), ) - return cast('dict', response.json()) + return cast('dict', jsonlib.loads(response.text)) except ApifyApiError as exc: catch_not_found_or_throw(exc) return None @@ -283,7 +284,7 @@ def update_input(self, *, task_input: dict) -> dict: params=self._params(), json=task_input, ) - return cast('dict', response.json()) + return cast('dict', jsonlib.loads(response.text)) def runs(self) -> RunCollectionClient: """Retrieve a client for the runs of this task.""" @@ -458,7 +459,7 @@ async def start( params=request_params, ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) async def call( self, @@ -521,7 +522,7 @@ async def get_input(self) -> dict | None: method='GET', params=self._params(), ) - return cast('dict', response.json()) + return cast('dict', jsonlib.loads(response.text)) except ApifyApiError as exc: catch_not_found_or_throw(exc) return None @@ -540,7 +541,7 @@ async def update_input(self, *, task_input: dict) -> dict: params=self._params(), json=task_input, ) - return cast('dict', response.json()) + return cast('dict', jsonlib.loads(response.text)) def runs(self) -> RunCollectionClientAsync: """Retrieve a client for the runs of this task.""" diff --git a/src/apify_client/clients/resource_clients/user.py b/src/apify_client/clients/resource_clients/user.py index 71ab9998..87d7aeb2 100644 --- a/src/apify_client/clients/resource_clients/user.py +++ b/src/apify_client/clients/resource_clients/user.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json as 
jsonlib from typing import Any from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, parse_date_fields @@ -50,7 +51,7 @@ def monthly_usage(self) -> dict | None: method='GET', params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -74,7 +75,7 @@ def limits(self) -> dict | None: method='GET', params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -142,7 +143,7 @@ async def monthly_usage(self) -> dict | None: method='GET', params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -166,7 +167,7 @@ async def limits(self) -> dict | None: method='GET', params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) diff --git a/src/apify_client/clients/resource_clients/webhook.py b/src/apify_client/clients/resource_clients/webhook.py index 87592a15..a74d0216 100644 --- a/src/apify_client/clients/resource_clients/webhook.py +++ b/src/apify_client/clients/resource_clients/webhook.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json as jsonlib from typing import TYPE_CHECKING, Any from apify_shared.utils import ( @@ -151,7 +152,7 @@ def test(self) -> dict | None: params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) @@ -262,7 +263,7 @@ async def test(self) -> dict | None: params=self._params(), ) - return parse_date_fields(pluck_data(response.json())) + return parse_date_fields(pluck_data(jsonlib.loads(response.text))) except ApifyApiError as exc: catch_not_found_or_throw(exc) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 5114a4b1..5768e87f 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -21,7 +21,7 @@ def apify_client() -> ApifyClient: # This fixture can't be session-scoped, # because then you start getting `RuntimeError: Event loop is closed` errors, -# because `httpx.AsyncClient` in `ApifyClientAsync` tries to reuse the same event loop across requests, +# because `impit.AsyncClient` in `ApifyClientAsync` tries to reuse the same event loop across requests, # but `pytest-asyncio` closes the event loop after each test, # and uses a new one for the next test. 
@pytest.fixture diff --git a/tests/unit/test_client_timeouts.py b/tests/unit/test_client_timeouts.py index 3259c977..bc1efca2 100644 --- a/tests/unit/test_client_timeouts.py +++ b/tests/unit/test_client_timeouts.py @@ -1,13 +1,10 @@ from __future__ import annotations -import time -from functools import partial -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from unittest.mock import Mock import pytest -import respx -from werkzeug import Response as WerkzeugResponse +from impit import Response, TimeoutException from apify_client import ApifyClient from apify_client._http_client import HTTPClient, HTTPClientAsync @@ -17,16 +14,33 @@ from apify_client.clients.resource_clients import key_value_store as kvs if TYPE_CHECKING: - from httpx import Request, Response + from collections.abc import Iterator + from pytest_httpserver import HTTPServer - from werkzeug import Request as WerkzeugRequest class EndOfTestError(Exception): """Custom exception that is raised after the relevant part of the code is executed to stop the test.""" -async def test_dynamic_timeout_async_client(httpserver: HTTPServer) -> None: +@pytest.fixture +def patch_request(monkeypatch: pytest.MonkeyPatch) -> Iterator[list]: + timeouts = [] + + def mock_request(*_args: Any, **kwargs: Any) -> None: + timeouts.append(kwargs.get('timeout')) + raise EndOfTestError + + async def mock_request_async(*args: Any, **kwargs: Any) -> None: + return mock_request(*args, **kwargs) + + monkeypatch.setattr('impit.Client.request', mock_request) + monkeypatch.setattr('impit.AsyncClient.request', mock_request_async) + yield timeouts + monkeypatch.undo() + + +async def test_dynamic_timeout_async_client(monkeypatch: pytest.MonkeyPatch) -> None: """Tests timeout values for request with retriable errors. Values should increase with each attempt, starting from initial call value and bounded by the client timeout value. @@ -34,36 +48,37 @@ async def test_dynamic_timeout_async_client(httpserver: HTTPServer) -> None: should_raise_error = iter((True, True, True, False)) call_timeout = 1 client_timeout = 5 - expected_timeouts = iter((call_timeout, 2, 4, client_timeout)) + expected_timeouts = [call_timeout, 2, 4, client_timeout] retry_counter_mock = Mock() - def slow_handler(_request: WerkzeugRequest) -> WerkzeugResponse: - timeout = next(expected_timeouts) - should_raise = next(should_raise_error) - # Counter for retries - retry_counter_mock() + timeouts = [] + async def mock_request(*_args: Any, **kwargs: Any) -> Response: + timeouts.append(kwargs.get('timeout')) + retry_counter_mock() + should_raise = next(should_raise_error) if should_raise: - # We expect longer than the client is willing to wait. This will cause a timeout on the client side. 
- time.sleep(timeout + 0.02) + raise TimeoutException - return WerkzeugResponse('200 OK') + response = Response + response.status_code = 200 + return response # type: ignore[return-value] - httpserver.expect_request('/async_timeout', method='GET').respond_with_handler(slow_handler) + monkeypatch.setattr('impit.AsyncClient.request', mock_request) - server_url = str(httpserver.url_for('/async_timeout')) response = await HTTPClientAsync(timeout_secs=client_timeout).call( - method='GET', url=server_url, timeout_secs=call_timeout + method='GET', url='http://placeholder.url/async_timeout', timeout_secs=call_timeout ) # Check that the retry counter was called the expected number of times # (4 times: 3 retries + 1 final successful call) assert retry_counter_mock.call_count == 4 + assert timeouts == expected_timeouts # Check that the response is successful assert response.status_code == 200 -def test_dynamic_timeout_sync_client(httpserver: HTTPServer) -> None: +def test_dynamic_timeout_sync_client(monkeypatch: pytest.MonkeyPatch) -> None: """Tests timeout values for request with retriable errors. Values should increase with each attempt, starting from initial call value and bounded by the client timeout value. @@ -71,48 +86,36 @@ def test_dynamic_timeout_sync_client(httpserver: HTTPServer) -> None: should_raise_error = iter((True, True, True, False)) call_timeout = 1 client_timeout = 5 - expected_timeouts = iter((call_timeout, 2, 4, client_timeout)) + expected_timeouts = [call_timeout, 2, 4, client_timeout] retry_counter_mock = Mock() - def slow_handler(_request: WerkzeugRequest) -> WerkzeugResponse: - timeout = next(expected_timeouts) - should_raise = next(should_raise_error) - # Counter for retries - retry_counter_mock() + timeouts = [] + def mock_request(*_args: Any, **kwargs: Any) -> Response: + timeouts.append(kwargs.get('timeout')) + retry_counter_mock() + should_raise = next(should_raise_error) if should_raise: - # We expect longer than the client is willing to wait. This will cause a timeout on the client side. - time.sleep(timeout + 0.02) + raise TimeoutException - return WerkzeugResponse('200 OK') + response = Response + response.status_code = 200 + return response # type: ignore[return-value] - httpserver.expect_request('/sync_timeout', method='GET').respond_with_handler(slow_handler) + monkeypatch.setattr('impit.Client.request', mock_request) - server_url = str(httpserver.url_for('/sync_timeout')) - - response = HTTPClient(timeout_secs=client_timeout).call(method='GET', url=server_url, timeout_secs=call_timeout) + response = HTTPClient(timeout_secs=client_timeout).call( + method='GET', url='http://placeholder.url/sync_timeout', timeout_secs=call_timeout + ) # Check that the retry counter was called the expected number of times # (4 times: 3 retries + 1 final successful call) assert retry_counter_mock.call_count == 4 + assert timeouts == expected_timeouts # Check that the response is successful assert response.status_code == 200 -def assert_timeout(expected_timeout: int, request: Request) -> Response: - """Assert that correct timeouts are set on the request and raise `EndOfTestError`. - - This is intended for tests that are only testing timeout value and further execution of the code is not desired. 
- """ - assert request.extensions['timeout'] == { - 'connect': expected_timeout, - 'pool': expected_timeout, - 'read': expected_timeout, - 'write': expected_timeout, - } - raise EndOfTestError - - _timeout_params = [ (DatasetClient, 'get', dataset._SMALL_TIMEOUT, {}), (DatasetClient, 'update', dataset._SMALL_TIMEOUT, {}), @@ -153,18 +156,22 @@ def assert_timeout(expected_timeout: int, request: Request) -> Response: ('client_type', 'method', 'expected_timeout', 'kwargs'), _timeout_params, ) -@respx.mock def test_specific_timeouts_for_specific_endpoints_sync( client_type: type[DatasetClient | KeyValueStoreClient | RequestQueueClient], method: str, kwargs: dict, expected_timeout: int, + patch_request: list[float | None], + httpserver: HTTPServer, ) -> None: - respx.route(host='example.com').mock(side_effect=partial(assert_timeout, expected_timeout)) - client = client_type(base_url='https://example.com', root_client=ApifyClient(), http_client=HTTPClient()) + httpserver.expect_request('/').respond_with_data(status=200) + client = client_type(base_url=httpserver.url_for('/'), root_client=ApifyClient(), http_client=HTTPClient()) with pytest.raises(EndOfTestError): getattr(client, method)(**kwargs) + assert len(patch_request) == 1 + assert patch_request[0] == expected_timeout + # This test will probably need to be reworked or skipped when switching to `impit`. # Without the mock library, it's difficult to reproduce, maybe with monkeypatch? @@ -172,14 +179,18 @@ def test_specific_timeouts_for_specific_endpoints_sync( ('client_type', 'method', 'expected_timeout', 'kwargs'), _timeout_params, ) -@respx.mock async def test_specific_timeouts_for_specific_endpoints_async( client_type: type[DatasetClient | KeyValueStoreClient | RequestQueueClient], method: str, kwargs: dict, expected_timeout: int, + patch_request: list[float | None], + httpserver: HTTPServer, ) -> None: - respx.route(host='example.com').mock(side_effect=partial(assert_timeout, expected_timeout)) - client = client_type(base_url='https://example.com', root_client=ApifyClient(), http_client=HTTPClient()) + httpserver.expect_request('/').respond_with_data(status=200) + client = client_type(base_url=httpserver.url_for('/'), root_client=ApifyClient(), http_client=HTTPClient()) with pytest.raises(EndOfTestError): await getattr(client, method)(**kwargs) + + assert len(patch_request) == 1 + assert patch_request[0] == expected_timeout diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index c518935b..315aba70 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -115,9 +115,8 @@ def _streaming_log_handler(_request: Request) -> Response: """Handler for streaming log requests.""" def generate_logs() -> Iterator[bytes]: - for chunk in _MOCKED_ACTOR_LOGS: + for chunk in _MOCKED_ACTOR_LOGS: # noqa: UP028 yield chunk - time.sleep(0.01) total_size = sum(len(chunk) for chunk in _MOCKED_ACTOR_LOGS) @@ -242,7 +241,9 @@ def test_redirected_logs_sync( with caplog.at_level(logging.DEBUG, logger=logger_name), streamed_log: # Do stuff while the log from the other Actor is being redirected to the logs. 
+ print(1) time.sleep(1) + print(2) # Ensure logs are propagated assert {(record.message, record.levelno) for record in caplog.records} == set( diff --git a/uv.lock b/uv.lock index cddc278d..5f2b1cf3 100644 --- a/uv.lock +++ b/uv.lock @@ -29,6 +29,7 @@ dependencies = [ { name = "apify-shared" }, { name = "colorama" }, { name = "httpx" }, + { name = "impit" }, { name = "more-itertools" }, ] @@ -58,6 +59,7 @@ requires-dist = [ { name = "apify-shared", specifier = "<2.0.0" }, { name = "colorama", specifier = ">=0.4.0" }, { name = "httpx", specifier = ">=0.25" }, + { name = "impit", specifier = ">=0.5.1" }, { name = "more-itertools", specifier = ">=10.0.0" }, ] @@ -571,6 +573,55 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "impit" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/ba/975dfb789380932210d7e40cd3809ef2a9d6eb55b6e7a4edbbe93b56eaef/impit-0.5.1.tar.gz", hash = "sha256:11c8e6333ac7ac1d5db43f36503b7f488c2bc498aa3227daa31c0c4a4d072efe", size = 88421, upload-time = "2025-08-05T11:51:09.791Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/af/ef54998f5bcbd511b88b54498972349930f834bbbee763ed6d19cb120133/impit-0.5.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4d581912fa6160aca6e2e3b65c22a0a96267b14bfd61de8db650762ecae39fc3", size = 3840802, upload-time = "2025-08-05T11:49:58.169Z" }, + { url = "https://files.pythonhosted.org/packages/52/a7/1f24ef1dd6dd40a9332d49c74ac95af68f62949fd4f2498087e8f7cbf7c9/impit-0.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4981d4ceb726f5fdf51586dd6363c41b11b77f67dba41609ad71c84fdd7a8671", size = 3667962, upload-time = "2025-08-05T11:49:59.933Z" }, + { url = "https://files.pythonhosted.org/packages/4e/24/2c7b9a32f4d6df7bdc194f5ae9e3ad28a577b6392ddc13e3639aa107ba71/impit-0.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b720fb96a7efe140ad2e103c8389016b1ca2c93037c968f804d6c9f8d62ffb56", size = 6071842, upload-time = "2025-08-05T11:50:01.846Z" }, + { url = "https://files.pythonhosted.org/packages/1c/37/71e9e251b794403959f3159e5b6c5b808bd36333e8e528bfcc0d2d0d998b/impit-0.5.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9d57c371438143a57b7f8c27997b24f3543827ea2ff13dfcd85146a146ecfe74", size = 6363182, upload-time = "2025-08-05T11:50:03.49Z" }, + { url = "https://files.pythonhosted.org/packages/38/20/1937db3bca25fb623732b1ef9e071abee5db9456dcce5a313948a2089a75/impit-0.5.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:575b8e25a1c59634916777f648b8a1a27e5a98789cdd91fea9a9026e86232f0f", size = 6223927, upload-time = "2025-08-05T11:50:05.126Z" }, + { url = "https://files.pythonhosted.org/packages/5e/07/1233b7b965f368d3105f07fb979fb962628326d9948a6475b1a376acad05/impit-0.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:71bd03a60e44f0a1c44dc8274deb30e53204789edf22ed14c898ab146192c9f0", size = 3876835, upload-time = "2025-08-05T11:50:06.585Z" }, + { url = "https://files.pythonhosted.org/packages/63/83/be68a3389a217f6ce81b081a2c5dd82027e09d5d3e1c781150c3030fe339/impit-0.5.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9fbe14ab26a60a7711cce5e7ba7fa3c0b90eb2749bd693e9756c67eafdbd96a7", size = 3840861, upload-time = 
"2025-08-05T11:50:08.461Z" }, + { url = "https://files.pythonhosted.org/packages/c6/f2/ce1f9903a373fd06a40888588025779b019ebce96cee8346477b301a0096/impit-0.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1bc1be288cfa40a7d0ba8aca946a4f071d8dbe86d9e9bc00af12f2fd4b068043", size = 3667760, upload-time = "2025-08-05T11:50:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/92/bc/8ddf9c6a03afcb8df9b7a2e8e2e9abbc06f7be392f0fe9e3408da5f559a3/impit-0.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113f3bd183770ff4d97609882605dd14463ff327d53763c9f24aa92d40c749b", size = 6072129, upload-time = "2025-08-05T11:50:11.727Z" }, + { url = "https://files.pythonhosted.org/packages/ad/23/4b63e5178107e078425f06683f4a7daa1da61815fd9c88fb7b53bbfb8333/impit-0.5.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a2e53b1f4dc436564618aaafe9e43e2cd3e739a29a7ad4fbf03e1c284cb8a72c", size = 6363040, upload-time = "2025-08-05T11:50:13.602Z" }, + { url = "https://files.pythonhosted.org/packages/a3/6d/45a03a5c517f6db81e2f175abd656798ba3145c53b504e5dbac773e22f6c/impit-0.5.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f10ce3a39e97427b7b0b97ce39345bdddb5aa0ca639b6db3dd9b0155c5179e1c", size = 6223810, upload-time = "2025-08-05T11:50:15.475Z" }, + { url = "https://files.pythonhosted.org/packages/3a/96/519ad5eac1aaa87c1735186a57cf72ab0cf5f8cad340e4d3cd495161b66b/impit-0.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:30f82371edd542ff9e35d61cf05716ebf9b60dd6711ba99154fa21b1e06e476f", size = 3876980, upload-time = "2025-08-05T11:50:17.439Z" }, + { url = "https://files.pythonhosted.org/packages/3d/91/4f1ee59937d7686314ad049c3fa1866cb988e8ac3ed83c6624cd00b68faa/impit-0.5.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:23098462c9d26298f845c8e921203c794846870553e3c0ecef2f2bc89a0fbd8a", size = 3840403, upload-time = "2025-08-05T11:50:19.358Z" }, + { url = "https://files.pythonhosted.org/packages/0d/7f/f186e15efd4b0a5568956fffdbda9489826d0dd2cda770df3e0cca245449/impit-0.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d8a10f9ea91ab8c383469e1e0aaf311f489f89fbb52f2115964293af7c97cca", size = 3667029, upload-time = "2025-08-05T11:50:20.975Z" }, + { url = "https://files.pythonhosted.org/packages/ee/33/dca7f7b705e04dfd1ae5ced5e1042ccf105a7681cdc8ad1f0569e109b7f9/impit-0.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6d2aa1cca05df9e6ce9cbb875700af4e6bd26367fa34a40049801b23b73899a", size = 6071269, upload-time = "2025-08-05T11:50:22.567Z" }, + { url = "https://files.pythonhosted.org/packages/15/e8/3a5a22aca5cd7da1efc5aec5561c788b20e927ab5c0e798a12b5ec6d8897/impit-0.5.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:43d57c1df017ddc39f4db83c69271571887f4e818f2ec96a85e0d942d38dda8e", size = 6361790, upload-time = "2025-08-05T11:50:24.441Z" }, + { url = "https://files.pythonhosted.org/packages/b8/a1/4b30e8c098495f8a7f4c9aeac099f9d263e638855329e0fcb5e8afe1ebad/impit-0.5.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad7aa18d9ebc9bc8169e86a6644f5c219d3ee9fc192b2fb6d02df06501f4ccba", size = 6218223, upload-time = "2025-08-05T11:50:25.921Z" }, + { url = "https://files.pythonhosted.org/packages/d2/76/07a2ebaf30db8c516a29c3068788082d8a0e7a9a66950e3bb50a43d105bf/impit-0.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:b304fe7712c7d1f986dc336a026db956d218ec5dc9e744b37e0751a7308b658c", size = 3876101, upload-time = "2025-08-05T11:50:27.514Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/2d/a169d6d23d8869bc46a4f57b9733244141b488f19d6e14da90f3d555b22b/impit-0.5.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ac33d1b7753f8e0d6ead5936e239c4021a5d9ca3d30f66c6c466202682e5584b", size = 3840282, upload-time = "2025-08-05T11:50:28.898Z" }, + { url = "https://files.pythonhosted.org/packages/de/c2/6e4b2f24d055d9b3e3eff4ff593af88fd09cefb9edd6778edbe366986d05/impit-0.5.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2fda1d0a6fa7a1efb9fde9fe38a369c81f586195dc57129a2c45d40be0cc7b76", size = 3666852, upload-time = "2025-08-05T11:50:30.935Z" }, + { url = "https://files.pythonhosted.org/packages/09/70/c4065d036e149125b0e5f416ec11507f192154038177abcbb1617ab60c84/impit-0.5.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd6df468676d3de83b4a1cbffdeb053e231fa50d5ec6769f26339008396954a3", size = 6071225, upload-time = "2025-08-05T11:50:32.516Z" }, + { url = "https://files.pythonhosted.org/packages/c8/42/711916dbb12317d5df8b177e394993fe7d0d11edf4e2272dce7019c3acdb/impit-0.5.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e6ecffeb3fbbbdd412ec6949300f413b2944770e81c0b4074e8d448ed3f9d731", size = 6361700, upload-time = "2025-08-05T11:50:34.78Z" }, + { url = "https://files.pythonhosted.org/packages/58/8d/52d3c7874c2b267abdcfbc69f6971826f69e91112e989e39972d11079768/impit-0.5.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3de25bce67bb0c7b2f693a4412906ad87186dba55f930f260c77c91b27a551df", size = 6218178, upload-time = "2025-08-05T11:50:36.988Z" }, + { url = "https://files.pythonhosted.org/packages/dd/e2/45b171aef7c0c69de188ebe6ad06d5427ba63f74fad56fb02a1e8c45e253/impit-0.5.1-cp313-cp313-win_amd64.whl", hash = "sha256:e0be67803b60edc608ee5d78be77d0d1858d7ae6fff538b41d21069b35b0626a", size = 3876129, upload-time = "2025-08-05T11:50:38.638Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e6/c8d539d0400404595352ff5d350f0f1ee8a22efbaaf266495b279250a369/impit-0.5.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:490d00488ff4067efdd1da0790bba45c9220e99054a83bceb131147cbfaf3089", size = 6362113, upload-time = "2025-08-05T11:50:40.354Z" }, + { url = "https://files.pythonhosted.org/packages/f9/d6/37f072e052355925ddec1300792e4f927e4d3d7125d81616ec381c60c64e/impit-0.5.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e3ff31bc3ae6991cd5747c9184d469a5a47b4e9b9b5320891d3ea12068bb0f4", size = 6223961, upload-time = "2025-08-05T11:50:42.184Z" }, + { url = "https://files.pythonhosted.org/packages/97/39/8ba656a669cbe4c7349b7f1313c2c63479b660654dcd67e2c574e3116f70/impit-0.5.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8674596577662af97b5b93917b7431c2c9ea78f0288de697e46b8cf3af7484", size = 6071438, upload-time = "2025-08-05T11:50:43.95Z" }, + { url = "https://files.pythonhosted.org/packages/79/48/c6248ac47e96d3866508215e13cfa5ae9d66b9061b2a9979aa1eeb6c9556/impit-0.5.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e6afdd777d1f3a970e442167d8ea57e9e3a049ca55ae414c1da103576174d137", size = 3841265, upload-time = "2025-08-05T11:50:45.586Z" }, + { url = "https://files.pythonhosted.org/packages/65/da/8a29032edf12cd7ad980707d63c87c1084fde9a24994689de599a96e5e0a/impit-0.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d626da45fea11ff67b710ccdc2c086b680ee7b8ebadfbe50f3c10d60205e9a53", size = 3668536, upload-time = "2025-08-05T11:50:47.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/fa/6a71568e36c8f6238f2037db48bff66b264af44d74f5b76077b6ac10bf62/impit-0.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e919e709a041f74fc42afd85e6abd4d17b650dbb5104388a2ee6099bffeb3f93", size = 6072434, upload-time = "2025-08-05T11:50:48.88Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/82a2f3afd297cad98e19767eb959aa85ef31af6f7b723db7de97d35ff83c/impit-0.5.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9cb8e9c6e3221ec6205633e002fc42f20b28f95af5ce75b90a243c3840ea89c9", size = 6363677, upload-time = "2025-08-05T11:50:50.799Z" }, + { url = "https://files.pythonhosted.org/packages/70/4a/83ae4e89981d7536858dc1e4fa9aa009ee749014d78cb83b1f89a5d7bcd7/impit-0.5.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a73785ff7626eebbdc0b09f5e015a1274111807b5573603e725695a95b43786d", size = 6224149, upload-time = "2025-08-05T11:50:52.58Z" }, + { url = "https://files.pythonhosted.org/packages/91/3b/62e5c7c2e3945872573fd70845a2b8fe3ac6aaee4a9c8022102aa37b6fd2/impit-0.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:547b48c388f93408c46717588b1fa2867f13eed5bbb139d440733b2adc5d59af", size = 3877436, upload-time = "2025-08-05T11:50:54.284Z" }, + { url = "https://files.pythonhosted.org/packages/10/10/42a9f94205688b64b613a87558673cf9d7f78566129ea2c9ff12119ec9f6/impit-0.5.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad3cd69078cd0d2c845b4c4a46098889db1dbc4ec943467c95ff5322b27bf40d", size = 6072356, upload-time = "2025-08-05T11:50:56.079Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5a/75ab47d739b8a4a57fcea5b977ca329daa31a6aa05f87e7fb2a61e89e76c/impit-0.5.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8f9808ec58fcfaa89a85b23e597745e0d74d7d1f3cfd4331cefb0ba4fc5cd0a7", size = 6362932, upload-time = "2025-08-05T11:50:57.811Z" }, + { url = "https://files.pythonhosted.org/packages/28/68/54231959d0eaab26a1ccb979cf5d80bf946fab7a9e3420b64b8710138b5b/impit-0.5.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:714bea666bb5c01b81fac5d2ea2e78b87dc23c472da95b0420ce271722459c15", size = 6221015, upload-time = "2025-08-05T11:50:59.547Z" }, + { url = "https://files.pythonhosted.org/packages/17/3e/7f5dd147f10f787585c7a8bd365aa557c7b64bb5244f48c1c06589f838b2/impit-0.5.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfdbf369232f18096318f32f52f4d5947fc5165fc26da8c87c024656e3d5c2ff", size = 6072509, upload-time = "2025-08-05T11:51:01.265Z" }, + { url = "https://files.pythonhosted.org/packages/34/81/368a7f2886c6e5096a72663d446f3426c80cc20b160a34b697655b89d00f/impit-0.5.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:cf8b416075daf0c1bf630db35fe2ab6c277092a16673bf727728bf85b0463dc2", size = 6363396, upload-time = "2025-08-05T11:51:03.02Z" }, + { url = "https://files.pythonhosted.org/packages/85/ca/c46c607d9cf1dd7f9e1a8eb7ca4c0a66f661d054c853a47f7f3f805a2be9/impit-0.5.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:7d1aa08919502c901a6fbd68e6e5a5cfd81b534bf57f97ba1309315a12cf98eb", size = 6220976, upload-time = "2025-08-05T11:51:04.827Z" }, + { url = "https://files.pythonhosted.org/packages/79/e9/6e60a412edcfa28466849b917c76cd6f7650adbc09673057a8e94948ff41/impit-0.5.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6576af21ee097d489e6a669cb2688d74be86a120acbc5929eeb303ad93bf2fd9", size = 6363062, upload-time = "2025-08-05T11:51:06.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/b7/6c4d02b063e19b2f9511f3c85de2e04c8dc5d4223316c95ca5139663badd/impit-0.5.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:93a5f474cc9773ed720e0629f6df72180fb91ba209f63318b285a8df45b574bc", size = 6221354, upload-time = "2025-08-05T11:51:08.339Z" }, +] + [[package]] name = "iniconfig" version = "2.1.0" From 3c2e4bad4fd9780b182f863212cad08ae78108f7 Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Thu, 7 Aug 2025 22:39:27 +0000 Subject: [PATCH 09/15] clear --- tests/unit/test_client_timeouts.py | 4 ---- tests/unit/test_logging.py | 3 ++- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/tests/unit/test_client_timeouts.py b/tests/unit/test_client_timeouts.py index bc1efca2..c67118cb 100644 --- a/tests/unit/test_client_timeouts.py +++ b/tests/unit/test_client_timeouts.py @@ -150,8 +150,6 @@ def mock_request(*_args: Any, **kwargs: Any) -> Response: ] -# This test will probably need to be reworked or skipped when switching to `impit`. -# Without the mock library, it's difficult to reproduce, maybe with monkeypatch? @pytest.mark.parametrize( ('client_type', 'method', 'expected_timeout', 'kwargs'), _timeout_params, @@ -173,8 +171,6 @@ def test_specific_timeouts_for_specific_endpoints_sync( assert patch_request[0] == expected_timeout -# This test will probably need to be reworked or skipped when switching to `impit`. -# Without the mock library, it's difficult to reproduce, maybe with monkeypatch? @pytest.mark.parametrize( ('client_type', 'method', 'expected_timeout', 'kwargs'), _timeout_params, diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 315aba70..ff15aa56 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -115,8 +115,9 @@ def _streaming_log_handler(_request: Request) -> Response: """Handler for streaming log requests.""" def generate_logs() -> Iterator[bytes]: - for chunk in _MOCKED_ACTOR_LOGS: # noqa: UP028 + for chunk in _MOCKED_ACTOR_LOGS: yield chunk + time.sleep(0.01) total_size = sum(len(chunk) for chunk in _MOCKED_ACTOR_LOGS) From 513ea090f16e91b50184877d0f9b4bf7d5b340a0 Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Fri, 8 Aug 2025 00:24:51 +0000 Subject: [PATCH 10/15] remove respx --- pyproject.toml | 3 +-- uv.lock | 14 -------------- 2 files changed, 1 insertion(+), 16 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 57ade99a..679895f3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,7 +55,6 @@ dev = [ "pytest~=8.4.0", "pytest-httpserver>=1.1.3", "redbaron~=0.9.0", - "respx~=0.22.0", "ruff~=0.12.0", "setuptools", # setuptools are used by pytest but not explicitly required "types-colorama~=0.4.15.20240106", @@ -176,7 +175,7 @@ warn_unused_ignores = true exclude = [] [[tool.mypy.overrides]] -module = ["pandas", "respx"] +module = ["pandas"] ignore_missing_imports = true [tool.basedpyright] diff --git a/uv.lock b/uv.lock index 5f2b1cf3..316e4f3e 100644 --- a/uv.lock +++ b/uv.lock @@ -47,7 +47,6 @@ dev = [ { name = "pytest-timeout" }, { name = "pytest-xdist" }, { name = "redbaron" }, - { name = "respx" }, { name = "ruff" }, { name = "setuptools" }, { name = "types-colorama" }, @@ -77,7 +76,6 @@ dev = [ { name = "pytest-timeout", specifier = ">=2.4.0" }, { name = "pytest-xdist", specifier = "~=3.8.0" }, { name = "redbaron", specifier = "~=0.9.0" }, - { name = "respx", specifier = "~=0.22.0" }, { name = "ruff", specifier = "~=0.12.0" }, { name = "setuptools" }, { name = "types-colorama", specifier = "~=0.4.15.20240106" }, @@ -1064,18 +1062,6 
@@ wheels = [
     { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" },
 ]
 
-[[package]]
-name = "respx"
-version = "0.22.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "httpx" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" },
-]
-
 [[package]]
 name = "rply"
 version = "0.7.8"

From 8a353156a8e62a17c793d698a95a3977321bbac8 Mon Sep 17 00:00:00 2001
From: Max Bohomolov
Date: Fri, 8 Aug 2025 13:48:40 +0000
Subject: [PATCH 11/15] update test for error stream

---
 tests/unit/test_client_errors.py | 61 ++++++++++++++++----------------
 1 file changed, 30 insertions(+), 31 deletions(-)

diff --git a/tests/unit/test_client_errors.py b/tests/unit/test_client_errors.py
index fc1783e0..ca410ce4 100644
--- a/tests/unit/test_client_errors.py
+++ b/tests/unit/test_client_errors.py
@@ -1,19 +1,20 @@
 from __future__ import annotations
 
 import json
+import time
 from typing import TYPE_CHECKING
 
-import httpx
 import pytest
-import respx
+from werkzeug import Response
 
 from apify_client._errors import ApifyApiError
 from apify_client._http_client import HTTPClient, HTTPClientAsync
 
 if TYPE_CHECKING:
-    from collections.abc import AsyncIterator, Iterator
+    from collections.abc import Iterator
 
     from pytest_httpserver import HTTPServer
+    from werkzeug import Request
 
 _TEST_PATH = '/errors'
 _EXPECTED_MESSAGE = 'some_message'
@@ -41,6 +42,22 @@ def test_endpoint(httpserver: HTTPServer) -> str:
     return str(httpserver.url_for(_TEST_PATH))
 
 
+def streaming_handler(_request: Request) -> Response:
+    """Handler that streams the error response back one byte at a time."""
+
+    def generate_response() -> Iterator[bytes]:
+        for i in range(len(RAW_ERROR)):
+            yield RAW_ERROR[i : i + 1]
+            time.sleep(0.01)
+
+    return Response(
+        response=generate_response(),
+        status=403,
+        mimetype='application/octet-stream',
+        headers={'Content-Length': str(len(RAW_ERROR))},
+    )
+
+
 def test_client_apify_api_error_with_data(test_endpoint: str) -> None:
     """Test that client correctly throws ApifyApiError with error data from response."""
     client = HTTPClient()
@@ -65,51 +82,33 @@ async def test_async_client_apify_api_error_with_data(test_endpoint: str) -> Non
     assert e.value.data == _EXPECTED_DATA
 
 
-def test_client_apify_api_error_streamed() -> None:
+def test_client_apify_api_error_streamed(httpserver: HTTPServer) -> None:
     """Test that client correctly throws ApifyApiError when the response has stream."""
     error = json.loads(RAW_ERROR.decode())
 
-    class ByteStream(httpx._types.SyncByteStream):
-        def __iter__(self) -> Iterator[bytes]:
-            yield RAW_ERROR
-
-        def close(self) -> None:
-            pass
-
-    stream_url = 'http://some-stream-url.com'
-
     client = HTTPClient()
-    with respx.mock() as respx_mock:
-        
respx_mock.get(url=stream_url).mock(return_value=httpx.Response(stream=ByteStream(), status_code=403)) - with pytest.raises(ApifyApiError) as e: - client.call(method='GET', url=stream_url, stream=True, parse_response=False) + httpserver.expect_request('/stream_error').respond_with_handler(streaming_handler) + + with pytest.raises(ApifyApiError) as e: + client.call(method='GET', url=httpserver.url_for('/stream_error'), stream=True, parse_response=False) assert e.value.message == error['error']['message'] assert e.value.type == error['error']['type'] -async def test_async_client_apify_api_error_streamed() -> None: +async def test_async_client_apify_api_error_streamed(httpserver: HTTPServer) -> None: """Test that async client correctly throws ApifyApiError when the response has stream.""" error = json.loads(RAW_ERROR.decode()) - class AsyncByteStream(httpx._types.AsyncByteStream): - async def __aiter__(self) -> AsyncIterator[bytes]: - yield RAW_ERROR - - async def aclose(self) -> None: - pass - - stream_url = 'http://some-stream-url.com' - client = HTTPClientAsync() - with respx.mock() as respx_mock: - respx_mock.get(url=stream_url).mock(return_value=httpx.Response(stream=AsyncByteStream(), status_code=403)) - with pytest.raises(ApifyApiError) as e: - await client.call(method='GET', url=stream_url, stream=True, parse_response=False) + httpserver.expect_request('/stream_error').respond_with_handler(streaming_handler) + + with pytest.raises(ApifyApiError) as e: + await client.call(method='GET', url=httpserver.url_for('/stream_error'), stream=True, parse_response=False) assert e.value.message == error['error']['message'] assert e.value.type == error['error']['type'] From 19f6dad1dc70c0b5b8dd5d78f2e234bfb597681b Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Mon, 11 Aug 2025 16:30:16 +0000 Subject: [PATCH 12/15] up impit --- pyproject.toml | 2 +- src/apify_client/_http_client.py | 6 +- tests/unit/test_client_timeouts.py | 8 +-- uv.lock | 92 +++++++++++++++--------------- 4 files changed, 51 insertions(+), 57 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e15a8fd6..6efcf5e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ "apify-shared<2.0.0", "colorama>=0.4.0", "httpx>=0.25", - "impit>=0.5.1", + "impit>=0.5.2", "more_itertools>=10.0.0", ] diff --git a/src/apify_client/_http_client.py b/src/apify_client/_http_client.py index f0df3a24..d6e2f1f6 100644 --- a/src/apify_client/_http_client.py +++ b/src/apify_client/_http_client.py @@ -198,8 +198,7 @@ def _make_request(stop_retrying: Callable, attempt: int) -> impit.Response: # If response status is < 300, the request was successful, and we can return the result if response.status_code < 300: # noqa: PLR2004 logger.debug('Request successful', extra={'status_code': response.status_code}) - # TODODO Impit does not support setting custom attributes on the response object, - if not stream and response.content == b'A unique condition for checking types. 
ABRACADABRA': + if not stream: _maybe_parsed_body = ( self._maybe_parse_response(response) if parse_response else response.content ) @@ -284,8 +283,7 @@ async def _make_request(stop_retrying: Callable, attempt: int) -> impit.Response # If response status is < 300, the request was successful, and we can return the result if response.status_code < 300: # noqa: PLR2004 logger.debug('Request successful', extra={'status_code': response.status_code}) - # TODODO Impit does not support setting custom attributes on the response object, - if not stream and response.content == b'A unique condition for checking types. ABRACADABRA': + if not stream: _maybe_parsed_body = ( self._maybe_parse_response(response) if parse_response else response.content ) diff --git a/tests/unit/test_client_timeouts.py b/tests/unit/test_client_timeouts.py index bc1efca2..70d09b90 100644 --- a/tests/unit/test_client_timeouts.py +++ b/tests/unit/test_client_timeouts.py @@ -60,9 +60,7 @@ async def mock_request(*_args: Any, **kwargs: Any) -> Response: if should_raise: raise TimeoutException - response = Response - response.status_code = 200 - return response # type: ignore[return-value] + return Response(status_code=200) monkeypatch.setattr('impit.AsyncClient.request', mock_request) @@ -98,9 +96,7 @@ def mock_request(*_args: Any, **kwargs: Any) -> Response: if should_raise: raise TimeoutException - response = Response - response.status_code = 200 - return response # type: ignore[return-value] + return Response(status_code=200) monkeypatch.setattr('impit.Client.request', mock_request) diff --git a/uv.lock b/uv.lock index cb54c786..b7261224 100644 --- a/uv.lock +++ b/uv.lock @@ -58,7 +58,7 @@ requires-dist = [ { name = "apify-shared", specifier = "<2.0.0" }, { name = "colorama", specifier = ">=0.4.0" }, { name = "httpx", specifier = ">=0.25" }, - { name = "impit", specifier = ">=0.5.1" }, + { name = "impit", specifier = ">=0.5.2" }, { name = "more-itertools", specifier = ">=10.0.0" }, ] @@ -573,51 +573,51 @@ wheels = [ [[package]] name = "impit" -version = "0.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/ba/975dfb789380932210d7e40cd3809ef2a9d6eb55b6e7a4edbbe93b56eaef/impit-0.5.1.tar.gz", hash = "sha256:11c8e6333ac7ac1d5db43f36503b7f488c2bc498aa3227daa31c0c4a4d072efe", size = 88421, upload-time = "2025-08-05T11:51:09.791Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/af/ef54998f5bcbd511b88b54498972349930f834bbbee763ed6d19cb120133/impit-0.5.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4d581912fa6160aca6e2e3b65c22a0a96267b14bfd61de8db650762ecae39fc3", size = 3840802, upload-time = "2025-08-05T11:49:58.169Z" }, - { url = "https://files.pythonhosted.org/packages/52/a7/1f24ef1dd6dd40a9332d49c74ac95af68f62949fd4f2498087e8f7cbf7c9/impit-0.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4981d4ceb726f5fdf51586dd6363c41b11b77f67dba41609ad71c84fdd7a8671", size = 3667962, upload-time = "2025-08-05T11:49:59.933Z" }, - { url = "https://files.pythonhosted.org/packages/4e/24/2c7b9a32f4d6df7bdc194f5ae9e3ad28a577b6392ddc13e3639aa107ba71/impit-0.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b720fb96a7efe140ad2e103c8389016b1ca2c93037c968f804d6c9f8d62ffb56", size = 6071842, upload-time = "2025-08-05T11:50:01.846Z" }, - { url = "https://files.pythonhosted.org/packages/1c/37/71e9e251b794403959f3159e5b6c5b808bd36333e8e528bfcc0d2d0d998b/impit-0.5.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:9d57c371438143a57b7f8c27997b24f3543827ea2ff13dfcd85146a146ecfe74", size = 6363182, upload-time = "2025-08-05T11:50:03.49Z" }, - { url = "https://files.pythonhosted.org/packages/38/20/1937db3bca25fb623732b1ef9e071abee5db9456dcce5a313948a2089a75/impit-0.5.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:575b8e25a1c59634916777f648b8a1a27e5a98789cdd91fea9a9026e86232f0f", size = 6223927, upload-time = "2025-08-05T11:50:05.126Z" }, - { url = "https://files.pythonhosted.org/packages/5e/07/1233b7b965f368d3105f07fb979fb962628326d9948a6475b1a376acad05/impit-0.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:71bd03a60e44f0a1c44dc8274deb30e53204789edf22ed14c898ab146192c9f0", size = 3876835, upload-time = "2025-08-05T11:50:06.585Z" }, - { url = "https://files.pythonhosted.org/packages/63/83/be68a3389a217f6ce81b081a2c5dd82027e09d5d3e1c781150c3030fe339/impit-0.5.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9fbe14ab26a60a7711cce5e7ba7fa3c0b90eb2749bd693e9756c67eafdbd96a7", size = 3840861, upload-time = "2025-08-05T11:50:08.461Z" }, - { url = "https://files.pythonhosted.org/packages/c6/f2/ce1f9903a373fd06a40888588025779b019ebce96cee8346477b301a0096/impit-0.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1bc1be288cfa40a7d0ba8aca946a4f071d8dbe86d9e9bc00af12f2fd4b068043", size = 3667760, upload-time = "2025-08-05T11:50:10.164Z" }, - { url = "https://files.pythonhosted.org/packages/92/bc/8ddf9c6a03afcb8df9b7a2e8e2e9abbc06f7be392f0fe9e3408da5f559a3/impit-0.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113f3bd183770ff4d97609882605dd14463ff327d53763c9f24aa92d40c749b", size = 6072129, upload-time = "2025-08-05T11:50:11.727Z" }, - { url = "https://files.pythonhosted.org/packages/ad/23/4b63e5178107e078425f06683f4a7daa1da61815fd9c88fb7b53bbfb8333/impit-0.5.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a2e53b1f4dc436564618aaafe9e43e2cd3e739a29a7ad4fbf03e1c284cb8a72c", size = 6363040, upload-time = "2025-08-05T11:50:13.602Z" }, - { url = "https://files.pythonhosted.org/packages/a3/6d/45a03a5c517f6db81e2f175abd656798ba3145c53b504e5dbac773e22f6c/impit-0.5.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f10ce3a39e97427b7b0b97ce39345bdddb5aa0ca639b6db3dd9b0155c5179e1c", size = 6223810, upload-time = "2025-08-05T11:50:15.475Z" }, - { url = "https://files.pythonhosted.org/packages/3a/96/519ad5eac1aaa87c1735186a57cf72ab0cf5f8cad340e4d3cd495161b66b/impit-0.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:30f82371edd542ff9e35d61cf05716ebf9b60dd6711ba99154fa21b1e06e476f", size = 3876980, upload-time = "2025-08-05T11:50:17.439Z" }, - { url = "https://files.pythonhosted.org/packages/3d/91/4f1ee59937d7686314ad049c3fa1866cb988e8ac3ed83c6624cd00b68faa/impit-0.5.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:23098462c9d26298f845c8e921203c794846870553e3c0ecef2f2bc89a0fbd8a", size = 3840403, upload-time = "2025-08-05T11:50:19.358Z" }, - { url = "https://files.pythonhosted.org/packages/0d/7f/f186e15efd4b0a5568956fffdbda9489826d0dd2cda770df3e0cca245449/impit-0.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d8a10f9ea91ab8c383469e1e0aaf311f489f89fbb52f2115964293af7c97cca", size = 3667029, upload-time = "2025-08-05T11:50:20.975Z" }, - { url = "https://files.pythonhosted.org/packages/ee/33/dca7f7b705e04dfd1ae5ced5e1042ccf105a7681cdc8ad1f0569e109b7f9/impit-0.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6d2aa1cca05df9e6ce9cbb875700af4e6bd26367fa34a40049801b23b73899a", size = 6071269, upload-time = 
"2025-08-05T11:50:22.567Z" }, - { url = "https://files.pythonhosted.org/packages/15/e8/3a5a22aca5cd7da1efc5aec5561c788b20e927ab5c0e798a12b5ec6d8897/impit-0.5.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:43d57c1df017ddc39f4db83c69271571887f4e818f2ec96a85e0d942d38dda8e", size = 6361790, upload-time = "2025-08-05T11:50:24.441Z" }, - { url = "https://files.pythonhosted.org/packages/b8/a1/4b30e8c098495f8a7f4c9aeac099f9d263e638855329e0fcb5e8afe1ebad/impit-0.5.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad7aa18d9ebc9bc8169e86a6644f5c219d3ee9fc192b2fb6d02df06501f4ccba", size = 6218223, upload-time = "2025-08-05T11:50:25.921Z" }, - { url = "https://files.pythonhosted.org/packages/d2/76/07a2ebaf30db8c516a29c3068788082d8a0e7a9a66950e3bb50a43d105bf/impit-0.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:b304fe7712c7d1f986dc336a026db956d218ec5dc9e744b37e0751a7308b658c", size = 3876101, upload-time = "2025-08-05T11:50:27.514Z" }, - { url = "https://files.pythonhosted.org/packages/72/2d/a169d6d23d8869bc46a4f57b9733244141b488f19d6e14da90f3d555b22b/impit-0.5.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ac33d1b7753f8e0d6ead5936e239c4021a5d9ca3d30f66c6c466202682e5584b", size = 3840282, upload-time = "2025-08-05T11:50:28.898Z" }, - { url = "https://files.pythonhosted.org/packages/de/c2/6e4b2f24d055d9b3e3eff4ff593af88fd09cefb9edd6778edbe366986d05/impit-0.5.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2fda1d0a6fa7a1efb9fde9fe38a369c81f586195dc57129a2c45d40be0cc7b76", size = 3666852, upload-time = "2025-08-05T11:50:30.935Z" }, - { url = "https://files.pythonhosted.org/packages/09/70/c4065d036e149125b0e5f416ec11507f192154038177abcbb1617ab60c84/impit-0.5.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd6df468676d3de83b4a1cbffdeb053e231fa50d5ec6769f26339008396954a3", size = 6071225, upload-time = "2025-08-05T11:50:32.516Z" }, - { url = "https://files.pythonhosted.org/packages/c8/42/711916dbb12317d5df8b177e394993fe7d0d11edf4e2272dce7019c3acdb/impit-0.5.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e6ecffeb3fbbbdd412ec6949300f413b2944770e81c0b4074e8d448ed3f9d731", size = 6361700, upload-time = "2025-08-05T11:50:34.78Z" }, - { url = "https://files.pythonhosted.org/packages/58/8d/52d3c7874c2b267abdcfbc69f6971826f69e91112e989e39972d11079768/impit-0.5.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3de25bce67bb0c7b2f693a4412906ad87186dba55f930f260c77c91b27a551df", size = 6218178, upload-time = "2025-08-05T11:50:36.988Z" }, - { url = "https://files.pythonhosted.org/packages/dd/e2/45b171aef7c0c69de188ebe6ad06d5427ba63f74fad56fb02a1e8c45e253/impit-0.5.1-cp313-cp313-win_amd64.whl", hash = "sha256:e0be67803b60edc608ee5d78be77d0d1858d7ae6fff538b41d21069b35b0626a", size = 3876129, upload-time = "2025-08-05T11:50:38.638Z" }, - { url = "https://files.pythonhosted.org/packages/bf/e6/c8d539d0400404595352ff5d350f0f1ee8a22efbaaf266495b279250a369/impit-0.5.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:490d00488ff4067efdd1da0790bba45c9220e99054a83bceb131147cbfaf3089", size = 6362113, upload-time = "2025-08-05T11:50:40.354Z" }, - { url = "https://files.pythonhosted.org/packages/f9/d6/37f072e052355925ddec1300792e4f927e4d3d7125d81616ec381c60c64e/impit-0.5.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e3ff31bc3ae6991cd5747c9184d469a5a47b4e9b9b5320891d3ea12068bb0f4", size = 6223961, upload-time = "2025-08-05T11:50:42.184Z" }, - { url = 
"https://files.pythonhosted.org/packages/97/39/8ba656a669cbe4c7349b7f1313c2c63479b660654dcd67e2c574e3116f70/impit-0.5.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8674596577662af97b5b93917b7431c2c9ea78f0288de697e46b8cf3af7484", size = 6071438, upload-time = "2025-08-05T11:50:43.95Z" }, - { url = "https://files.pythonhosted.org/packages/79/48/c6248ac47e96d3866508215e13cfa5ae9d66b9061b2a9979aa1eeb6c9556/impit-0.5.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e6afdd777d1f3a970e442167d8ea57e9e3a049ca55ae414c1da103576174d137", size = 3841265, upload-time = "2025-08-05T11:50:45.586Z" }, - { url = "https://files.pythonhosted.org/packages/65/da/8a29032edf12cd7ad980707d63c87c1084fde9a24994689de599a96e5e0a/impit-0.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d626da45fea11ff67b710ccdc2c086b680ee7b8ebadfbe50f3c10d60205e9a53", size = 3668536, upload-time = "2025-08-05T11:50:47.241Z" }, - { url = "https://files.pythonhosted.org/packages/48/fa/6a71568e36c8f6238f2037db48bff66b264af44d74f5b76077b6ac10bf62/impit-0.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e919e709a041f74fc42afd85e6abd4d17b650dbb5104388a2ee6099bffeb3f93", size = 6072434, upload-time = "2025-08-05T11:50:48.88Z" }, - { url = "https://files.pythonhosted.org/packages/37/57/82a2f3afd297cad98e19767eb959aa85ef31af6f7b723db7de97d35ff83c/impit-0.5.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9cb8e9c6e3221ec6205633e002fc42f20b28f95af5ce75b90a243c3840ea89c9", size = 6363677, upload-time = "2025-08-05T11:50:50.799Z" }, - { url = "https://files.pythonhosted.org/packages/70/4a/83ae4e89981d7536858dc1e4fa9aa009ee749014d78cb83b1f89a5d7bcd7/impit-0.5.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a73785ff7626eebbdc0b09f5e015a1274111807b5573603e725695a95b43786d", size = 6224149, upload-time = "2025-08-05T11:50:52.58Z" }, - { url = "https://files.pythonhosted.org/packages/91/3b/62e5c7c2e3945872573fd70845a2b8fe3ac6aaee4a9c8022102aa37b6fd2/impit-0.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:547b48c388f93408c46717588b1fa2867f13eed5bbb139d440733b2adc5d59af", size = 3877436, upload-time = "2025-08-05T11:50:54.284Z" }, - { url = "https://files.pythonhosted.org/packages/10/10/42a9f94205688b64b613a87558673cf9d7f78566129ea2c9ff12119ec9f6/impit-0.5.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad3cd69078cd0d2c845b4c4a46098889db1dbc4ec943467c95ff5322b27bf40d", size = 6072356, upload-time = "2025-08-05T11:50:56.079Z" }, - { url = "https://files.pythonhosted.org/packages/8e/5a/75ab47d739b8a4a57fcea5b977ca329daa31a6aa05f87e7fb2a61e89e76c/impit-0.5.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8f9808ec58fcfaa89a85b23e597745e0d74d7d1f3cfd4331cefb0ba4fc5cd0a7", size = 6362932, upload-time = "2025-08-05T11:50:57.811Z" }, - { url = "https://files.pythonhosted.org/packages/28/68/54231959d0eaab26a1ccb979cf5d80bf946fab7a9e3420b64b8710138b5b/impit-0.5.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:714bea666bb5c01b81fac5d2ea2e78b87dc23c472da95b0420ce271722459c15", size = 6221015, upload-time = "2025-08-05T11:50:59.547Z" }, - { url = "https://files.pythonhosted.org/packages/17/3e/7f5dd147f10f787585c7a8bd365aa557c7b64bb5244f48c1c06589f838b2/impit-0.5.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfdbf369232f18096318f32f52f4d5947fc5165fc26da8c87c024656e3d5c2ff", size = 6072509, upload-time = "2025-08-05T11:51:01.265Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/81/368a7f2886c6e5096a72663d446f3426c80cc20b160a34b697655b89d00f/impit-0.5.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:cf8b416075daf0c1bf630db35fe2ab6c277092a16673bf727728bf85b0463dc2", size = 6363396, upload-time = "2025-08-05T11:51:03.02Z" }, - { url = "https://files.pythonhosted.org/packages/85/ca/c46c607d9cf1dd7f9e1a8eb7ca4c0a66f661d054c853a47f7f3f805a2be9/impit-0.5.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:7d1aa08919502c901a6fbd68e6e5a5cfd81b534bf57f97ba1309315a12cf98eb", size = 6220976, upload-time = "2025-08-05T11:51:04.827Z" }, - { url = "https://files.pythonhosted.org/packages/79/e9/6e60a412edcfa28466849b917c76cd6f7650adbc09673057a8e94948ff41/impit-0.5.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6576af21ee097d489e6a669cb2688d74be86a120acbc5929eeb303ad93bf2fd9", size = 6363062, upload-time = "2025-08-05T11:51:06.627Z" }, - { url = "https://files.pythonhosted.org/packages/90/b7/6c4d02b063e19b2f9511f3c85de2e04c8dc5d4223316c95ca5139663badd/impit-0.5.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:93a5f474cc9773ed720e0629f6df72180fb91ba209f63318b285a8df45b574bc", size = 6221354, upload-time = "2025-08-05T11:51:08.339Z" }, +version = "0.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/b9/713caad9ebb3fe3d016b6715a8f2fa8dad070159907b36bb743ea6a13919/impit-0.5.2.tar.gz", hash = "sha256:027641ca72c807372101ae4c3533fc2c3c837413ead4b7461a044ec5b52f3b8e", size = 89508, upload-time = "2025-08-11T15:51:00.009Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/54/48f04b0ceeeaa8ed569c61e032448fad2960cd6c942ab25d86ffcf1d1661/impit-0.5.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:06a0fc3a2d87ea2b4e43c7c109aebe94ff77135685874e9371df07504306367d", size = 3842507, upload-time = "2025-08-11T15:49:53.199Z" }, + { url = "https://files.pythonhosted.org/packages/f3/00/22030ddbc0af4fc5c09049567922288dac7b72d07853b0a8637e090fc97a/impit-0.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f970b8f0d5f6ab06bbc7b2cb19f027d8126e9123d9fd566de552e6ad7e015738", size = 3670030, upload-time = "2025-08-11T15:49:55.175Z" }, + { url = "https://files.pythonhosted.org/packages/af/28/6712b06553576c2a014229be5fc89541afbd28769c525ae82b9212b4aadd/impit-0.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e45512070d2d8eb4262a2ff29475326b507df7b10a74dceb3022045ecf851e6b", size = 6075245, upload-time = "2025-08-11T15:49:56.917Z" }, + { url = "https://files.pythonhosted.org/packages/01/c0/8efb430c964b6fd0d51b695ac99e76c421d740340257be5f24673ab2705c/impit-0.5.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:14a49fbfec6f10449758adfefb142e5df4a8f8acb0368a4f0a4e7e41e0d0ce3d", size = 6375009, upload-time = "2025-08-11T15:49:58.699Z" }, + { url = "https://files.pythonhosted.org/packages/0c/10/b3b66bd0e1867a1b9d2e6b0834154bf787d8750acca445e0f98dbce95f96/impit-0.5.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15b2454fe01980cc078d07c47451eb12b4965b8c06cf37d0f7b0d14d1a17f3b2", size = 6223166, upload-time = "2025-08-11T15:50:00.617Z" }, + { url = "https://files.pythonhosted.org/packages/70/f0/0011b88aca3047e1bb878dd1be417939546eeb69e348d82249bf77b58e1c/impit-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:a315e931fc9dc4063694c36a31eb54d0bdedb984d5f7ee3af2f1e9ce9597417d", size = 3878639, upload-time = "2025-08-11T15:50:02.218Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/73/49efd83800151e3e4fda9ee4c6195d645cc0f9d4f154b0fb8c469fc53097/impit-0.5.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:1d94c20d167942d0cc2728ffd69a87e0dfbfe7c8610f924f7628b3c8d2e1a63a", size = 3842528, upload-time = "2025-08-11T15:50:04.312Z" }, + { url = "https://files.pythonhosted.org/packages/e1/c9/2a810dbb05c6a706ec57e69ddce178cf04f6a047f1ca628a412285cd99fa/impit-0.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:91f0ab2b18134cd9f73d0d7ed0954144e3fce01048267d76fa36e98361e7c01e", size = 3669966, upload-time = "2025-08-11T15:50:05.72Z" }, + { url = "https://files.pythonhosted.org/packages/b6/05/0a954a0ed83445a6e0c667cc20939f879c4ff15a4e45fd04099636ce75d6/impit-0.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a0d9bb6038da12dbb4cb98fa00d0ec460ac022e5c91b0a4947587919e57997b", size = 6075012, upload-time = "2025-08-11T15:50:07.203Z" }, + { url = "https://files.pythonhosted.org/packages/d7/b3/b5189879471db1077d105a4ef33c9eb2c8a578ca1dfd526b1589c8d86a2d/impit-0.5.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a3e68cfea0ae3ba47623924f0a94ad2ce9392310e38ebb8468de8bea0dfea2cb", size = 6374946, upload-time = "2025-08-11T15:50:08.695Z" }, + { url = "https://files.pythonhosted.org/packages/4f/fb/6f165cf5b4dd87354b64c9be58a7c1da006f7c2df2b048818f0ab8a9f233/impit-0.5.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:98186d654216cc6bbe615fa75f36359d4e865ebff919e6544f52f63d8c0f9a76", size = 6223234, upload-time = "2025-08-11T15:50:10.478Z" }, + { url = "https://files.pythonhosted.org/packages/91/cb/ab9db5d701c27bb567c90aca98e3feafe9fc30a1487498abbf4ed5781b1a/impit-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:576885567b26dba214369b0eb96c8a605833d019d9e0d9a703ef2f97ac99b7d8", size = 3878579, upload-time = "2025-08-11T15:50:11.898Z" }, + { url = "https://files.pythonhosted.org/packages/8a/10/1eaf80842668992ed0c09d426d8429ec4f5bee28351cc200c2db44cc140e/impit-0.5.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5c5f3c20c9bedbdda8f61ee30d0b1eee291882045028c3c784dcb13849bf92f8", size = 3842234, upload-time = "2025-08-11T15:50:13.396Z" }, + { url = "https://files.pythonhosted.org/packages/5b/84/5a862c24df552e8774c1f8a54391e9d46f7ed23cad445c01e9ea2bcb39d3/impit-0.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:22c6a7d313b98bcefe0d341912ea5cc9dc78c6f4a7ff5ebf0eef8abca0a74b8f", size = 3668917, upload-time = "2025-08-11T15:50:15.352Z" }, + { url = "https://files.pythonhosted.org/packages/36/96/f56f277c53434517a1e6542a55131fd97ed663f5abe6bd89fd750abf0f7a/impit-0.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:688302db9bca919c0facb865cbbcbdba6a40ee69785250c6317090d5d6a6bf48", size = 6074079, upload-time = "2025-08-11T15:50:16.822Z" }, + { url = "https://files.pythonhosted.org/packages/35/4d/ee9afc72406b11fe743132ccdc3d994dc23c3eacb92e37d6a73990aea2fe/impit-0.5.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f81654a9ba181cc8ff974d79063446a1072549c37a12f1184a215c18e73def21", size = 6373260, upload-time = "2025-08-11T15:50:18.366Z" }, + { url = "https://files.pythonhosted.org/packages/0d/73/80e39340b940296f04586c2b14992fb7f43b55be630e3a5e78f106473fbe/impit-0.5.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd10a83f7e848fe8e7ae965615397641334eb036d5c3bbc1cdcef709dd42a9d3", size = 6222186, upload-time = "2025-08-11T15:50:19.91Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/43/13e5f382ab37cdd0ff0f9335f6f3eddc67fecc36449a32a5ce6f1a18cded/impit-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:d273022353c640af064f51e25192fe4a71b84ffc451e57c92c632c4bb75af2c9", size = 3877409, upload-time = "2025-08-11T15:50:21.345Z" }, + { url = "https://files.pythonhosted.org/packages/29/7b/edf5661c19e3fbd257b525d22ae041ffb9704452591ab670962b834a73c4/impit-0.5.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:160c43a7affa1f530dd9a18df0584c5a38c3081d6574ce544b06a1f4736b20b7", size = 3841954, upload-time = "2025-08-11T15:50:23.173Z" }, + { url = "https://files.pythonhosted.org/packages/08/f3/f718e3c6e7e8f0ad256588a00114e6ed9134160fd4ae32556db2c3a126e6/impit-0.5.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:01a0c99566e1d198b83d007545c36cb578786266bda7b203841d9cd910f91516", size = 3668427, upload-time = "2025-08-11T15:50:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/a2/cf/d23eaf20f300c6298769b588ca660cc590ffaa21dd13a0c2700dcd96a226/impit-0.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cf8e8d5322980face7e5824fcc4c000fd842d90c3bc58d2fd2fec3e08a8981", size = 6074706, upload-time = "2025-08-11T15:50:26.496Z" }, + { url = "https://files.pythonhosted.org/packages/c2/d2/9d48b6f0e84af3875579c1a2ebbd678eefe29e4b2d8e2679a1a2f7693d6f/impit-0.5.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:109f7b6e45936b1e9abf8df8909753e94156cb475aa8f255a27297b6672ea1c7", size = 6373178, upload-time = "2025-08-11T15:50:28.196Z" }, + { url = "https://files.pythonhosted.org/packages/2b/53/6c839dd06427d0515ab343f011b053342ced17e6431d1095c8e0a271cb54/impit-0.5.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7bfc390c8e4b08e89be07a9027c020d9b0df97adc4fdc03224e69351e8be592c", size = 6222122, upload-time = "2025-08-11T15:50:29.761Z" }, + { url = "https://files.pythonhosted.org/packages/1d/01/e97e8dd4365e6dab52905e511d1fc4ce83a106d4f50419c2e33c40644906/impit-0.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:27bdb876336ed1de1ba6794da791761911f2f4c10242dc6ac639371080a5027a", size = 3877549, upload-time = "2025-08-11T15:50:31.362Z" }, + { url = "https://files.pythonhosted.org/packages/9d/16/40147ec10b0c37a05ee97aef5fce028fb22ef040c21a5912030687dfb0b7/impit-0.5.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ef34e005f128a83bf1e3e9da53551d508ac50baecaa2f32f2a77750581e2270f", size = 6373218, upload-time = "2025-08-11T15:50:32.87Z" }, + { url = "https://files.pythonhosted.org/packages/7c/eb/d16e74d2412d8660f6bfc6c01fbb86499c6af750e36ab30d0a0250a27aa2/impit-0.5.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:940a6a0fe277867da9efdf6c266d64ba8c625da447b2cdc6d30fe408c2c6b6ca", size = 6223153, upload-time = "2025-08-11T15:50:34.456Z" }, + { url = "https://files.pythonhosted.org/packages/a1/d6/14a531d4b433a095f252f3b89dc1c37932fa5480453afc5ed2381c055aeb/impit-0.5.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fdd250277c8bbfec4b84c88d5b129438d43e1a9642c0aaaf7885884bd1bc22", size = 6074030, upload-time = "2025-08-11T15:50:35.922Z" }, + { url = "https://files.pythonhosted.org/packages/65/be/fcae13746a8d9a1854ba446d0910faa24d0c0a32052d6da41cd794bcd310/impit-0.5.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:6e3a0982533f63c142e3852b52024ecc10d2af62d6f2314baf80cee18253870b", size = 3842910, upload-time = "2025-08-11T15:50:37.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/27/a2c34835f64154fc91b9aba87ecc9f642e38a44bf23d0b414271371f7db4/impit-0.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c36c9311ef74944e0ebf9cf2bb0cf660c3ea8521d2766fae2bc8ccfefeb423c2", size = 3670390, upload-time = "2025-08-11T15:50:38.938Z" }, + { url = "https://files.pythonhosted.org/packages/be/a8/cfc4d2bede1d4ae930a8b8ac3b86b7dc2fbe612fb41893c236bdcbc2c79c/impit-0.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68042849ed5178df7d2254371e70c84ab86974dbdd379e201a30897ee8e0a67c", size = 6075478, upload-time = "2025-08-11T15:50:40.61Z" }, + { url = "https://files.pythonhosted.org/packages/fb/6f/a4cf7b04b30ba8187985adac252436b74668503459e87ce4c2559cba8c42/impit-0.5.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5aeb9b58856d0587485c3f670d146d67e5219a48ec6842a7c2bc32a9578f3bf3", size = 6375760, upload-time = "2025-08-11T15:50:42.457Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ec/24ce94d93283ddc0cf5703f23d41a27e9779a6700f6385e0b3ea22c89e1a/impit-0.5.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d55aa40b8a8795358a2b51bb2e27aa2224de4a413816df431e6abdf9029955b", size = 6223400, upload-time = "2025-08-11T15:50:44.302Z" }, + { url = "https://files.pythonhosted.org/packages/04/10/0e29f978d97d9d56cfb652b4e9c832be068c5fbca3fc62e735c1a9ca7cbd/impit-0.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:e8011d3484d87f54c7919f971820edac8f10d368f6db49330aa64f1a7586019b", size = 3878261, upload-time = "2025-08-11T15:50:46.012Z" }, + { url = "https://files.pythonhosted.org/packages/74/f9/db47144fc13a1ed12bdf3e0897f05f80fe6c031ad7c18998f390e3a83e68/impit-0.5.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bfc41c77fccc4e6be2ed7647553d32c35aaa879b528550aee48222995fff42", size = 6075110, upload-time = "2025-08-11T15:50:47.425Z" }, + { url = "https://files.pythonhosted.org/packages/74/dc/a1c26e977e2b27f039744da58e5e8a08a21f183b8687a5dff02426fa6663/impit-0.5.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c831ff21e08a52d720798208eeb32cf8bdec3a933ae2c9fcbe98c7bf27a18d1a", size = 6375638, upload-time = "2025-08-11T15:50:48.966Z" }, + { url = "https://files.pythonhosted.org/packages/5b/8e/7fdc9ea49e74a1814ce0bc9e0d48d3968ce870abafbb14928de75f3526aa/impit-0.5.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:b961185932c83753d95432dd6885934984f3bdfa0991b18e3554205e42f955d6", size = 6223026, upload-time = "2025-08-11T15:50:50.749Z" }, + { url = "https://files.pythonhosted.org/packages/f8/02/2fdcfb828751939a39b9093449b53f04ab769c5634cca21524e3e15f89f3/impit-0.5.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b8c2f3c8dfa1c0e3defab72ba72fe10bdd31bb026f52a9a041f094b2f642f8d", size = 6075198, upload-time = "2025-08-11T15:50:52.662Z" }, + { url = "https://files.pythonhosted.org/packages/0d/41/0fdb5d609218d86daec041f77854b10ae78a14100466f612bda82c142bf8/impit-0.5.2-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:26ddc9925be14d18afe025f04bb88bb40c9e9d240436cfe640533149634ea143", size = 6375549, upload-time = "2025-08-11T15:50:54.078Z" }, + { url = "https://files.pythonhosted.org/packages/ef/cb/85adf6ede6f2eb5814c21972ca73747e2437be91f5d7f2bed5e0fcd66471/impit-0.5.2-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:b55b5ba8f17d0981d2a7d9209ef6d97cf7b8deb2f7e982c0b5852763974d34c6", size = 6223078, upload-time = "2025-08-11T15:50:55.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/e2/75a868f1cb15c1fa67c1864a10aa714cda6dfe7cb16454d0a21b3ec4fed6/impit-0.5.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2f9897a93666b16c7ae663a76cf5d94c27b23a90a69e6028ba059d27186f0a04", size = 6375441, upload-time = "2025-08-11T15:50:57.143Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ad/2760aca73d6c86e853a65031a1ded926ec8801a95681ee15b6c9b11757e2/impit-0.5.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0c2cb45d500c6fdace0c38a951f9ac19575371c11ceb593d46ba191ed63e9288", size = 6223254, upload-time = "2025-08-11T15:50:58.611Z" }, ] [[package]] From 2f5a174cb3cfa1b9c41af92eac7b42637a6d33fe Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Tue, 12 Aug 2025 17:03:39 +0000 Subject: [PATCH 13/15] upgrade docs --- docs/04_upgrading/upgrading_to_v2.md | 15 +++++++ pyproject.toml | 1 - uv.lock | 63 ---------------------------- website/sidebars.js | 22 +++++----- 4 files changed, 26 insertions(+), 75 deletions(-) create mode 100644 docs/04_upgrading/upgrading_to_v2.md diff --git a/docs/04_upgrading/upgrading_to_v2.md b/docs/04_upgrading/upgrading_to_v2.md new file mode 100644 index 00000000..bd426d47 --- /dev/null +++ b/docs/04_upgrading/upgrading_to_v2.md @@ -0,0 +1,15 @@ +--- +id: upgrading-to-v2 +title: Upgrading to v2 +--- + +This page summarizes the breaking changes between Apify Python API client v1.x and v2.0. + +## Python version support + + + +## Change underlying HTTP library + +In v2.0, the Apify Python API client switched from using `httpx` to [`impit`](https://github.com/apify/impit) as the underlying HTTP library. This +change was made to improve performance and reduce the bundle size. diff --git a/pyproject.toml b/pyproject.toml index 7b798f08..634e43b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,6 @@ keywords = ["apify", "api", "client", "automation", "crawling", "scraping"] dependencies = [ "apify-shared<2.0.0", "colorama>=0.4.0", - "httpx>=0.25", "impit>=0.5.2", "more_itertools>=10.0.0", ] diff --git a/uv.lock b/uv.lock index 0c742fd8..3e15ac6e 100644 --- a/uv.lock +++ b/uv.lock @@ -6,21 +6,6 @@ resolution-markers = [ "python_full_version < '3.10'", ] -[[package]] -name = "anyio" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, -] - [[package]] name = "apify-client" version = "1.12.3" @@ -28,7 +13,6 @@ source = { editable = "." 
} dependencies = [ { name = "apify-shared" }, { name = "colorama" }, - { name = "httpx" }, { name = "impit" }, { name = "more-itertools" }, ] @@ -57,7 +41,6 @@ dev = [ requires-dist = [ { name = "apify-shared", specifier = "<2.0.0" }, { name = "colorama", specifier = ">=0.4.0" }, - { name = "httpx", specifier = ">=0.25" }, { name = "impit", specifier = ">=0.5.2" }, { name = "more-itertools", specifier = ">=10.0.0" }, ] @@ -516,43 +499,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/55/588425bdbe8097b621db813e9b33f0a8a7257771683e0f5369c6c8eb66ab/griffe-1.11.0-py3-none-any.whl", hash = "sha256:dc56cc6af8d322807ecdb484b39838c7a51ca750cf21ccccf890500c4d6389d8", size = 137576, upload-time = "2025-08-07T18:23:34.859Z" }, ] -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, -] - [[package]] name = "identify" version = "2.6.12" @@ -1108,15 +1054,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, ] -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, -] - [[package]] name = "tomli" version = "2.2.1" diff --git a/website/sidebars.js b/website/sidebars.js index e45f2828..448eac9c 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -33,17 +33,17 @@ module.exports = { }, ], }, - // { - // type: 'category', - // label: 'Upgrading', - // collapsed: false, - // items: [ - // { - // type: 'autogenerated', - // dirName: '04_upgrading', - // }, - // ], - // }, + { + type: 'category', + label: 'Upgrading', + collapsed: false, + items: [ + { + type: 'autogenerated', + dirName: '04_upgrading', + }, + ], + }, { type: 'doc', id: 'changelog', From 3e712b500ac5a9e0f7124e599e22ade75ee14426 Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Tue, 12 Aug 2025 17:20:12 +0000 Subject: [PATCH 14/15] add TODO section for upgrade doc --- docs/04_upgrading/upgrading_to_v2.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/04_upgrading/upgrading_to_v2.md b/docs/04_upgrading/upgrading_to_v2.md index bd426d47..6869e40e 100644 --- a/docs/04_upgrading/upgrading_to_v2.md +++ b/docs/04_upgrading/upgrading_to_v2.md @@ -13,3 +13,7 @@ This page summarizes the breaking changes between Apify Python API client v1.x a In v2.0, the Apify Python API client switched from using `httpx` to [`impit`](https://github.com/apify/impit) as the underlying HTTP library. This change was made to improve performance and reduce the bundle size. + +## Update signature of methods + + From 3ab12a5d8ec3a0f8c9e994b319bc147fcd7952a3 Mon Sep 17 00:00:00 2001 From: Max Bohomolov Date: Tue, 12 Aug 2025 20:09:43 +0000 Subject: [PATCH 15/15] clear doc --- docs/04_upgrading/upgrading_to_v2.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/04_upgrading/upgrading_to_v2.md b/docs/04_upgrading/upgrading_to_v2.md index 6869e40e..9de34225 100644 --- a/docs/04_upgrading/upgrading_to_v2.md +++ b/docs/04_upgrading/upgrading_to_v2.md @@ -11,8 +11,7 @@ This page summarizes the breaking changes between Apify Python API client v1.x a ## Change underlying HTTP library -In v2.0, the Apify Python API client switched from using `httpx` to [`impit`](https://github.com/apify/impit) as the underlying HTTP library. This -change was made to improve performance and reduce the bundle size. +In v2.0, the Apify Python API client switched from using `httpx` to [`impit`](https://github.com/apify/impit) as the underlying HTTP library. ## Update signature of methods