From 82f6f33676a8c0a72a6e81c8b40a8e53468e4fc1 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Fri, 27 Jun 2025 00:35:12 +0300 Subject: [PATCH 01/23] refactor storages --- fast_cache_middleware/storages/__init__.py | 2 + .../storages/base_storage.py | 46 +++++++++++++++++++ .../in_memory_storage.py} | 45 ++---------------- tests/storages/test_in_memory_storage.py | 3 +- 4 files changed, 53 insertions(+), 43 deletions(-) create mode 100644 fast_cache_middleware/storages/__init__.py create mode 100644 fast_cache_middleware/storages/base_storage.py rename fast_cache_middleware/{storages.py => storages/in_memory_storage.py} (83%) diff --git a/fast_cache_middleware/storages/__init__.py b/fast_cache_middleware/storages/__init__.py new file mode 100644 index 0000000..bd14ee4 --- /dev/null +++ b/fast_cache_middleware/storages/__init__.py @@ -0,0 +1,2 @@ +from .base_storage import BaseStorage +from .in_memory_storage import InMemoryStorage \ No newline at end of file diff --git a/fast_cache_middleware/storages/base_storage.py b/fast_cache_middleware/storages/base_storage.py new file mode 100644 index 0000000..50e1916 --- /dev/null +++ b/fast_cache_middleware/storages/base_storage.py @@ -0,0 +1,46 @@ +import re +import typing as tp +from typing import TypeAlias + +from starlette.requests import Request +from starlette.responses import Response + +from fast_cache_middleware.exceptions import StorageError +from fast_cache_middleware.serializers import Metadata, BaseSerializer, JSONSerializer + +StoredResponse: TypeAlias = tp.Tuple[Response, Request, Metadata] + + +class BaseStorage: + """Base class for cache storage. + + Args: + serializer: Serializer for converting Response/Request to string/bytes + ttl: Cache lifetime in seconds. 
None for permanent storage + """ + + def __init__( + self, + serializer: tp.Optional[BaseSerializer] = None, + ttl: tp.Optional[tp.Union[int, float]] = None, + ) -> None: + self._serializer = serializer or JSONSerializer() + + if ttl is not None and ttl <= 0: + raise StorageError("TTL must be positive") + + self._ttl = ttl + + async def store( + self, key: str, response: Response, request: Request, metadata: Metadata + ) -> None: + raise NotImplementedError() + + async def retrieve(self, key: str) -> tp.Optional[StoredResponse]: + raise NotImplementedError() + + async def remove(self, path: re.Pattern) -> None: + raise NotImplementedError() + + async def close(self) -> None: + raise NotImplementedError() diff --git a/fast_cache_middleware/storages.py b/fast_cache_middleware/storages/in_memory_storage.py similarity index 83% rename from fast_cache_middleware/storages.py rename to fast_cache_middleware/storages/in_memory_storage.py index 8e04030..771e3db 100644 --- a/fast_cache_middleware/storages.py +++ b/fast_cache_middleware/storages/in_memory_storage.py @@ -6,52 +6,13 @@ from starlette.requests import Request from starlette.responses import Response -from typing_extensions import TypeAlias -from .exceptions import StorageError -from .serializers import BaseSerializer, JSONSerializer, Metadata +from fast_cache_middleware.exceptions import StorageError +from fast_cache_middleware.serializers import BaseSerializer, Metadata +from .base_storage import BaseStorage, StoredResponse logger = logging.getLogger(__name__) -# Define type for stored response -StoredResponse: TypeAlias = tp.Tuple[Response, Request, Metadata] - - -# Define base class for cache storage -class BaseStorage: - """Base class for cache storage. - - Args: - serializer: Serializer for converting Response/Request to string/bytes - ttl: Cache lifetime in seconds. 
None for permanent storage - """ - - def __init__( - self, - serializer: tp.Optional[BaseSerializer] = None, - ttl: tp.Optional[tp.Union[int, float]] = None, - ) -> None: - self._serializer = serializer or JSONSerializer() - - if ttl is not None and ttl <= 0: - raise StorageError("TTL must be positive") - - self._ttl = ttl - - async def store( - self, key: str, response: Response, request: Request, metadata: Metadata - ) -> None: - raise NotImplementedError() - - async def retrieve(self, key: str) -> tp.Optional[StoredResponse]: - raise NotImplementedError() - - async def remove(self, path: re.Pattern) -> None: - raise NotImplementedError() - - async def close(self) -> None: - raise NotImplementedError() - class InMemoryStorage(BaseStorage): """In-memory cache storage with TTL and LRU eviction support. diff --git a/tests/storages/test_in_memory_storage.py b/tests/storages/test_in_memory_storage.py index 2483287..af1c3bd 100644 --- a/tests/storages/test_in_memory_storage.py +++ b/tests/storages/test_in_memory_storage.py @@ -10,7 +10,8 @@ from starlette.responses import Response from fast_cache_middleware.serializers import Metadata -from fast_cache_middleware.storages import InMemoryStorage, StorageError +from fast_cache_middleware.storages import InMemoryStorage +from fast_cache_middleware.exceptions import StorageError @pytest.fixture From 2bce822b1132ac6c3d4f377fe0d2a8ad356a92a5 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 14:09:48 +0300 Subject: [PATCH 02/23] add redis storage --- .../storages/redis_storage.py | 107 ++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 fast_cache_middleware/storages/redis_storage.py diff --git a/fast_cache_middleware/storages/redis_storage.py b/fast_cache_middleware/storages/redis_storage.py new file mode 100644 index 0000000..1220c72 --- /dev/null +++ b/fast_cache_middleware/storages/redis_storage.py @@ -0,0 +1,107 @@ +import logging +import re +import time +import typing as tp + +from 
redis import Redis +from starlette.requests import Request +from starlette.responses import Response + +from fast_cache_middleware.exceptions import StorageError +from fast_cache_middleware.serializers import BaseSerializer, JSONSerializer, Metadata +from .base_storage import BaseStorage, StoredResponse + +logger = logging.getLogger(__name__) + + +class RedisStorage(BaseStorage): + def __init__( + self, + redis_client: Redis, + serializer: tp.Optional[BaseSerializer] = None, + ttl: tp.Optional[tp.Union[int, float]] = None, + namespace: str = "cache", + ) -> None: + super().__init__(serializer, ttl) + self._serializer = serializer or JSONSerializer() + + if ttl is not None and ttl <= 0: + raise StorageError("TTL must be positive") + + self._ttl = ttl + self._storage = redis_client + self._namespace = namespace + + async def store( + self, key: str, response: Response, request: Request, metadata: Metadata + ) -> None: + """ + Saves response to cache with TTL. Redis automatically remove cache if TTL expired. + """ + current_time = time.time() + + metadata["write_time"] = current_time + + value = self._serializer.dumps(response, request, metadata) + logger.debug("Serialized data: %s", value) + ttl = metadata.get("ttl", self._ttl) + logger.debug(f"TTL: %s", ttl) + + full_key = self._full_key(key) + logger.debug(f"Full key: %s", full_key) + + if await self._storage.exists(full_key): + logger.info("Element %s removed from cache - overwrite", key) + await self._storage.delete(full_key) + + await self._storage.set(full_key, value, ex=ttl) + logger.info("Data written to Redis") + + async def retrieve(self, key: str) -> tp.Optional[StoredResponse]: + """ + Get response from Redis. If TTL expired returns None. 
+ """ + full_key = self._full_key(key) + raw_data = await self._storage.get(full_key) + + if raw_data is None: + logger.debug("Key %s will be removed from Redis - TTL expired", full_key) + return None + + logger.debug(f"Takin data from Redis: %s", raw_data) + try: + return self._serializer.loads(raw_data) + except Exception as e: + logger.warning( + "Failed to deserialize cached response for key %s: %s", key, e + ) + return None + + async def remove(self, path: re.Pattern) -> None: + """ + Deleting the cache using the specified path + """ + raw = path.pattern + if raw.startswith("^"): + raw = raw[1:] + + pattern = self._full_key(str(raw.rstrip("$") + "/*")) + logger.debug(f"Removing key: %s", pattern) + + result = await self._storage.scan(match=pattern) + + if not result[1]: + logger.warning("The search did not find any matches") + return + + logger.debug(f"Result: %s", result[1]) + for value in result[1]: + await self._storage.delete(value) + logger.info(f"Key deleted from Redis: %s", value) + + async def close(self) -> None: + await self._storage.flushdb() + logger.debug("Cache storage cleared") + + def _full_key(self, key: str) -> str: + return f"{self._namespace}:{key}" From ae69834c27a465ffd85b2e5d596d3caccc04e45b Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 14:09:55 +0300 Subject: [PATCH 03/23] add redis example --- examples/redis_example.py | 188 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 188 insertions(+) create mode 100644 examples/redis_example.py diff --git a/examples/redis_example.py b/examples/redis_example.py new file mode 100644 index 0000000..3a3ffe5 --- /dev/null +++ b/examples/redis_example.py @@ -0,0 +1,188 @@ +"""Базовый пример использования FastCacheMiddleware с резолюцией роутов. + +Демонстрирует: +1. Анализ роутов на старте приложения +2. Извлечение кеш конфигураций из dependencies +3. Автоматическое кеширование GET запросов +4. 
Инвалидация кеша при модифицирующих запросах +""" + +import asyncio +import logging +import time +import typing as tp + +import uvicorn +from fastapi import FastAPI, HTTPException, Request +from pydantic import BaseModel, Field +from redis.asyncio import Redis + +from fast_cache_middleware import CacheConfig, CacheDropConfig, FastCacheMiddleware + +# Создаем FastAPI приложение +app = FastAPI(title="FastCacheMiddleware Redis Example") +redis = Redis(host="127.0.0.1", port=6379, db=0, decode_responses=True) + +# Добавляем middleware - он проанализирует роуты при первом запросе +app.add_middleware(FastCacheMiddleware, storage=redis) + + +def custom_key_func(request: Request) -> str: + # Ключ включает user-id из заголовков если есть + user_id = request.headers.get("user-id", "anonymous") + return f"{request.url.path}:user:{user_id}" + + +class User(BaseModel): + name: str + email: str + + +class FullUser(User): + user_id: int + + +class UserResponse(FullUser): + timestamp: float = Field(default_factory=time.time) + + +_USERS_STORAGE: tp.Dict[int, User] = { + 1: User(name="John Doe", email="john.doe@example.com"), + 2: User(name="Jane Doe", email="jane.doe@example.com"), +} + + +# Роуты с различными конфигурациями кеширования + + +@app.get("/") +async def root() -> tp.Dict[str, tp.Union[str, float]]: + """Корневой роут без кеширования.""" + return { + "message": "Without cache response", + "timestamp": time.time(), + "cache_duration": "0 seconds", + } + + +@app.get( + "/fast", dependencies=[CacheConfig(max_age=30)], openapi_extra={"x-cache-age": 30} +) +async def fast_endpoint() -> tp.Dict[str, tp.Union[str, float]]: + """Быстрый endpoint с коротким кешированием (30 секунд).""" + + return { + "message": "Fast cached response", + "timestamp": time.time(), + "cache_duration": "30 seconds", + } + + +@app.get( + "/slow", dependencies=[CacheConfig(max_age=300)], openapi_extra={"x-cache-age": 300} +) +async def slow_endpoint() -> tp.Dict[str, tp.Union[str, float]]: + 
"""Медленный endpoint с длинным кешированием (5 минут).""" + await asyncio.sleep(0.5) + + return { + "message": "Slow cached response", + "timestamp": time.time(), + "cache_duration": "300 seconds", + } + + +@app.get( + "/users/{user_id}", + dependencies=[CacheConfig(max_age=60, key_func=custom_key_func)], +) +async def get_user(user_id: int) -> UserResponse: + """Получение пользователя с кастомным ключом кеширования. + + Ключ кеша включает user-id из заголовков для персонализации. + """ + user = _USERS_STORAGE.get(user_id) + if not user: + raise HTTPException(status_code=404, detail="User not found") + + return UserResponse(user_id=user_id, name=user.name, email=user.email) + + +@app.get("/users", dependencies=[CacheConfig(max_age=30)]) +async def get_users() -> tp.List[UserResponse]: + return [ + UserResponse(user_id=user_id, name=user.name, email=user.email) + for user_id, user in _USERS_STORAGE.items() + ] + + +@app.post("/users/{user_id}", dependencies=[CacheDropConfig(paths=["/users"])]) +async def create_user(user_id: int, user_data: User) -> UserResponse: + """Создание пользователя с инвалидацией кеша. + + Этот POST запрос инвалидирует кеш для всех /users/* путей. 
+ """ + _USERS_STORAGE[user_id] = user_data + + return UserResponse(user_id=user_id, name=user_data.name, email=user_data.email) + + +@app.put("/users/{user_id}", dependencies=[CacheDropConfig(paths=["/users"])]) +async def update_user(user_id: int, user_data: User) -> UserResponse: + """Обновление пользователя с инвалидацией кеша.""" + user = _USERS_STORAGE.get(user_id) + if not user: + raise HTTPException(status_code=404, detail="User not found") + _USERS_STORAGE[user_id] = user_data + + return UserResponse(user_id=user_id, name=user_data.name, email=user_data.email) + + +@app.delete("/users/{user_id}", dependencies=[CacheDropConfig(paths=["/users"])]) +async def delete_user(user_id: int) -> UserResponse: + """Удаление пользователя с инвалидацией кеша.""" + user = _USERS_STORAGE.get(user_id) + if not user: + raise HTTPException(status_code=404, detail="User not found") + del _USERS_STORAGE[user_id] + + return UserResponse(user_id=user_id, name=user.name, email=user.email) + + +if __name__ == "__main__": + logging.basicConfig( + level=logging.DEBUG, + format="[-] %(asctime)s [%(levelname)s] %(module)s-%(lineno)d - %(message)s", + ) + + print("🚀 Запуск FastCacheMiddleware Redis Example...") + print("\n📋 Доступные endpoints:") + print(" GET / - корневой роут (без кеша)") + print(" GET /fast - короткий кеш (30s)") + print(" GET /slow - длинный кеш (5m)") + print(" GET /users/{user_id} - получение пользователя (кеш 3 мин)") + print(" GET /users - список пользователей (кеш 3 мин)") + print(" POST /users/{user_id} - создание пользователя (инвалидация /users)") + print( + " PUT /users/{user_id} - обновление пользователя (инвалидация /users и /users/*)" + ) + print(" DELETE /users/{user_id} - удаление пользователя (инвалидация /users)") + + print("\n🔧 Как работает middleware:") + print(" 1. При старте анализирует все роуты") + print(" 2. Извлекает CacheConfig/CacheDropConfig из dependencies") + print(" 3. При запросе находит соответствующий роут") + print(" 4. 
Применяет кеширование согласно конфигурации") + + print("\n💡 Для тестирования:") + print(" curl http://localhost:8000/users/1") + print(" curl http://localhost:8000/users") + print( + ' curl -X POST http://localhost:8000/users/1 -H "Content-Type: application/json" -d \'{"name": "John", "email": "john@example.com"}\'' + ) + print( + ' curl -X PUT http://localhost:8000/users/1 -H "Content-Type: application/json" -d \'{"name": "John Updated", "email": "john@example.com"}\'' + ) + print(" curl -X DELETE http://localhost:8000/users/1") + + uvicorn.run(app, host="127.0.0.1", port=8000) From 725496168174b12e8b825ca154ff3c95f4f3869a Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 14:10:55 +0300 Subject: [PATCH 04/23] add methods for dumps json --- fast_cache_middleware/serializers.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/fast_cache_middleware/serializers.py b/fast_cache_middleware/serializers.py index 2314e97..23e1b73 100644 --- a/fast_cache_middleware/serializers.py +++ b/fast_cache_middleware/serializers.py @@ -27,7 +27,25 @@ def is_binary(self) -> bool: class JSONSerializer(BaseSerializer): def dumps(self, response: Response, request: Request, metadata: Metadata) -> str: - raise NotImplementedError() # fixme: bad implementation now, maybe async? 
+ request_data = { + "method": request.method, + "url": str(request.url), + "headers": dict(request.headers), + } + response_data = { + "status_code": response.status_code, + "headers": dict(response.headers), + "content": response.body.decode("utf-8", errors="ignore") + if response.body + else None, + } + payload = { + "response": response_data, + "request": request_data, + "metadata": metadata, + } + + return json.dumps(payload) def loads(self, data: tp.Union[str, bytes]) -> StoredResponse: if isinstance(data, bytes): From 991c2d403ea10ead755dd78d7529692915c762c1 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 17:07:52 +0300 Subject: [PATCH 05/23] move storage fixtures to conftest --- tests/storages/conftest.py | 29 ++++++++++++++++++++++++ tests/storages/test_in_memory_storage.py | 22 ------------------ 2 files changed, 29 insertions(+), 22 deletions(-) create mode 100644 tests/storages/conftest.py diff --git a/tests/storages/conftest.py b/tests/storages/conftest.py new file mode 100644 index 0000000..b496203 --- /dev/null +++ b/tests/storages/conftest.py @@ -0,0 +1,29 @@ +import typing as tp + +import pytest +from starlette.requests import Request +from starlette.responses import Response + +from fast_cache_middleware.serializers import Metadata + + +@pytest.fixture +def mock_request() -> Request: + return Request(scope={"type": "http", "method": "GET", "path": "/test"}) + + +@pytest.fixture +def mock_response() -> Response: + return Response(content="test content", status_code=200) + + +@pytest.fixture +def mock_metadata() -> Metadata: + return {"test": "value"} + + +@pytest.fixture +def mock_store_data( + mock_request: Request, mock_response: Response, mock_metadata: Metadata +) -> tp.Tuple[Response, Request, Metadata]: + return mock_response, mock_request, mock_metadata diff --git a/tests/storages/test_in_memory_storage.py b/tests/storages/test_in_memory_storage.py index af1c3bd..c19aa7c 100644 --- 
a/tests/storages/test_in_memory_storage.py +++ b/tests/storages/test_in_memory_storage.py @@ -14,28 +14,6 @@ from fast_cache_middleware.exceptions import StorageError -@pytest.fixture -def mock_request() -> Request: - return Request(scope={"type": "http", "method": "GET", "path": "/test"}) - - -@pytest.fixture -def mock_response() -> Response: - return Response(content="test content", status_code=200) - - -@pytest.fixture -def mock_metadata() -> Metadata: - return {"test": "value"} - - -@pytest.fixture -def mock_store_data( - mock_request: Request, mock_response: Response, mock_metadata: Metadata -) -> tp.Tuple[Response, Request, Metadata]: - return mock_response, mock_request, mock_metadata - - @pytest.mark.parametrize( "max_size, ttl, expected_error", [ From 3e4126fda0b3e2ff387af6189eee2a00db794402 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 17:08:03 +0300 Subject: [PATCH 06/23] add tests for serializers.py --- tests/test_serializers.py | 102 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 tests/test_serializers.py diff --git a/tests/test_serializers.py b/tests/test_serializers.py new file mode 100644 index 0000000..19fcab7 --- /dev/null +++ b/tests/test_serializers.py @@ -0,0 +1,102 @@ +import json + +import pytest +from starlette import status +from starlette.requests import Request +from starlette.responses import Response + +from fast_cache_middleware.serializers import JSONSerializer, Metadata + + +@pytest.fixture +def test_request() -> Request: + return Request( + scope={ + "type": "http", + "method": "GET", + "path": "/test", + "headers": [(b"host", b"test.com"), (b"user-agent", b"pytest")], + } + ) + + +@pytest.fixture +def test_response() -> Response: + return Response( + content="hello world", status_code=status.HTTP_200_OK, headers={"X-Test": "yes"} + ) + + +@pytest.fixture +def test_metadata() -> Metadata: + return {"meta": "value", "ttl": 123} + + +def 
test_dumps_output_is_valid_json(test_request, test_response, test_metadata): + serializer = JSONSerializer() + + result = serializer.dumps(test_response, test_request, test_metadata) + parsed = json.loads(result) + + assert "response" in parsed + assert "request" in parsed + assert "metadata" in parsed + + assert parsed["response"]["status_code"] == status.HTTP_200_OK + assert parsed["response"]["content"] == "hello world" + assert parsed["response"]["headers"]["x-test"] == "yes" + + assert parsed["request"]["method"] == "GET" + assert parsed["request"]["headers"]["host"] == "test.com" + assert parsed["metadata"]["ttl"] == 123 + + +def test_loads_reconstructs_response_request( + test_request, test_response, test_metadata +): + serializer = JSONSerializer() + + json_data = serializer.dumps(test_response, test_request, test_metadata) + response, request, metadata = serializer.loads(json_data) + + assert isinstance(response, Response) + assert response.body == b"hello world" + assert response.status_code == status.HTTP_200_OK + assert response.headers["x-test"] == "yes" + + assert isinstance(request, Request) + assert request.method == "GET" + assert request.url.path == "/test" + assert request.headers["host"] == "test.com" + assert request.headers["user-agent"] == "pytest" + + assert metadata == test_metadata + + +def test_loads_accepts_bytes_input(test_request, test_response, test_metadata): + serializer = JSONSerializer() + + json_data_str = serializer.dumps(test_response, test_request, test_metadata) + json_data_bytes = json_data_str.encode("utf-8") + + response, request, metadata = serializer.loads(json_data_bytes) + + assert isinstance(response, Response) + assert isinstance(request, Request) + assert metadata == test_metadata + + +def test_dumps_handles_empty_body(test_request, test_metadata): + response = Response(status_code=status.HTTP_204_NO_CONTENT) + serializer = JSONSerializer() + + json_str = serializer.dumps(response, test_request, test_metadata) + 
parsed = json.loads(json_str) + + assert parsed["response"]["status_code"] == status.HTTP_204_NO_CONTENT + assert parsed["response"]["content"] is None + + +def test_is_binary_property(): + serializer = JSONSerializer() + assert not serializer.is_binary From 822adc125127d6feffc02fedd9491d6f196f86bb Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 17:08:17 +0300 Subject: [PATCH 07/23] get items from dict --- fast_cache_middleware/serializers.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/fast_cache_middleware/serializers.py b/fast_cache_middleware/serializers.py index 23e1b73..a8dfe67 100644 --- a/fast_cache_middleware/serializers.py +++ b/fast_cache_middleware/serializers.py @@ -1,5 +1,6 @@ import json import typing as tp +from urllib.parse import urlparse from starlette.requests import Request from starlette.responses import Response @@ -69,7 +70,6 @@ def loads(self, data: tp.Union[str, bytes]) -> StoredResponse: request_data = parsed["request"] # Create minimal scope for Request - from urllib.parse import urlparse parsed_url = urlparse(request_data["url"]) scope = { @@ -77,7 +77,9 @@ def loads(self, data: tp.Union[str, bytes]) -> StoredResponse: "method": request_data["method"], "path": parsed_url.path, "query_string": parsed_url.query.encode() if parsed_url.query else b"", - "headers": [[k.encode(), v.encode()] for k, v in request_data["headers"]], + "headers": [ + [k.encode(), v.encode()] for k, v in request_data["headers"].items() + ], } # Create empty receive function From 7b57ddf2fbece13703a8e8e160388589361486f7 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 17:08:56 +0300 Subject: [PATCH 08/23] import Redis storage --- fast_cache_middleware/storages/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/fast_cache_middleware/storages/__init__.py b/fast_cache_middleware/storages/__init__.py index bd14ee4..43d4afa 100644 --- 
a/fast_cache_middleware/storages/__init__.py +++ b/fast_cache_middleware/storages/__init__.py @@ -1,2 +1,3 @@ from .base_storage import BaseStorage -from .in_memory_storage import InMemoryStorage \ No newline at end of file +from .in_memory_storage import InMemoryStorage +from .redis_storage import RedisStorage From dc1aed3ca3aceb2e8d6f2dd94b1ad907a81c9d02 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 17:09:06 +0300 Subject: [PATCH 09/23] add redis storage tests --- tests/storages/test_redis_storage.py | 145 +++++++++++++++++++++++++++ 1 file changed, 145 insertions(+) create mode 100644 tests/storages/test_redis_storage.py diff --git a/tests/storages/test_redis_storage.py b/tests/storages/test_redis_storage.py new file mode 100644 index 0000000..6c10e23 --- /dev/null +++ b/tests/storages/test_redis_storage.py @@ -0,0 +1,145 @@ +import re +from unittest.mock import AsyncMock, MagicMock + +import pytest +from starlette.requests import Request +from starlette.responses import Response + +from fast_cache_middleware.exceptions import StorageError +from fast_cache_middleware.serializers import JSONSerializer +from fast_cache_middleware.storages import RedisStorage + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "ttl, expect_error", + [ + (60.0, None), + (None, None), + (-1, StorageError), + (0, StorageError), + ], +) +async def test_redis_storage_init_validation(ttl, expect_error): + mock_redis = AsyncMock() + + if expect_error: + with pytest.raises(expect_error): + RedisStorage(redis_client=mock_redis, ttl=ttl) + else: + storage = RedisStorage(redis_client=mock_redis, ttl=ttl) + assert storage._ttl == ttl + assert isinstance(storage._serializer, JSONSerializer) + + +@pytest.mark.asyncio +async def test_store_and_retrieve_works(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis, ttl=1) + + request = Request(scope={"type": "http", "method": "GET", "path": "/test"}) + response = Response(content="hello", 
status_code=200) + metadata = {} + + serialized_value = b"serialized" + storage._serializer.dumps = MagicMock(return_value=serialized_value) + storage._serializer.loads = MagicMock( + return_value=("deserialized_response", "req", {"meta": "data"}) + ) + + mock_redis.exists.return_value = False + + await storage.store("key1", response, request, metadata) + mock_redis.set.assert_awaited_with("cache:key1", serialized_value, ex=1) + + mock_redis.get.return_value = serialized_value + result = await storage.retrieve("key1") + + assert result == ("deserialized_response", "req", {"meta": "data"}) + + +@pytest.mark.asyncio +async def test_store_overwrites_existing_key(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis, ttl=10) + + request = Request(scope={"type": "http", "method": "GET", "path": "/overwrite"}) + response = Response(content="updated", status_code=200) + metadata = {} + + storage._serializer.dumps = MagicMock(return_value=b"new_value") + + mock_redis.exists.return_value = True + + await storage.store("existing_key", response, request, metadata) + + mock_redis.delete.assert_awaited_with("cache:existing_key") + mock_redis.set.assert_awaited_with("cache:existing_key", b"new_value", ex=10) + + +@pytest.mark.asyncio +async def test_retrieve_returns_none_on_missing_key(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis) + mock_redis.get.return_value = None + + result = await storage.retrieve("missing") + assert result is None + + +@pytest.mark.asyncio +async def test_retrieve_returns_none_on_deserialization_error(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis) + mock_redis.get.return_value = b"invalid" + + def raise_error(_): + raise ValueError("bad format") + + storage._serializer.loads = raise_error + + result = await storage.retrieve("corrupt") + assert result is None + + +@pytest.mark.asyncio +async def test_remove_by_regex(): + mock_redis = AsyncMock() + storage = 
RedisStorage(redis_client=mock_redis, namespace="myspace") + + pattern = re.compile(r"^/api/.*") + mock_redis.scan.return_value = (0, ["myspace:/api/test1", "myspace:/api/test2"]) + + await storage.remove(pattern) + + mock_redis.delete.assert_any_await("myspace:/api/test1") + mock_redis.delete.assert_any_await("myspace:/api/test2") + assert mock_redis.delete.await_count == 2 + + +@pytest.mark.asyncio +async def test_remove_with_no_matches_logs_warning(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis, namespace="myspace") + + pattern = re.compile(r"^/nothing.*") + mock_redis.scan.return_value = (0, []) + + await storage.remove(pattern) + mock_redis.delete.assert_not_called() + + +@pytest.mark.asyncio +async def test_close_flushes_database(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis) + + await storage.close() + mock_redis.flushdb.assert_awaited_once() + + +def test_full_key(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis, namespace="custom") + + assert storage._full_key("abc") == "custom:abc" From 7fcc18c866f47c2c8a46206d89bd22b974e70525 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 17:09:37 +0300 Subject: [PATCH 10/23] fix linters --- fast_cache_middleware/storages/base_storage.py | 2 +- fast_cache_middleware/storages/in_memory_storage.py | 1 + fast_cache_middleware/storages/redis_storage.py | 1 + tests/storages/test_in_memory_storage.py | 2 +- 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/fast_cache_middleware/storages/base_storage.py b/fast_cache_middleware/storages/base_storage.py index 50e1916..ddc681c 100644 --- a/fast_cache_middleware/storages/base_storage.py +++ b/fast_cache_middleware/storages/base_storage.py @@ -6,7 +6,7 @@ from starlette.responses import Response from fast_cache_middleware.exceptions import StorageError -from fast_cache_middleware.serializers import Metadata, BaseSerializer, JSONSerializer +from 
fast_cache_middleware.serializers import BaseSerializer, JSONSerializer, Metadata StoredResponse: TypeAlias = tp.Tuple[Response, Request, Metadata] diff --git a/fast_cache_middleware/storages/in_memory_storage.py b/fast_cache_middleware/storages/in_memory_storage.py index 771e3db..50d8033 100644 --- a/fast_cache_middleware/storages/in_memory_storage.py +++ b/fast_cache_middleware/storages/in_memory_storage.py @@ -9,6 +9,7 @@ from fast_cache_middleware.exceptions import StorageError from fast_cache_middleware.serializers import BaseSerializer, Metadata + from .base_storage import BaseStorage, StoredResponse logger = logging.getLogger(__name__) diff --git a/fast_cache_middleware/storages/redis_storage.py b/fast_cache_middleware/storages/redis_storage.py index 1220c72..ce01662 100644 --- a/fast_cache_middleware/storages/redis_storage.py +++ b/fast_cache_middleware/storages/redis_storage.py @@ -9,6 +9,7 @@ from fast_cache_middleware.exceptions import StorageError from fast_cache_middleware.serializers import BaseSerializer, JSONSerializer, Metadata + from .base_storage import BaseStorage, StoredResponse logger = logging.getLogger(__name__) diff --git a/tests/storages/test_in_memory_storage.py b/tests/storages/test_in_memory_storage.py index c19aa7c..58b9c95 100644 --- a/tests/storages/test_in_memory_storage.py +++ b/tests/storages/test_in_memory_storage.py @@ -9,9 +9,9 @@ from starlette.requests import Request from starlette.responses import Response +from fast_cache_middleware.exceptions import StorageError from fast_cache_middleware.serializers import Metadata from fast_cache_middleware.storages import InMemoryStorage -from fast_cache_middleware.exceptions import StorageError @pytest.mark.parametrize( From e39538fd0eec16f010e57aa9262cb04744468b31 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 17:18:29 +0300 Subject: [PATCH 11/23] mypy fix --- fast_cache_middleware/serializers.py | 8 +++++--- tests/storages/test_redis_storage.py | 3 ++- 2 files 
changed, 7 insertions(+), 4 deletions(-) diff --git a/fast_cache_middleware/serializers.py b/fast_cache_middleware/serializers.py index a8dfe67..cdaf655 100644 --- a/fast_cache_middleware/serializers.py +++ b/fast_cache_middleware/serializers.py @@ -36,9 +36,11 @@ def dumps(self, response: Response, request: Request, metadata: Metadata) -> str response_data = { "status_code": response.status_code, "headers": dict(response.headers), - "content": response.body.decode("utf-8", errors="ignore") - if response.body - else None, + "content": ( + bytes(response.body).decode("utf-8", errors="ignore") + if response.body + else None + ), } payload = { "response": response_data, diff --git a/tests/storages/test_redis_storage.py b/tests/storages/test_redis_storage.py index 6c10e23..94c991b 100644 --- a/tests/storages/test_redis_storage.py +++ b/tests/storages/test_redis_storage.py @@ -1,4 +1,5 @@ import re +from typing import cast from unittest.mock import AsyncMock, MagicMock import pytest @@ -65,7 +66,7 @@ async def test_store_overwrites_existing_key(): request = Request(scope={"type": "http", "method": "GET", "path": "/overwrite"}) response = Response(content="updated", status_code=200) - metadata = {} + metadata: dict = {} storage._serializer.dumps = MagicMock(return_value=b"new_value") From b8788b81ec9786d223dc0bd1938bafbd966e00a4 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Tue, 1 Jul 2025 17:21:24 +0300 Subject: [PATCH 12/23] add redis as optional dep --- .../storages/redis_storage.py | 7 +- poetry.lock | 79 +++++++++---------- pyproject.toml | 3 + 3 files changed, 48 insertions(+), 41 deletions(-) diff --git a/fast_cache_middleware/storages/redis_storage.py b/fast_cache_middleware/storages/redis_storage.py index ce01662..a853d44 100644 --- a/fast_cache_middleware/storages/redis_storage.py +++ b/fast_cache_middleware/storages/redis_storage.py @@ -3,7 +3,12 @@ import time import typing as tp -from redis import Redis +try: + import redis + from redis import Redis 
+except ImportError: # pragma: no cover + redis = None # type: ignore + from starlette.requests import Request from starlette.responses import Response diff --git a/poetry.lock b/poetry.lock index 1639233..3c5da06 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "annotated-types" @@ -6,7 +6,6 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -18,7 +17,6 @@ version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, @@ -31,16 +29,26 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +test = ["anyio[trio]", "blockbuster 
(>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] +[[package]] +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + [[package]] name = "black" version = "23.12.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, @@ -75,7 +83,7 @@ platformdirs = ">=2" [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -85,7 +93,6 @@ version = "2025.6.15" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057"}, {file = "certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b"}, @@ -97,7 +104,6 @@ version = "8.2.1" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" -groups = ["dev"] files = [ {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, @@ -112,8 +118,6 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] -markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -125,7 +129,6 @@ version = "7.9.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "coverage-7.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cc94d7c5e8423920787c33d811c0be67b7be83c705f001f7180c7b186dcf10ca"}, {file = "coverage-7.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16aa0830d0c08a2c40c264cef801db8bc4fc0e1892782e45bcacbd5889270509"}, @@ -197,7 +200,7 @@ files = [ ] [package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +toml = ["tomli"] [[package]] name = "fastapi" @@ -205,7 +208,6 @@ version = "0.115.13" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for 
production" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "fastapi-0.115.13-py3-none-any.whl", hash = "sha256:0a0cab59afa7bab22f5eb347f8c9864b681558c278395e94035a741fc10cd865"}, {file = "fastapi-0.115.13.tar.gz", hash = "sha256:55d1d25c2e1e0a0a50aceb1c8705cd932def273c102bff0b1c1da88b3c6eb307"}, @@ -226,7 +228,6 @@ version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, @@ -238,7 +239,6 @@ version = "1.0.9" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, @@ -260,7 +260,6 @@ version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -273,7 +272,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -285,7 +284,6 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -300,7 +298,6 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, @@ -312,7 +309,6 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." 
optional = false python-versions = ">=3.8.0" -groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -327,7 +323,6 @@ version = "1.16.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "mypy-1.16.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4f0fed1022a63c6fec38f28b7fc77fca47fd490445c69d0a66266c59dd0b88a"}, {file = "mypy-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86042bbf9f5a05ea000d3203cf87aa9d0ccf9a01f73f71c58979eb9249f46d72"}, @@ -381,7 +376,6 @@ version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, @@ -393,7 +387,6 @@ version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, @@ -405,7 +398,6 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -417,7 +409,6 @@ version = "4.3.8" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, @@ -434,7 +425,6 @@ version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, @@ -450,7 +440,6 @@ version = "2.11.7" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, @@ -464,7 +453,7 @@ typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] +timezone = ["tzdata"] [[package]] name = "pydantic-core" @@ -472,7 +461,6 @@ version = "2.33.2" description = "Core functionality for Pydantic validation and serialization" optional = false 
python-versions = ">=3.9" -groups = ["main"] files = [ {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, @@ -584,7 +572,6 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -605,7 +592,6 @@ version = "0.21.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, @@ -624,7 +610,6 @@ version = "4.1.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, @@ -637,13 +622,31 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "redis" +version = "6.2.0" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.9" +files = [ + {file = "redis-6.2.0-py3-none-any.whl", hash = "sha256:c8ddf316ee0aab65f04a11229e94a64b2618451dab7a67cb2f77eb799d872d5e"}, + {file = "redis-6.2.0.tar.gz", hash = "sha256:e821f129b75dde6cb99dd35e5c76e8c49512a5a0d8dfdc560b2fbd44b85ca977"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} + +[package.extras] +hiredis = ["hiredis (>=3.2.0)"] +jwt = ["pyjwt (>=2.9.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (>=20.0.1)", "requests (>=2.31.0)"] + [[package]] name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -655,7 +658,6 @@ version = "0.46.2" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35"}, {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, @@ -673,7 +675,6 @@ version = "4.14.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, @@ -685,7 +686,6 @@ version = "0.4.1" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, @@ -700,7 +700,6 @@ version = "0.34.3" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "uvicorn-0.34.3-py3-none-any.whl", hash = "sha256:16246631db62bdfbf069b0645177d6e8a77ba950cfedbfd093acef9444e4d885"}, {file = "uvicorn-0.34.3.tar.gz", hash = "sha256:35919a9a979d7a59334b6b10e05d77c1d0d574c50e0fc98b8b1a0f165708b55a"}, @@ -711,9 +710,9 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [metadata] -lock-version = "2.1" +lock-version = "2.0" python-versions = "^3.11" -content-hash = "9f9180b91ecef4ff4515c5ee6672dcde7de0740ada48aabf240683bc75de5d21" +content-hash = "7b018f29d22c294e303a5da2df537cdadd05dd501c9218e73e61a2febe668cf8" diff --git a/pyproject.toml b/pyproject.toml index ea31fa5..4dca123 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,10 @@ license = "MIT" [tool.poetry.dependencies] python = "^3.11" fastapi = ">=0.111.1,<1.0.0" +redis = { version = "^6.2.0", optional = true } +[tool.poetry.extras] +redis = ["redis"] [tool.poetry.group.dev.dependencies] pytest = "^7.4.3" From c1746d475d134530c2fa76e7bae8787482613d56 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Wed, 2 Jul 2025 09:18:42 +0300 Subject: [PATCH 13/23] resolve conflicts --- fast_cache_middleware/serializers.py | 32 ++++++++++--- .../storages/base_storage.py | 11 ++--- .../storages/in_memory_storage.py | 48 ++----------------- poetry.lock | 39 ++++++++++++++- pyproject.toml | 4 ++ 5 files changed, 78 insertions(+), 56 deletions(-) diff --git a/fast_cache_middleware/serializers.py b/fast_cache_middleware/serializers.py index 
365e235..4779057 100644 --- a/fast_cache_middleware/serializers.py +++ b/fast_cache_middleware/serializers.py @@ -1,5 +1,6 @@ import json -from typing import Any, Callable, Dict, Optional, Tuple, TypeAlias, Union +from typing import Any, Dict, Tuple, TypeAlias, Union +from urllib.parse import urlparse from starlette.requests import Request from starlette.responses import Response @@ -25,7 +26,27 @@ def is_binary(self) -> bool: class JSONSerializer(BaseSerializer): def dumps(self, response: Response, request: Request, metadata: Metadata) -> str: - raise NotImplementedError() # fixme: bad implementation now, maybe async? + request_data = { + "method": request.method, + "url": str(request.url), + "headers": dict(request.headers), + } + response_data = { + "status_code": response.status_code, + "headers": dict(response.headers), + "content": ( + bytes(response.body).decode("utf-8", errors="ignore") + if response.body + else None + ), + } + payload = { + "response": response_data, + "request": request_data, + "metadata": metadata, + } + + return json.dumps(payload) def loads(self, data: Union[str, bytes]) -> StoredResponse: if isinstance(data, bytes): @@ -48,16 +69,15 @@ def loads(self, data: Union[str, bytes]) -> StoredResponse: # Restore Request - create mock object for compatibility request_data = parsed["request"] - # Create minimal scope for Request - from urllib.parse import urlparse - parsed_url = urlparse(request_data["url"]) scope = { "type": "http", "method": request_data["method"], "path": parsed_url.path, "query_string": parsed_url.query.encode() if parsed_url.query else b"", - "headers": [[k.encode(), v.encode()] for k, v in request_data["headers"]], + "headers": [ + [k.encode(), v.encode()] for k, v in request_data["headers"].items() + ], } # Create empty receive function diff --git a/fast_cache_middleware/storages/base_storage.py b/fast_cache_middleware/storages/base_storage.py index ddc681c..8da4f8c 100644 --- 
a/fast_cache_middleware/storages/base_storage.py +++ b/fast_cache_middleware/storages/base_storage.py @@ -1,6 +1,5 @@ import re -import typing as tp -from typing import TypeAlias +from typing import Optional, Tuple, TypeAlias, Union from starlette.requests import Request from starlette.responses import Response @@ -8,7 +7,7 @@ from fast_cache_middleware.exceptions import StorageError from fast_cache_middleware.serializers import BaseSerializer, JSONSerializer, Metadata -StoredResponse: TypeAlias = tp.Tuple[Response, Request, Metadata] +StoredResponse: TypeAlias = Tuple[Response, Request, Metadata] class BaseStorage: @@ -21,8 +20,8 @@ class BaseStorage: def __init__( self, - serializer: tp.Optional[BaseSerializer] = None, - ttl: tp.Optional[tp.Union[int, float]] = None, + serializer: Optional[BaseSerializer] = None, + ttl: Optional[Union[int, float]] = None, ) -> None: self._serializer = serializer or JSONSerializer() @@ -36,7 +35,7 @@ async def store( ) -> None: raise NotImplementedError() - async def retrieve(self, key: str) -> tp.Optional[StoredResponse]: + async def retrieve(self, key: str) -> Optional[StoredResponse]: raise NotImplementedError() async def remove(self, path: re.Pattern) -> None: diff --git a/fast_cache_middleware/storages/in_memory_storage.py b/fast_cache_middleware/storages/in_memory_storage.py index 8dbeed8..07eae3d 100644 --- a/fast_cache_middleware/storages/in_memory_storage.py +++ b/fast_cache_middleware/storages/in_memory_storage.py @@ -2,55 +2,17 @@ import re import time from collections import OrderedDict -from typing import Any, Dict, Optional, Tuple, Union +from typing import Dict, Optional, Union from starlette.requests import Request from starlette.responses import Response -from typing_extensions import TypeAlias -from .exceptions import StorageError -from .serializers import BaseSerializer, JSONSerializer, Metadata +from fast_cache_middleware.exceptions import StorageError +from fast_cache_middleware.serializers import 
BaseSerializer, Metadata -logger = logging.getLogger(__name__) - -# Define type for stored response -StoredResponse: TypeAlias = Tuple[Response, Request, Metadata] - - -# Define base class for cache storage -class BaseStorage: - """Base class for cache storage. - - Args: - serializer: Serializer for converting Response/Request to string/bytes - ttl: Cache lifetime in seconds. None for permanent storage - """ - - def __init__( - self, - serializer: Optional[BaseSerializer] = None, - ttl: Optional[Union[int, float]] = None, - ) -> None: - self._serializer = serializer or JSONSerializer() - - if ttl is not None and ttl <= 0: - raise StorageError("TTL must be positive") - - self._ttl = ttl - - async def store( - self, key: str, response: Response, request: Request, metadata: Metadata - ) -> None: - raise NotImplementedError() +from .base_storage import BaseStorage, StoredResponse - async def retrieve(self, key: str) -> Optional[StoredResponse]: - raise NotImplementedError() - - async def remove(self, path: re.Pattern) -> None: - raise NotImplementedError() - - async def close(self) -> None: - raise NotImplementedError() +logger = logging.getLogger(__name__) class InMemoryStorage(BaseStorage): diff --git a/poetry.lock b/poetry.lock index bea52e4..e24bc9e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -34,6 +34,19 @@ doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] +[[package]] +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == 
\"redis\" and python_full_version < \"3.11.3\"" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + [[package]] name = "black" version = "23.12.1" @@ -637,6 +650,27 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "redis" +version = "6.2.0" +description = "Python client for Redis database and key-value store" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"redis\"" +files = [ + {file = "redis-6.2.0-py3-none-any.whl", hash = "sha256:c8ddf316ee0aab65f04a11229e94a64b2618451dab7a67cb2f77eb799d872d5e"}, + {file = "redis-6.2.0.tar.gz", hash = "sha256:e821f129b75dde6cb99dd35e5c76e8c49512a5a0d8dfdc560b2fbd44b85ca977"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} + +[package.extras] +hiredis = ["hiredis (>=3.2.0)"] +jwt = ["pyjwt (>=2.9.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (>=20.0.1)", "requests (>=2.31.0)"] + [[package]] name = "sniffio" version = "1.3.1" @@ -713,7 +747,10 @@ h11 = ">=0.8" [package.extras] standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] +[extras] +redis = ["redis"] + [metadata] lock-version = "2.1" python-versions = "^3.11" -content-hash = "4ffaeb11a66f7eda3ccdc421323e30e544db3574dcc602984aebb713faae6a8b" +content-hash = "effd115e8f41e51a1c40edce2fee7257d1b0fffd9db16cf7e123e49840b1ed88" diff --git a/pyproject.toml b/pyproject.toml index b05656b..3bc3960 100644 --- a/pyproject.toml 
+++ b/pyproject.toml @@ -11,6 +11,10 @@ license = "MIT" python = "^3.11" fastapi = ">=0.111.1,<1.0.0" pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +redis = { version = "^6.2.0", optional = true } + +[tool.poetry.extras] +redis = ["redis"] [tool.poetry.group.dev.dependencies] From cbfea2bdb65593ca891fa343cf8d6573f8346d28 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Thu, 3 Jul 2025 11:36:19 +0300 Subject: [PATCH 14/23] edit example --- examples/redis_example.py | 125 +++++++++++--------------------------- 1 file changed, 35 insertions(+), 90 deletions(-) diff --git a/examples/redis_example.py b/examples/redis_example.py index 3a3ffe5..fa42860 100644 --- a/examples/redis_example.py +++ b/examples/redis_example.py @@ -1,13 +1,12 @@ -"""Базовый пример использования FastCacheMiddleware с резолюцией роутов. +"""An example of using Fast Cache Middleware with rout resolution and Redis storage. -Демонстрирует: -1. Анализ роутов на старте приложения -2. Извлечение кеш конфигураций из dependencies -3. Автоматическое кеширование GET запросов -4. Инвалидация кеша при модифицирующих запросах +Demonstrates: +1. Analysis of routes at the start of the application; +2. Extracting configuration cache from dependencies; +3. Automatic caching of GET requests in Redis; +4. Cache invalidation in case of modifying requests. 
""" -import asyncio import logging import time import typing as tp @@ -17,18 +16,23 @@ from pydantic import BaseModel, Field from redis.asyncio import Redis -from fast_cache_middleware import CacheConfig, CacheDropConfig, FastCacheMiddleware +from fast_cache_middleware import ( + CacheConfig, + CacheDropConfig, + FastCacheMiddleware, + RedisStorage, +) -# Создаем FastAPI приложение +# Creating a Flash API application app = FastAPI(title="FastCacheMiddleware Redis Example") +# Initializing Redis redis = Redis(host="127.0.0.1", port=6379, db=0, decode_responses=True) -# Добавляем middleware - он проанализирует роуты при первом запросе -app.add_middleware(FastCacheMiddleware, storage=redis) +# Adding middleware - it will analyze the routes at the first request. +app.add_middleware(FastCacheMiddleware, storage=RedisStorage(redis_client=redis)) def custom_key_func(request: Request) -> str: - # Ключ включает user-id из заголовков если есть user_id = request.headers.get("user-id", "anonymous") return f"{request.url.path}:user:{user_id}" @@ -52,54 +56,17 @@ class UserResponse(FullUser): } -# Роуты с различными конфигурациями кеширования - - -@app.get("/") -async def root() -> tp.Dict[str, tp.Union[str, float]]: - """Корневой роут без кеширования.""" - return { - "message": "Without cache response", - "timestamp": time.time(), - "cache_duration": "0 seconds", - } - - -@app.get( - "/fast", dependencies=[CacheConfig(max_age=30)], openapi_extra={"x-cache-age": 30} -) -async def fast_endpoint() -> tp.Dict[str, tp.Union[str, float]]: - """Быстрый endpoint с коротким кешированием (30 секунд).""" - - return { - "message": "Fast cached response", - "timestamp": time.time(), - "cache_duration": "30 seconds", - } - - -@app.get( - "/slow", dependencies=[CacheConfig(max_age=300)], openapi_extra={"x-cache-age": 300} -) -async def slow_endpoint() -> tp.Dict[str, tp.Union[str, float]]: - """Медленный endpoint с длинным кешированием (5 минут).""" - await asyncio.sleep(0.5) - - return { - 
"message": "Slow cached response", - "timestamp": time.time(), - "cache_duration": "300 seconds", - } +# Routers with different caching configurations @app.get( "/users/{user_id}", - dependencies=[CacheConfig(max_age=60, key_func=custom_key_func)], + dependencies=[CacheConfig(max_age=120, key_func=custom_key_func)], ) async def get_user(user_id: int) -> UserResponse: - """Получение пользователя с кастомным ключом кеширования. + """Getting a user with a custom caching key. - Ключ кеша включает user-id из заголовков для персонализации. + The cache key includes the user-id from the headers for personalization. """ user = _USERS_STORAGE.get(user_id) if not user: @@ -108,7 +75,7 @@ async def get_user(user_id: int) -> UserResponse: return UserResponse(user_id=user_id, name=user.name, email=user.email) -@app.get("/users", dependencies=[CacheConfig(max_age=30)]) +@app.get("/users", dependencies=[CacheConfig(max_age=120)]) async def get_users() -> tp.List[UserResponse]: return [ UserResponse(user_id=user_id, name=user.name, email=user.email) @@ -118,29 +85,21 @@ async def get_users() -> tp.List[UserResponse]: @app.post("/users/{user_id}", dependencies=[CacheDropConfig(paths=["/users"])]) async def create_user(user_id: int, user_data: User) -> UserResponse: - """Создание пользователя с инвалидацией кеша. + """Creating a user with a cache disability. - Этот POST запрос инвалидирует кеш для всех /users/* путей. + This POST request disables the cache for all /users/* paths. 
""" _USERS_STORAGE[user_id] = user_data return UserResponse(user_id=user_id, name=user_data.name, email=user_data.email) -@app.put("/users/{user_id}", dependencies=[CacheDropConfig(paths=["/users"])]) -async def update_user(user_id: int, user_data: User) -> UserResponse: - """Обновление пользователя с инвалидацией кеша.""" - user = _USERS_STORAGE.get(user_id) - if not user: - raise HTTPException(status_code=404, detail="User not found") - _USERS_STORAGE[user_id] = user_data - - return UserResponse(user_id=user_id, name=user_data.name, email=user_data.email) - - @app.delete("/users/{user_id}", dependencies=[CacheDropConfig(paths=["/users"])]) async def delete_user(user_id: int) -> UserResponse: - """Удаление пользователя с инвалидацией кеша.""" + """Deleting a user with a cache disability. + + This DELETE request disables the cache for all /users/* paths. + """ user = _USERS_STORAGE.get(user_id) if not user: raise HTTPException(status_code=404, detail="User not found") @@ -155,34 +114,20 @@ async def delete_user(user_id: int) -> UserResponse: format="[-] %(asctime)s [%(levelname)s] %(module)s-%(lineno)d - %(message)s", ) - print("🚀 Запуск FastCacheMiddleware Redis Example...") - print("\n📋 Доступные endpoints:") - print(" GET / - корневой роут (без кеша)") - print(" GET /fast - короткий кеш (30s)") - print(" GET /slow - длинный кеш (5m)") - print(" GET /users/{user_id} - получение пользователя (кеш 3 мин)") - print(" GET /users - список пользователей (кеш 3 мин)") - print(" POST /users/{user_id} - создание пользователя (инвалидация /users)") - print( - " PUT /users/{user_id} - обновление пользователя (инвалидация /users и /users/*)" - ) - print(" DELETE /users/{user_id} - удаление пользователя (инвалидация /users)") + print("🚀 Running Fast Cache Middleware Redis Example...") + print("\n📋 Available endpoints:") + print(" GET /users/{user_id} - getting the user (2 min cache)") + print(" GET /users - list of users (2 min cache)") + print(" POST /users/{user_id} - user 
creation (disability /users)") + print(" DELETE /users/{user_id} - deleting a user (invalidation /users)") - print("\n🔧 Как работает middleware:") - print(" 1. При старте анализирует все роуты") - print(" 2. Извлекает CacheConfig/CacheDropConfig из dependencies") - print(" 3. При запросе находит соответствующий роут") - print(" 4. Применяет кеширование согласно конфигурации") - - print("\n💡 Для тестирования:") + print("\n💡 For testing purposes:") print(" curl http://localhost:8000/users/1") print(" curl http://localhost:8000/users") print( ' curl -X POST http://localhost:8000/users/1 -H "Content-Type: application/json" -d \'{"name": "John", "email": "john@example.com"}\'' ) - print( - ' curl -X PUT http://localhost:8000/users/1 -H "Content-Type: application/json" -d \'{"name": "John Updated", "email": "john@example.com"}\'' - ) print(" curl -X DELETE http://localhost:8000/users/1") + print() uvicorn.run(app, host="127.0.0.1", port=8000) From 45849042686e079a110b3b027959641fb9baabba Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Thu, 3 Jul 2025 11:37:50 +0300 Subject: [PATCH 15/23] method dumps at jsonserializer has become async --- fast_cache_middleware/serializers.py | 14 +++++++++++--- fast_cache_middleware/storages/redis_storage.py | 2 +- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/fast_cache_middleware/serializers.py b/fast_cache_middleware/serializers.py index 4779057..f962cfa 100644 --- a/fast_cache_middleware/serializers.py +++ b/fast_cache_middleware/serializers.py @@ -11,7 +11,7 @@ class BaseSerializer: - def dumps( + async def dumps( self, response: Response, request: Request, metadata: Metadata ) -> Union[str, bytes]: raise NotImplementedError() @@ -25,12 +25,19 @@ def is_binary(self) -> bool: class JSONSerializer(BaseSerializer): - def dumps(self, response: Response, request: Request, metadata: Metadata) -> str: + async def dumps( + self, response: Response, request: Request, metadata: Metadata + ) -> Union[str, bytes]: + 
body_bytes = await request.body() request_data = { "method": request.method, "url": str(request.url), "headers": dict(request.headers), + "body": ( + body_bytes.decode("utf-8", errors="ignore") if body_bytes else None + ), } + response_data = { "status_code": response.status_code, "headers": dict(response.headers), @@ -40,9 +47,10 @@ def dumps(self, response: Response, request: Request, metadata: Metadata) -> str else None ), } + payload = { - "response": response_data, "request": request_data, + "response": response_data, "metadata": metadata, } diff --git a/fast_cache_middleware/storages/redis_storage.py b/fast_cache_middleware/storages/redis_storage.py index a853d44..f508b06 100644 --- a/fast_cache_middleware/storages/redis_storage.py +++ b/fast_cache_middleware/storages/redis_storage.py @@ -48,7 +48,7 @@ async def store( metadata["write_time"] = current_time - value = self._serializer.dumps(response, request, metadata) + value = await self._serializer.dumps(response, request, metadata) logger.debug("Serialized data: %s", value) ttl = metadata.get("ttl", self._ttl) logger.debug(f"TTL: %s", ttl) From 3cda130620bebf3b5fb7190f36b43db4f83d3ab7 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Thu, 3 Jul 2025 11:38:30 +0300 Subject: [PATCH 16/23] RedisStorage for imports --- fast_cache_middleware/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/fast_cache_middleware/__init__.py b/fast_cache_middleware/__init__.py index b6425eb..4dabaa3 100644 --- a/fast_cache_middleware/__init__.py +++ b/fast_cache_middleware/__init__.py @@ -14,7 +14,7 @@ from .controller import Controller from .depends import BaseCacheConfigDepends, CacheConfig, CacheDropConfig from .middleware import FastCacheMiddleware -from .storages import BaseStorage, InMemoryStorage +from .storages import BaseStorage, InMemoryStorage, RedisStorage __version__ = "1.0.0" @@ -29,6 +29,7 @@ # Storages "BaseStorage", "InMemoryStorage", + "RedisStorage", # Serialization 
"BaseSerializer", "DefaultSerializer", From e98f898d83fd3c0b30726a90951ba5dd166a4c20 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Thu, 3 Jul 2025 14:06:23 +0300 Subject: [PATCH 17/23] edit unit-tests --- tests/storages/test_redis_storage.py | 40 +++++++++++++++++----------- tests/test_serializers.py | 30 ++++++++++++++------- 2 files changed, 45 insertions(+), 25 deletions(-) diff --git a/tests/storages/test_redis_storage.py b/tests/storages/test_redis_storage.py index 94c991b..e10b5cf 100644 --- a/tests/storages/test_redis_storage.py +++ b/tests/storages/test_redis_storage.py @@ -36,18 +36,20 @@ async def test_redis_storage_init_validation(ttl, expect_error): @pytest.mark.asyncio async def test_store_and_retrieve_works(): mock_redis = AsyncMock() - storage = RedisStorage(redis_client=mock_redis, ttl=1) - - request = Request(scope={"type": "http", "method": "GET", "path": "/test"}) - response = Response(content="hello", status_code=200) - metadata = {} + mock_serializer = MagicMock() serialized_value = b"serialized" - storage._serializer.dumps = MagicMock(return_value=serialized_value) - storage._serializer.loads = MagicMock( + mock_serializer.dumps = AsyncMock(return_value=serialized_value) + mock_serializer.loads = MagicMock( return_value=("deserialized_response", "req", {"meta": "data"}) ) + storage = RedisStorage(redis_client=mock_redis, ttl=1, serializer=mock_serializer) + + request = Request(scope={"type": "http", "method": "GET", "path": "/test"}) + response = Response(content="hello", status_code=200) + metadata: dict[str, str | int] = {} + mock_redis.exists.return_value = False await storage.store("key1", response, request, metadata) @@ -62,20 +64,23 @@ async def test_store_and_retrieve_works(): @pytest.mark.asyncio async def test_store_overwrites_existing_key(): mock_redis = AsyncMock() - storage = RedisStorage(redis_client=mock_redis, ttl=10) + + mock_serializer = MagicMock() + serialized_value = b"serialized" + mock_serializer.dumps = 
AsyncMock(return_value=serialized_value) + + storage = RedisStorage(redis_client=mock_redis, ttl=10, serializer=mock_serializer) request = Request(scope={"type": "http", "method": "GET", "path": "/overwrite"}) response = Response(content="updated", status_code=200) - metadata: dict = {} - - storage._serializer.dumps = MagicMock(return_value=b"new_value") + metadata: dict[str, str] = {} mock_redis.exists.return_value = True await storage.store("existing_key", response, request, metadata) mock_redis.delete.assert_awaited_with("cache:existing_key") - mock_redis.set.assert_awaited_with("cache:existing_key", b"new_value", ex=10) + mock_redis.set.assert_awaited_with("cache:existing_key", serialized_value, ex=10) @pytest.mark.asyncio @@ -91,13 +96,18 @@ async def test_retrieve_returns_none_on_missing_key(): @pytest.mark.asyncio async def test_retrieve_returns_none_on_deserialization_error(): mock_redis = AsyncMock() - storage = RedisStorage(redis_client=mock_redis) - mock_redis.get.return_value = b"invalid" def raise_error(_): raise ValueError("bad format") - storage._serializer.loads = raise_error + mock_serializer = MagicMock() + mock_serializer.loads = raise_error + + mock_serializer.dumps = AsyncMock(return_value=b"serialized") + + storage = RedisStorage(redis_client=mock_redis, serializer=mock_serializer) + + mock_redis.get.return_value = b"invalid" result = await storage.retrieve("corrupt") assert result is None diff --git a/tests/test_serializers.py b/tests/test_serializers.py index 19fcab7..1dbcbe5 100644 --- a/tests/test_serializers.py +++ b/tests/test_serializers.py @@ -10,13 +10,19 @@ @pytest.fixture def test_request() -> Request: + body = b'{"key":"value"}' + + async def receive(): + return {"type": "http.request", "body": body, "more_body": False} + return Request( scope={ "type": "http", "method": "GET", "path": "/test", "headers": [(b"host", b"test.com"), (b"user-agent", b"pytest")], - } + }, + receive=receive, ) @@ -32,10 +38,11 @@ def test_metadata() -> 
Metadata: return {"meta": "value", "ttl": 123} -def test_dumps_output_is_valid_json(test_request, test_response, test_metadata): +@pytest.mark.asyncio +async def test_dumps_output_is_valid_json(test_request, test_response, test_metadata): serializer = JSONSerializer() - result = serializer.dumps(test_response, test_request, test_metadata) + result = await serializer.dumps(test_response, test_request, test_metadata) parsed = json.loads(result) assert "response" in parsed @@ -51,12 +58,13 @@ def test_dumps_output_is_valid_json(test_request, test_response, test_metadata): assert parsed["metadata"]["ttl"] == 123 -def test_loads_reconstructs_response_request( +@pytest.mark.asyncio +async def test_loads_reconstructs_response_request( test_request, test_response, test_metadata ): serializer = JSONSerializer() - json_data = serializer.dumps(test_response, test_request, test_metadata) + json_data = await serializer.dumps(test_response, test_request, test_metadata) response, request, metadata = serializer.loads(json_data) assert isinstance(response, Response) @@ -73,11 +81,12 @@ def test_loads_reconstructs_response_request( assert metadata == test_metadata -def test_loads_accepts_bytes_input(test_request, test_response, test_metadata): +@pytest.mark.asyncio +async def test_loads_accepts_bytes_input(test_request, test_response, test_metadata): serializer = JSONSerializer() - json_data_str = serializer.dumps(test_response, test_request, test_metadata) - json_data_bytes = json_data_str.encode("utf-8") + json_data_str = await serializer.dumps(test_response, test_request, test_metadata) + json_data_bytes = str(json_data_str).encode("utf-8") response, request, metadata = serializer.loads(json_data_bytes) @@ -86,11 +95,12 @@ def test_loads_accepts_bytes_input(test_request, test_response, test_metadata): assert metadata == test_metadata -def test_dumps_handles_empty_body(test_request, test_metadata): +@pytest.mark.asyncio +async def test_dumps_handles_empty_body(test_request, 
test_metadata): response = Response(status_code=status.HTTP_204_NO_CONTENT) serializer = JSONSerializer() - json_str = serializer.dumps(response, test_request, test_metadata) + json_str = await serializer.dumps(response, test_request, test_metadata) parsed = json.loads(json_str) assert parsed["response"]["status_code"] == status.HTTP_204_NO_CONTENT From 02c57ed0f6245b8e5f7d730b5b29b5bf852dde25 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Thu, 3 Jul 2025 14:19:31 +0300 Subject: [PATCH 18/23] edit redis import --- fast_cache_middleware/storages/redis_storage.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/fast_cache_middleware/storages/redis_storage.py b/fast_cache_middleware/storages/redis_storage.py index f508b06..b30ce6c 100644 --- a/fast_cache_middleware/storages/redis_storage.py +++ b/fast_cache_middleware/storages/redis_storage.py @@ -5,9 +5,11 @@ try: import redis - from redis import Redis + + Redis = redis.Redis except ImportError: # pragma: no cover redis = None # type: ignore + Redis = None # type: ignore from starlette.requests import Request from starlette.responses import Response From 9be6d8e0f634fdc5124555291a635318753bd027 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Thu, 3 Jul 2025 14:28:22 +0300 Subject: [PATCH 19/23] edit redis import x2 --- .../storages/redis_storage.py | 20 +++++++++---------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/fast_cache_middleware/storages/redis_storage.py b/fast_cache_middleware/storages/redis_storage.py index b30ce6c..6f51912 100644 --- a/fast_cache_middleware/storages/redis_storage.py +++ b/fast_cache_middleware/storages/redis_storage.py @@ -1,15 +1,13 @@ import logging import re import time -import typing as tp +from typing import TYPE_CHECKING, Optional, Union -try: - import redis - - Redis = redis.Redis -except ImportError: # pragma: no cover - redis = None # type: ignore - Redis = None # type: ignore +if TYPE_CHECKING: + try: + from redis import Redis + 
except ImportError: + Redis = None # type: ignore from starlette.requests import Request from starlette.responses import Response @@ -26,8 +24,8 @@ class RedisStorage(BaseStorage): def __init__( self, redis_client: Redis, - serializer: tp.Optional[BaseSerializer] = None, - ttl: tp.Optional[tp.Union[int, float]] = None, + serializer: Optional[BaseSerializer] = None, + ttl: Optional[Union[int, float]] = None, namespace: str = "cache", ) -> None: super().__init__(serializer, ttl) @@ -65,7 +63,7 @@ async def store( await self._storage.set(full_key, value, ex=ttl) logger.info("Data written to Redis") - async def retrieve(self, key: str) -> tp.Optional[StoredResponse]: + async def retrieve(self, key: str) -> Optional[StoredResponse]: """ Get response from Redis. If TTL expired returns None. """ From f409ce1fb50b31b26c3bc8fc12662f22b2b891c6 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Thu, 3 Jul 2025 14:31:57 +0300 Subject: [PATCH 20/23] ci mypy fix --- poetry.lock | 215 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 2 + 2 files changed, 216 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index e24bc9e..97e5a99 100644 --- a/poetry.lock +++ b/poetry.lock @@ -104,6 +104,87 @@ files = [ {file = "certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b"}, ] +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = 
"cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = 
"cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "click" version = "8.2.1" @@ -212,6 +293,66 @@ files = [ [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +[[package]] +name = "cryptography" +version = "45.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["dev"] +files = [ + {file = "cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27"}, + {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e"}, + {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174"}, + {file = "cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9"}, + {file = "cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63"}, + {file = "cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7"}, + {file = 
"cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492"}, + {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0"}, + {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a"}, + {file = "cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f"}, + {file = "cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:206210d03c1193f4e1ff681d22885181d47efa1ab3018766a7b32a7b3d6e6afd"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c648025b6840fe62e57107e0a25f604db740e728bd67da4f6f060f03017d5097"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b8fa8b0a35a9982a3c60ec79905ba5bb090fc0b9addcfd3dc2dd04267e45f25e"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:14d96584701a887763384f3c47f0ca7c1cce322aa1c31172680eb596b890ec30"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57c816dfbd1659a367831baca4b775b2a5b43c003daf52e9d57e1d30bc2e1b0e"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b9e38e0a83cd51e07f5a48ff9691cae95a79bea28fe4ded168a8e5c6c77e819d"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e"}, + {file = 
"cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f"}, + {file = "cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a"}, +] + +[package.dependencies] +cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""] +pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==45.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "fastapi" version = "0.115.13" @@ -457,6 +598,19 @@ files = [ dev = ["pre-commit", "tox"] testing = ["coverage", "pytest", "pytest-benchmark"] +[[package]] +name = "pycparser" +version = "2.22" +description 
= "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pydantic" version = "2.11.7" @@ -701,6 +855,65 @@ anyio = ">=3.6.2,<5" [package.extras] full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] +[[package]] +name = "types-cffi" +version = "1.17.0.20250523" +description = "Typing stubs for cffi" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_cffi-1.17.0.20250523-py3-none-any.whl", hash = "sha256:e98c549d8e191f6220e440f9f14315d6775a21a0e588c32c20476be885b2fad9"}, + {file = "types_cffi-1.17.0.20250523.tar.gz", hash = "sha256:e7110f314c65590533adae1b30763be08ca71ad856a1ae3fe9b9d8664d49ec22"}, +] + +[package.dependencies] +types-setuptools = "*" + +[[package]] +name = "types-pyopenssl" +version = "24.1.0.20240722" +description = "Typing stubs for pyOpenSSL" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, + {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" +types-cffi = "*" + +[[package]] +name = "types-redis" +version = "4.6.0.20241004" +description = "Typing stubs for redis" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e"}, + {file = 
"types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" +types-pyOpenSSL = "*" + +[[package]] +name = "types-setuptools" +version = "80.9.0.20250529" +description = "Typing stubs for setuptools" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_setuptools-80.9.0.20250529-py3-none-any.whl", hash = "sha256:00dfcedd73e333a430e10db096e4d46af93faf9314f832f13b6bbe3d6757e95f"}, + {file = "types_setuptools-80.9.0.20250529.tar.gz", hash = "sha256:79e088ba0cba2186c8d6499cbd3e143abb142d28a44b042c28d3148b1e353c91"}, +] + [[package]] name = "typing-extensions" version = "4.14.0" @@ -753,4 +966,4 @@ redis = ["redis"] [metadata] lock-version = "2.1" python-versions = "^3.11" -content-hash = "effd115e8f41e51a1c40edce2fee7257d1b0fffd9db16cf7e123e49840b1ed88" +content-hash = "1446cc737dafbbe64d69d52f66130f8216963465001a9b962b4617c3e518cc69" diff --git a/pyproject.toml b/pyproject.toml index 3bc3960..015fb42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ mypy = "^1.6.1" pytest-cov = "^4.1.0" httpx = "^0.28.1" uvicorn = "^0.34.3" +types-redis = "^4.6.0.20241004" [tool.black] line-length = 88 @@ -54,6 +55,7 @@ warn_unused_ignores = true warn_no_return = true warn_unreachable = true strict_optional = true +ignore_missing_imports = true [[tool.mypy.overrides]] module = ["tests.*"] From 166a1e5408589d19be841261ce0a8c300ee85f79 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Fri, 4 Jul 2025 13:03:38 +0300 Subject: [PATCH 21/23] remove ignore imports --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 015fb42..5255a98 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,7 +55,6 @@ warn_unused_ignores = true warn_no_return = true warn_unreachable = true strict_optional = true -ignore_missing_imports = true [[tool.mypy.overrides]] module = 
["tests.*"] From 0243ad7d066fd4feb997dc3aa19555a87f58bbac Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Fri, 4 Jul 2025 13:03:50 +0300 Subject: [PATCH 22/23] async type for Redis --- examples/redis_example.py | 2 +- fast_cache_middleware/storages/redis_storage.py | 11 +++++------ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/examples/redis_example.py b/examples/redis_example.py index fa42860..5fffa57 100644 --- a/examples/redis_example.py +++ b/examples/redis_example.py @@ -14,7 +14,7 @@ import uvicorn from fastapi import FastAPI, HTTPException, Request from pydantic import BaseModel, Field -from redis.asyncio import Redis +from redis.asyncio import Redis # async only from fast_cache_middleware import ( CacheConfig, diff --git a/fast_cache_middleware/storages/redis_storage.py b/fast_cache_middleware/storages/redis_storage.py index 6f51912..6af7c5f 100644 --- a/fast_cache_middleware/storages/redis_storage.py +++ b/fast_cache_middleware/storages/redis_storage.py @@ -1,13 +1,12 @@ import logging import re import time -from typing import TYPE_CHECKING, Optional, Union +from typing import Optional, Union -if TYPE_CHECKING: - try: - from redis import Redis - except ImportError: - Redis = None # type: ignore +try: + from redis.asyncio import Redis +except ImportError: + Redis = None # type: ignore from starlette.requests import Request from starlette.responses import Response From 7f5d6745f7b4378609aac03f4fdd7e264e624e10 Mon Sep 17 00:00:00 2001 From: Nikita Yakovlev Date: Sun, 6 Jul 2025 19:22:27 +0300 Subject: [PATCH 23/23] add command for install with redis --- examples/redis_example.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/examples/redis_example.py b/examples/redis_example.py index 5fffa57..aa7e1b9 100644 --- a/examples/redis_example.py +++ b/examples/redis_example.py @@ -1,5 +1,7 @@ """An example of using Fast Cache Middleware with rout resolution and Redis storage. 
+To install using Redis, run this command: pip install fast-cache-middleware[redis] + +Demonstrates: 1. Analysis of routes at the start of the application; 2. Extracting configuration cache from dependencies;