"""Example of using Fast Cache Middleware with route resolution and Redis storage.

To install with Redis support, run: pip install fast-cache-middleware[redis]

Demonstrates:
1. Analysis of routes at application startup;
2. Extracting cache configuration from dependencies;
3. Automatic caching of GET requests in Redis;
4. Cache invalidation on modifying requests.
"""

import logging
import time
import typing as tp

import uvicorn
from fastapi import FastAPI, HTTPException, Request
from pydantic import BaseModel, Field
from redis.asyncio import Redis  # async client only

from fast_cache_middleware import (
    CacheConfig,
    CacheDropConfig,
    FastCacheMiddleware,
    RedisStorage,
)

# Create the FastAPI application.
app = FastAPI(title="FastCacheMiddleware Redis Example")

# Initialize the async Redis client (decode_responses=True -> str values).
redis = Redis(host="127.0.0.1", port=6379, db=0, decode_responses=True)

# Add middleware - it analyzes the routes on the first request.
app.add_middleware(FastCacheMiddleware, storage=RedisStorage(redis_client=redis))


def custom_key_func(request: Request) -> str:
    """Build a cache key personalized by the ``user-id`` request header."""
    user_id = request.headers.get("user-id", "anonymous")
    return f"{request.url.path}:user:{user_id}"


class User(BaseModel):
    name: str
    email: str


class FullUser(User):
    user_id: int


class UserResponse(FullUser):
    # The timestamp makes it visible whether a response was served from cache.
    timestamp: float = Field(default_factory=time.time)


_USERS_STORAGE: tp.Dict[int, User] = {
    1: User(name="John Doe", email="john.doe@example.com"),
    2: User(name="Jane Doe", email="jane.doe@example.com"),
}


# Routes with different caching configurations


@app.get(
    "/users/{user_id}",
    dependencies=[CacheConfig(max_age=120, key_func=custom_key_func)],
)
async def get_user(user_id: int) -> UserResponse:
    """Get a user with a custom caching key.

    The cache key includes the user-id from the headers for personalization.
    """
    user = _USERS_STORAGE.get(user_id)
    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    return UserResponse(user_id=user_id, name=user.name, email=user.email)


@app.get("/users", dependencies=[CacheConfig(max_age=120)])
async def get_users() -> tp.List[UserResponse]:
    """List all users (cached for 2 minutes)."""
    return [
        UserResponse(user_id=user_id, name=user.name, email=user.email)
        for user_id, user in _USERS_STORAGE.items()
    ]


@app.post("/users/{user_id}", dependencies=[CacheDropConfig(paths=["/users"])])
async def create_user(user_id: int, user_data: User) -> UserResponse:
    """Create a user with cache invalidation.

    This POST request invalidates the cache for all /users/* paths.
    """
    _USERS_STORAGE[user_id] = user_data

    return UserResponse(user_id=user_id, name=user_data.name, email=user_data.email)


@app.delete("/users/{user_id}", dependencies=[CacheDropConfig(paths=["/users"])])
async def delete_user(user_id: int) -> UserResponse:
    """Delete a user with cache invalidation.

    This DELETE request invalidates the cache for all /users/* paths.
    """
    user = _USERS_STORAGE.get(user_id)
    if not user:
        raise HTTPException(status_code=404, detail="User not found")
    del _USERS_STORAGE[user_id]

    return UserResponse(user_id=user_id, name=user.name, email=user.email)


if __name__ == "__main__":
    logging.basicConfig(
        level=logging.DEBUG,
        format="[-] %(asctime)s [%(levelname)s] %(module)s-%(lineno)d - %(message)s",
    )

    print("šŸš€ Running Fast Cache Middleware Redis Example...")
    print("\nšŸ“‹ Available endpoints:")
    print("  GET    /users/{user_id}  - get a user (2 min cache)")
    print("  GET    /users            - list users (2 min cache)")
    print("  POST   /users/{user_id}  - create a user (invalidates /users)")
    print("  DELETE /users/{user_id}  - delete a user (invalidates /users)")

    print("\nšŸ’” For testing purposes:")
    print("  curl http://localhost:8000/users/1")
    print("  curl http://localhost:8000/users")
    print(
        '  curl -X POST http://localhost:8000/users/1 -H "Content-Type: application/json" -d \'{"name": "John", "email": "john@example.com"}\''
    )
    print("  curl -X DELETE http://localhost:8000/users/1")
    print()

    uvicorn.run(app, host="127.0.0.1", port=8000)
a/fast_cache_middleware/serializers.py +++ b/fast_cache_middleware/serializers.py @@ -1,5 +1,6 @@ import json -from typing import Any, Callable, Dict, Optional, Tuple, TypeAlias, Union +from typing import Any, Dict, Tuple, TypeAlias, Union +from urllib.parse import urlparse from starlette.requests import Request from starlette.responses import Response @@ -10,7 +11,7 @@ class BaseSerializer: - def dumps( + async def dumps( self, response: Response, request: Request, metadata: Metadata ) -> Union[str, bytes]: raise NotImplementedError() @@ -24,8 +25,36 @@ def is_binary(self) -> bool: class JSONSerializer(BaseSerializer): - def dumps(self, response: Response, request: Request, metadata: Metadata) -> str: - raise NotImplementedError() # fixme: bad implementation now, maybe async? + async def dumps( + self, response: Response, request: Request, metadata: Metadata + ) -> Union[str, bytes]: + body_bytes = await request.body() + request_data = { + "method": request.method, + "url": str(request.url), + "headers": dict(request.headers), + "body": ( + body_bytes.decode("utf-8", errors="ignore") if body_bytes else None + ), + } + + response_data = { + "status_code": response.status_code, + "headers": dict(response.headers), + "content": ( + bytes(response.body).decode("utf-8", errors="ignore") + if response.body + else None + ), + } + + payload = { + "request": request_data, + "response": response_data, + "metadata": metadata, + } + + return json.dumps(payload) def loads(self, data: Union[str, bytes]) -> StoredResponse: if isinstance(data, bytes): @@ -48,16 +77,15 @@ def loads(self, data: Union[str, bytes]) -> StoredResponse: # Restore Request - create mock object for compatibility request_data = parsed["request"] - # Create minimal scope for Request - from urllib.parse import urlparse - parsed_url = urlparse(request_data["url"]) scope = { "type": "http", "method": request_data["method"], "path": parsed_url.path, "query_string": parsed_url.query.encode() if parsed_url.query 
class BaseStorage:
    """Abstract base class for cache storages.

    Concrete storages implement ``store``/``retrieve``/``remove``/``close``.

    Args:
        serializer: Serializer converting Response/Request to str/bytes;
            defaults to :class:`JSONSerializer`.
        ttl: Cache lifetime in seconds; ``None`` means permanent storage.

    Raises:
        StorageError: If ``ttl`` is given but not positive.
    """

    def __init__(
        self,
        serializer: Optional[BaseSerializer] = None,
        ttl: Optional[Union[int, float]] = None,
    ) -> None:
        self._serializer = serializer or JSONSerializer()

        if ttl is not None and ttl <= 0:
            raise StorageError("TTL must be positive")

        self._ttl = ttl

    async def store(
        self, key: str, response: Response, request: Request, metadata: Metadata
    ) -> None:
        """Persist a response under ``key``. Must be overridden."""
        raise NotImplementedError()

    async def retrieve(self, key: str) -> Optional[StoredResponse]:
        """Fetch a stored response by ``key``, or ``None``. Must be overridden."""
        raise NotImplementedError()

    async def remove(self, path: re.Pattern) -> None:
        """Invalidate entries whose path matches ``path``. Must be overridden."""
        raise NotImplementedError()

    async def close(self) -> None:
        """Release any resources held by the storage. Must be overridden."""
        raise NotImplementedError()
import logging
import re
import time
from typing import Optional, Union

try:
    from redis.asyncio import Redis
except ImportError:  # redis is an optional extra
    Redis = None  # type: ignore

from starlette.requests import Request
from starlette.responses import Response

from fast_cache_middleware.exceptions import StorageError
from fast_cache_middleware.serializers import BaseSerializer, JSONSerializer, Metadata

from .base_storage import BaseStorage, StoredResponse

logger = logging.getLogger(__name__)


class RedisStorage(BaseStorage):
    """Cache storage backed by an async Redis client.

    Args:
        redis_client: Configured ``redis.asyncio.Redis`` instance.
        serializer: Serializer for Response/Request; defaults to JSON.
        ttl: Default cache lifetime in seconds; ``None`` for no expiry.
        namespace: Key prefix isolating this cache's keys in Redis.

    Raises:
        StorageError: If ``ttl`` is given but not positive (via BaseStorage).
    """

    def __init__(
        self,
        redis_client: Redis,
        serializer: Optional[BaseSerializer] = None,
        ttl: Optional[Union[int, float]] = None,
        namespace: str = "cache",
    ) -> None:
        # Serializer defaulting and TTL validation live in BaseStorage;
        # do not duplicate them here.
        super().__init__(serializer, ttl)
        self._storage = redis_client
        self._namespace = namespace

    async def store(
        self, key: str, response: Response, request: Request, metadata: Metadata
    ) -> None:
        """Serialize and save a response; Redis expires it via TTL automatically."""
        metadata["write_time"] = time.time()

        value = await self._serializer.dumps(response, request, metadata)
        ttl = metadata.get("ttl", self._ttl)
        full_key = self._full_key(key)
        logger.debug("Storing key %s (ttl=%s)", full_key, ttl)

        # SET overwrites atomically; the previous EXISTS+DELETE dance added two
        # round trips and a race window for no benefit.
        await self._storage.set(full_key, value, ex=ttl)

    async def retrieve(self, key: str) -> Optional[StoredResponse]:
        """Get a response from Redis; returns None on miss or expired TTL."""
        full_key = self._full_key(key)
        raw_data = await self._storage.get(full_key)

        if raw_data is None:
            logger.debug("Cache miss for key %s (absent or TTL expired)", full_key)
            return None

        logger.debug("Taking data from Redis: %s", raw_data)
        try:
            return self._serializer.loads(raw_data)
        except Exception as e:
            logger.warning(
                "Failed to deserialize cached response for key %s: %s", key, e
            )
            return None

    async def remove(self, path: re.Pattern) -> None:
        """Invalidate cached entries for the given path pattern."""
        base = path.pattern
        if base.startswith("^"):
            base = base[1:]
        base = base.rstrip("$")

        exact_key = self._full_key(base)
        pattern = exact_key + "/*"

        # The wildcard below does not match the bare path itself
        # (e.g. "cache:/users"), so delete it explicitly first.
        await self._storage.delete(exact_key)

        # SCAN is cursor-based: a single scan() call returns only one page and
        # would leave stale entries behind. scan_iter walks the full keyspace.
        removed = 0
        async for found in self._storage.scan_iter(match=pattern):
            await self._storage.delete(found)
            removed += 1
            logger.debug("Key deleted from Redis: %s", found)

        if removed:
            logger.info("Removed %d cache entries matching %s", removed, pattern)
        else:
            logger.debug("No cache entries matched %s", pattern)

    async def close(self) -> None:
        """Release the Redis connection.

        Previously this called ``flushdb()``, which wipes the ENTIRE Redis
        database — including keys not owned by this cache. Closing the client
        is the correct shutdown behavior; cached entries simply expire via TTL.
        """
        await self._storage.aclose()  # redis-py >= 5 name for close()
        logger.debug("Redis storage closed")

    def _full_key(self, key: str) -> str:
        """Prefix ``key`` with the configured namespace."""
        return f"{self._namespace}:{key}"
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = 
"cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = 
"cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "click" version = "8.2.1" @@ -199,6 +293,66 @@ files = [ [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +[[package]] +name = "cryptography" +version = "45.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["dev"] +files = [ + {file = "cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9"}, + {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27"}, + {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e"}, + {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174"}, + {file = "cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9"}, + {file = "cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63"}, + {file = "cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7"}, + {file = 
"cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42"}, + {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492"}, + {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0"}, + {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a"}, + {file = "cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f"}, + {file = "cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:206210d03c1193f4e1ff681d22885181d47efa1ab3018766a7b32a7b3d6e6afd"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c648025b6840fe62e57107e0a25f604db740e728bd67da4f6f060f03017d5097"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b8fa8b0a35a9982a3c60ec79905ba5bb090fc0b9addcfd3dc2dd04267e45f25e"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:14d96584701a887763384f3c47f0ca7c1cce322aa1c31172680eb596b890ec30"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57c816dfbd1659a367831baca4b775b2a5b43c003daf52e9d57e1d30bc2e1b0e"}, + {file = "cryptography-45.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b9e38e0a83cd51e07f5a48ff9691cae95a79bea28fe4ded168a8e5c6c77e819d"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e"}, + {file = 
"cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1"}, + {file = "cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f"}, + {file = "cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a"}, +] + +[package.dependencies] +cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""] +pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==45.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "fastapi" version = "0.115.13" @@ -444,6 +598,19 @@ files = [ dev = ["pre-commit", "tox"] testing = ["coverage", "pytest", "pytest-benchmark"] +[[package]] +name = "pycparser" +version = "2.22" +description 
= "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pydantic" version = "2.11.7" @@ -637,6 +804,27 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "redis" +version = "6.2.0" +description = "Python client for Redis database and key-value store" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"redis\"" +files = [ + {file = "redis-6.2.0-py3-none-any.whl", hash = "sha256:c8ddf316ee0aab65f04a11229e94a64b2618451dab7a67cb2f77eb799d872d5e"}, + {file = "redis-6.2.0.tar.gz", hash = "sha256:e821f129b75dde6cb99dd35e5c76e8c49512a5a0d8dfdc560b2fbd44b85ca977"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} + +[package.extras] +hiredis = ["hiredis (>=3.2.0)"] +jwt = ["pyjwt (>=2.9.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (>=20.0.1)", "requests (>=2.31.0)"] + [[package]] name = "sniffio" version = "1.3.1" @@ -667,6 +855,65 @@ anyio = ">=3.6.2,<5" [package.extras] full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] +[[package]] +name = "types-cffi" +version = "1.17.0.20250523" +description = "Typing stubs for cffi" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_cffi-1.17.0.20250523-py3-none-any.whl", hash = "sha256:e98c549d8e191f6220e440f9f14315d6775a21a0e588c32c20476be885b2fad9"}, + {file = "types_cffi-1.17.0.20250523.tar.gz", hash = "sha256:e7110f314c65590533adae1b30763be08ca71ad856a1ae3fe9b9d8664d49ec22"}, +] + 
+[package.dependencies] +types-setuptools = "*" + +[[package]] +name = "types-pyopenssl" +version = "24.1.0.20240722" +description = "Typing stubs for pyOpenSSL" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, + {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" +types-cffi = "*" + +[[package]] +name = "types-redis" +version = "4.6.0.20241004" +description = "Typing stubs for redis" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e"}, + {file = "types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" +types-pyOpenSSL = "*" + +[[package]] +name = "types-setuptools" +version = "80.9.0.20250529" +description = "Typing stubs for setuptools" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_setuptools-80.9.0.20250529-py3-none-any.whl", hash = "sha256:00dfcedd73e333a430e10db096e4d46af93faf9314f832f13b6bbe3d6757e95f"}, + {file = "types_setuptools-80.9.0.20250529.tar.gz", hash = "sha256:79e088ba0cba2186c8d6499cbd3e143abb142d28a44b042c28d3148b1e353c91"}, +] + [[package]] name = "typing-extensions" version = "4.14.0" @@ -713,7 +960,10 @@ h11 = ">=0.8" [package.extras] standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets 
(>=10.4)"] +[extras] +redis = ["redis"] + [metadata] lock-version = "2.1" python-versions = "^3.11" -content-hash = "4ffaeb11a66f7eda3ccdc421323e30e544db3574dcc602984aebb713faae6a8b" +content-hash = "1446cc737dafbbe64d69d52f66130f8216963465001a9b962b4617c3e518cc69" diff --git a/pyproject.toml b/pyproject.toml index b05656b..5255a98 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,6 +11,10 @@ license = "MIT" python = "^3.11" fastapi = ">=0.111.1,<1.0.0" pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +redis = { version = "^6.2.0", optional = true } + +[tool.poetry.extras] +redis = ["redis"] [tool.poetry.group.dev.dependencies] @@ -22,6 +26,7 @@ mypy = "^1.6.1" pytest-cov = "^4.1.0" httpx = "^0.28.1" uvicorn = "^0.34.3" +types-redis = "^4.6.0.20241004" [tool.black] line-length = 88 diff --git a/tests/storages/conftest.py b/tests/storages/conftest.py new file mode 100644 index 0000000..b496203 --- /dev/null +++ b/tests/storages/conftest.py @@ -0,0 +1,29 @@ +import typing as tp + +import pytest +from starlette.requests import Request +from starlette.responses import Response + +from fast_cache_middleware.serializers import Metadata + + +@pytest.fixture +def mock_request() -> Request: + return Request(scope={"type": "http", "method": "GET", "path": "/test"}) + + +@pytest.fixture +def mock_response() -> Response: + return Response(content="test content", status_code=200) + + +@pytest.fixture +def mock_metadata() -> Metadata: + return {"test": "value"} + + +@pytest.fixture +def mock_store_data( + mock_request: Request, mock_response: Response, mock_metadata: Metadata +) -> tp.Tuple[Response, Request, Metadata]: + return mock_response, mock_request, mock_metadata diff --git a/tests/storages/test_in_memory_storage.py b/tests/storages/test_in_memory_storage.py index 2483287..58b9c95 100644 --- a/tests/storages/test_in_memory_storage.py +++ b/tests/storages/test_in_memory_storage.py @@ -9,30 +9,9 @@ from 
starlette.requests import Request from starlette.responses import Response +from fast_cache_middleware.exceptions import StorageError from fast_cache_middleware.serializers import Metadata -from fast_cache_middleware.storages import InMemoryStorage, StorageError - - -@pytest.fixture -def mock_request() -> Request: - return Request(scope={"type": "http", "method": "GET", "path": "/test"}) - - -@pytest.fixture -def mock_response() -> Response: - return Response(content="test content", status_code=200) - - -@pytest.fixture -def mock_metadata() -> Metadata: - return {"test": "value"} - - -@pytest.fixture -def mock_store_data( - mock_request: Request, mock_response: Response, mock_metadata: Metadata -) -> tp.Tuple[Response, Request, Metadata]: - return mock_response, mock_request, mock_metadata +from fast_cache_middleware.storages import InMemoryStorage @pytest.mark.parametrize( diff --git a/tests/storages/test_redis_storage.py b/tests/storages/test_redis_storage.py new file mode 100644 index 0000000..e10b5cf --- /dev/null +++ b/tests/storages/test_redis_storage.py @@ -0,0 +1,156 @@ +import re +from typing import cast +from unittest.mock import AsyncMock, MagicMock + +import pytest +from starlette.requests import Request +from starlette.responses import Response + +from fast_cache_middleware.exceptions import StorageError +from fast_cache_middleware.serializers import JSONSerializer +from fast_cache_middleware.storages import RedisStorage + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "ttl, expect_error", + [ + (60.0, None), + (None, None), + (-1, StorageError), + (0, StorageError), + ], +) +async def test_redis_storage_init_validation(ttl, expect_error): + mock_redis = AsyncMock() + + if expect_error: + with pytest.raises(expect_error): + RedisStorage(redis_client=mock_redis, ttl=ttl) + else: + storage = RedisStorage(redis_client=mock_redis, ttl=ttl) + assert storage._ttl == ttl + assert isinstance(storage._serializer, JSONSerializer) + + +@pytest.mark.asyncio 
+async def test_store_and_retrieve_works(): + mock_redis = AsyncMock() + + mock_serializer = MagicMock() + serialized_value = b"serialized" + mock_serializer.dumps = AsyncMock(return_value=serialized_value) + mock_serializer.loads = MagicMock( + return_value=("deserialized_response", "req", {"meta": "data"}) + ) + + storage = RedisStorage(redis_client=mock_redis, ttl=1, serializer=mock_serializer) + + request = Request(scope={"type": "http", "method": "GET", "path": "/test"}) + response = Response(content="hello", status_code=200) + metadata: dict[str, str | int] = {} + + mock_redis.exists.return_value = False + + await storage.store("key1", response, request, metadata) + mock_redis.set.assert_awaited_with("cache:key1", serialized_value, ex=1) + + mock_redis.get.return_value = serialized_value + result = await storage.retrieve("key1") + + assert result == ("deserialized_response", "req", {"meta": "data"}) + + +@pytest.mark.asyncio +async def test_store_overwrites_existing_key(): + mock_redis = AsyncMock() + + mock_serializer = MagicMock() + serialized_value = b"serialized" + mock_serializer.dumps = AsyncMock(return_value=serialized_value) + + storage = RedisStorage(redis_client=mock_redis, ttl=10, serializer=mock_serializer) + + request = Request(scope={"type": "http", "method": "GET", "path": "/overwrite"}) + response = Response(content="updated", status_code=200) + metadata: dict[str, str] = {} + + mock_redis.exists.return_value = True + + await storage.store("existing_key", response, request, metadata) + + mock_redis.delete.assert_awaited_with("cache:existing_key") + mock_redis.set.assert_awaited_with("cache:existing_key", serialized_value, ex=10) + + +@pytest.mark.asyncio +async def test_retrieve_returns_none_on_missing_key(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis) + mock_redis.get.return_value = None + + result = await storage.retrieve("missing") + assert result is None + + +@pytest.mark.asyncio +async def 
test_retrieve_returns_none_on_deserialization_error(): + mock_redis = AsyncMock() + + def raise_error(_): + raise ValueError("bad format") + + mock_serializer = MagicMock() + mock_serializer.loads = raise_error + + mock_serializer.dumps = AsyncMock(return_value=b"serialized") + + storage = RedisStorage(redis_client=mock_redis, serializer=mock_serializer) + + mock_redis.get.return_value = b"invalid" + + result = await storage.retrieve("corrupt") + assert result is None + + +@pytest.mark.asyncio +async def test_remove_by_regex(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis, namespace="myspace") + + pattern = re.compile(r"^/api/.*") + mock_redis.scan.return_value = (0, ["myspace:/api/test1", "myspace:/api/test2"]) + + await storage.remove(pattern) + + mock_redis.delete.assert_any_await("myspace:/api/test1") + mock_redis.delete.assert_any_await("myspace:/api/test2") + assert mock_redis.delete.await_count == 2 + + +@pytest.mark.asyncio +async def test_remove_with_no_matches_logs_warning(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis, namespace="myspace") + + pattern = re.compile(r"^/nothing.*") + mock_redis.scan.return_value = (0, []) + + await storage.remove(pattern) + mock_redis.delete.assert_not_called() + + +@pytest.mark.asyncio +async def test_close_flushes_database(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis) + + await storage.close() + mock_redis.flushdb.assert_awaited_once() + + +def test_full_key(): + mock_redis = AsyncMock() + storage = RedisStorage(redis_client=mock_redis, namespace="custom") + + assert storage._full_key("abc") == "custom:abc" diff --git a/tests/test_serializers.py b/tests/test_serializers.py new file mode 100644 index 0000000..1dbcbe5 --- /dev/null +++ b/tests/test_serializers.py @@ -0,0 +1,112 @@ +import json + +import pytest +from starlette import status +from starlette.requests import Request +from starlette.responses import Response + +from 
fast_cache_middleware.serializers import JSONSerializer, Metadata + + +@pytest.fixture +def test_request() -> Request: + body = b'{"key":"value"}' + + async def receive(): + return {"type": "http.request", "body": body, "more_body": False} + + return Request( + scope={ + "type": "http", + "method": "GET", + "path": "/test", + "headers": [(b"host", b"test.com"), (b"user-agent", b"pytest")], + }, + receive=receive, + ) + + +@pytest.fixture +def test_response() -> Response: + return Response( + content="hello world", status_code=status.HTTP_200_OK, headers={"X-Test": "yes"} + ) + + +@pytest.fixture +def test_metadata() -> Metadata: + return {"meta": "value", "ttl": 123} + + +@pytest.mark.asyncio +async def test_dumps_output_is_valid_json(test_request, test_response, test_metadata): + serializer = JSONSerializer() + + result = await serializer.dumps(test_response, test_request, test_metadata) + parsed = json.loads(result) + + assert "response" in parsed + assert "request" in parsed + assert "metadata" in parsed + + assert parsed["response"]["status_code"] == status.HTTP_200_OK + assert parsed["response"]["content"] == "hello world" + assert parsed["response"]["headers"]["x-test"] == "yes" + + assert parsed["request"]["method"] == "GET" + assert parsed["request"]["headers"]["host"] == "test.com" + assert parsed["metadata"]["ttl"] == 123 + + +@pytest.mark.asyncio +async def test_loads_reconstructs_response_request( + test_request, test_response, test_metadata +): + serializer = JSONSerializer() + + json_data = await serializer.dumps(test_response, test_request, test_metadata) + response, request, metadata = serializer.loads(json_data) + + assert isinstance(response, Response) + assert response.body == b"hello world" + assert response.status_code == status.HTTP_200_OK + assert response.headers["x-test"] == "yes" + + assert isinstance(request, Request) + assert request.method == "GET" + assert request.url.path == "/test" + assert request.headers["host"] == "test.com" + 
assert request.headers["user-agent"] == "pytest" + + assert metadata == test_metadata + + +@pytest.mark.asyncio +async def test_loads_accepts_bytes_input(test_request, test_response, test_metadata): + serializer = JSONSerializer() + + json_data_str = await serializer.dumps(test_response, test_request, test_metadata) + json_data_bytes = str(json_data_str).encode("utf-8") + + response, request, metadata = serializer.loads(json_data_bytes) + + assert isinstance(response, Response) + assert isinstance(request, Request) + assert metadata == test_metadata + + +@pytest.mark.asyncio +async def test_dumps_handles_empty_body(test_request, test_metadata): + response = Response(status_code=status.HTTP_204_NO_CONTENT) + serializer = JSONSerializer() + + json_str = await serializer.dumps(response, test_request, test_metadata) + parsed = json.loads(json_str) + + assert parsed["response"]["status_code"] == status.HTTP_204_NO_CONTENT + assert parsed["response"]["content"] is None + + +def test_is_binary_property(): + serializer = JSONSerializer() + assert not serializer.is_binary