High-performance Python cache system with hierarchical scopes, pluggable storage backends, and intelligent eviction policies
Solves critical cache problems in enterprise applications: hierarchical data isolation (organization/user/tenant), flexible storage backends (In-Memory, Redis, MongoDB), intelligent memory management with LRU/TTL policies, and thread-safe operations with high performance.
Flexible Architecture with pluggable storage providers
Zero configuration for common use cases
Production-ready with comprehensive eviction policies
🚀 Performance: Optimized for high-throughput operations with <1ms latency
🏢 Multi-Tenant: Hierarchical scope isolation (global → organization → user → session)
🔌 Pluggable Storage: In-Memory, Redis, MongoDB support out-of-the-box
⚡ Async/Sync: Full support for synchronous and asynchronous code
🧠 Smart Eviction: LRU, TTL, and max-items policies with rule composition
📊 Observability: Detailed metrics for hits, misses, evictions, and storage stats
# Basic installation (in-memory only)
pip install cacheado
# With Redis support
pip install cacheado[redis]
# With MongoDB support
pip install cacheado[mongodb]
# With all backends
pip install cacheado[all]
# For development
pip install cacheado[dev]

from cache import Cache
# Instant creation with default in-memory storage
cache = Cache()
# Simple cache with decorator
@cache.cache(ttl_seconds=300)
def expensive_calculation(x, y):
import time
time.sleep(2) # Simulates expensive operation
return x * y
# First call: 2 seconds
result = expensive_calculation(10, 20) # 200
# Second call: <1ms (cache hit!)
result = expensive_calculation(10, 20) # 200 (from cache)

from cache import Cache
from utils.cache_scope_config import ScopeConfig, ScopeLevel
# Configure hierarchical scopes
scope_config = ScopeConfig([
ScopeLevel("organization", "org_id", [
ScopeLevel("user", "user_id")
])
])
cache = Cache(scope_config=scope_config)
# Cache isolated by organization and user
@cache.cache(ttl_seconds=600, scope="user")
def get_user_data(user_id, org_id=None):
return fetch_from_database(user_id)
# Data automatically isolated by scope
user_data_org1 = get_user_data("123", org_id="org1")
user_data_org2 = get_user_data("123", org_id="org2")
# Different caches, same user_id!

Note: In-Memory storage is included by default. For Redis or MongoDB, install the respective extras.
from cache import Cache
from storages.in_memory import InMemory
cache = Cache(storage_provider=InMemory())

# Install Redis support
pip install cacheado[redis]

from cache import Cache
from storages.redis import RedisStorage
redis_storage = RedisStorage(
connection_string="redis://localhost:6379",
db=0
)
cache = Cache(storage_provider=redis_storage)

# Install MongoDB support
pip install cacheado[mongodb]

from cache import Cache
from storages.mongodb import MongoDBStorage
mongo_storage = MongoDBStorage(
connection_string="mongodb://localhost:27017",
db_name="cache_db",
collection_name="cache_collection"
)
cache = Cache(storage_provider=mongo_storage)

from cache import Cache
from storages.in_memory import InMemory
from storages.rules.lru_evict import LRUEvict
storage = InMemory()
lru_rule = LRUEvict(max_items=1000)
cache = Cache(storage_provider=storage, storage_rules=[lru_rule])

from storages.rules.lifetime_evict import LifeTimeEvict
storage = InMemory()
ttl_rule = LifeTimeEvict()
cache = Cache(storage_provider=storage, storage_rules=[ttl_rule])
# Items expire automatically based on TTL
cache.set("key1", "value1", ttl_seconds=60)

from storages.rules.max_items_evict import MaxItemsEvict
storage = InMemory()
max_items_rule = MaxItemsEvict(max_items=500)
cache = Cache(storage_provider=storage, storage_rules=[max_items_rule])

# Combine LRU + TTL for optimal memory management
storage = InMemory()
lru_rule = LRUEvict(max_items=1000)
ttl_rule = LifeTimeEvict()
cache = Cache(
storage_provider=storage,
storage_rules=[lru_rule, ttl_rule]
)

stats = cache.stats()
print(stats)
# {
# "hits": 1250,
# "misses": 180,
# "evictions": 45,
# "storage_type": "in_memory",
# "total_keys": 8934
# }

# Monitor cache effectiveness
stats = cache.stats()
hit_rate = stats["hits"] / (stats["hits"] + stats["misses"]) * 100
print(f"Cache hit rate: {hit_rate:.2f}%")

import asyncio
# Native support for async/await
@cache.cache(ttl_seconds=180, scope="global")
async def fetch_api_data(endpoint):
async with httpx.AsyncClient() as client:
response = await client.get(endpoint)
return response.json()
# Async programmatic operations
await cache.aset("key1", "value1", ttl_seconds=300)
value = await cache.aget("key1")
await cache.aevict("key1")
await cache.aclear()

# Direct cache operations
cache.set("user_settings", {"theme": "dark"}, ttl_seconds=3600,
scope="user", org_id="org_123", user_id="user_456")
settings = cache.get("user_settings",
scope="user", org_id="org_123", user_id="user_456")
# Evict specific key
cache.evict("user_settings", scope="user", org_id="org_123", user_id="user_456")

# Remove all data from an organization
count = cache.evict_by_scope("organization", org_id="org_123")
print(f"Evicted {count} items")
# Remove data from a specific user
count = cache.evict_by_scope("user", org_id="org_123", user_id="user_456")

@cache.cache(ttl_seconds=300, scope="user")
def get_user_preferences(user_id, org_id=None):
# org_id is automatically extracted for scope resolution
return load_preferences(user_id)
# Scope parameters extracted from kwargs
prefs = get_user_preferences("user_123", org_id="org_456")

from utils.cache_scope_config import ScopeConfig, ScopeLevel
# Configure complex hierarchies
scope_config = ScopeConfig([
ScopeLevel("organization", "org_id", [
ScopeLevel("department", "dept_id", [
ScopeLevel("user", "user_id", [
ScopeLevel("session", "session_id")
])
])
])
])
cache = Cache(scope_config=scope_config)
# Use nested scopes
@cache.cache(ttl_seconds=600, scope="session")
def get_session_data(session_id, org_id=None, dept_id=None, user_id=None):
return fetch_session_data(session_id)

from protocols.storage_provider import IStorageProvider
class CustomStorage(IStorageProvider):
def get(self, key: str):
# Implement custom get logic
pass
def set(self, key: str, value: Any, ttl_seconds: float):
# Implement custom set logic
pass
# Implement other required methods...
cache = Cache(storage_provider=CustomStorage())

from protocols.storage_rule import IStorageRule
from utils.cache_types import RuleSideEffect, StorageRuleAction
class CustomRule(IStorageRule):
def on_get(self, key: str):
# Custom logic on get
return None
def on_set(self, key: str, value: Any, ttl_seconds: float):
# Custom logic on set
return None
# Implement other required methods...
cache = Cache(storage_rules=[CustomRule()])

# Run all tests
make test
# Tests with coverage
make test-coverage
# Run specific test file
python -m pytest tests/test_cache.py -v
# Run with coverage report
python -m pytest --cov=. --cov-report=html --cov-report=term-missing

cache/
├── cache.py                    # Main Cache class
├── protocols/                  # Protocol definitions
│   ├── storage_provider.py     # Storage backend interface
│   └── storage_rule.py         # Eviction rule interface
├── storages/                   # Storage implementations
│   ├── in_memory.py            # In-memory storage
│   ├── redis.py                # Redis storage
│   ├── mongodb.py              # MongoDB storage
│   ├── rule_aware_storage.py   # Rule decorator
│   └── rules/                  # Eviction policies
│       ├── lifetime_evict.py   # TTL-based eviction
│       ├── lru_evict.py        # LRU eviction
│       └── max_items_evict.py  # Max items eviction
├── utils/                      # Utilities
│   ├── cache_types.py          # Type definitions
│   └── cache_scope_config.py   # Scope configuration
└── tests/                      # Test suite
    ├── test_cache.py
    ├── test_in_memory.py
    ├── test_redis.py
    ├── test_mongodb.py
    └── ...
Uses Python protocols for loose coupling and easy extensibility.
Storage providers and rules are injected, enabling flexible composition.
- Cache: High-level API and decorator logic
- Storage: Data persistence and retrieval
- Rules: Eviction policies and side effects
- Scopes: Hierarchical key resolution
All storage operations are atomic and thread-safe.
Full support for async/await with non-blocking operations.
This project is open source and available under the MIT license.
-
Fork and Clone
git clone https://github.com/GeorgeOgeorge/cacheado.git
cd cacheado
-
Install Dependencies
pip install -r requirements.txt
pip install -r requirements-build.txt
-
Run Tests
make test-coverage
-
Code Quality
make lint
make format
-
Submit Pull Request
- Maintain test coverage >90%
- Follow code standards (Black + isort + flake8)
- Add tests for new features
- Update documentation
- Redis and MongoDB require external services
- Async operations use asyncio.to_thread for sync storage backends
- Scope validation happens at runtime, not compile-time
Built with ❤️ for high-performance Python applications