Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 85 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,62 @@ except InvalidDimensionError as e:
print(f"Available dimensions: {e.available_dimensions}")
```

### Caching for Performance

Enable LRU caching to improve performance when you have many dimensions or high-frequency lookups with repeated action + scope combinations.

```python
# Enable cache on initialization
dispatcher = ActionDispatcher(
dimensions=['region', 'platform', 'version', 'tier'],
enable_cache=True,
cache_maxsize=512
)

@dispatcher.handler("get_data", region="asia", platform="mobile")
def get_data_asia_mobile(params):
return "data for asia mobile"

# First lookup - cache miss
result = dispatcher.get_handler("get_data", region="asia", platform="mobile")

# Second lookup - cache hit (faster)
result = dispatcher.get_handler("get_data", region="asia", platform="mobile")

# Check cache statistics
info = dispatcher.cache_info()
print(f"Cache hits: {info['hits']}, misses: {info['misses']}")
# Output: Cache hits: 1, misses: 1
```

#### Runtime Cache Control

```python
# Create dispatcher without cache
dispatcher = ActionDispatcher(dimensions=['platform'])

# Enable cache later
dispatcher.enable_cache(maxsize=256)
print(dispatcher.is_cache_enabled) # True

# Clear cache manually (useful after bulk handler registration)
dispatcher.clear_cache()

# Disable cache when no longer needed
dispatcher.disable_cache()
print(dispatcher.is_cache_enabled) # False
```

#### When to Use Caching

| Scenario | Recommendation |
|----------|----------------|
| 2-5 dimensions | Cache optional |
| 5-10 dimensions | Consider enabling cache |
| 10+ dimensions | Recommend enabling cache |
| High QPS (>10K) | Recommend enabling cache |
| Dynamic handler registration | Cache auto-invalidates on registration |

## Real-World Examples

### Web API with Role-Based Access Control
Expand Down Expand Up @@ -195,10 +251,12 @@ result = plugin_dispatcher.dispatch(context, "transform_data", data=input_data)

### ActionDispatcher

#### `__init__(dimensions=None)`
Create a new dispatcher with optional dimensions.
#### `__init__(dimensions=None, enable_cache=False, cache_maxsize=256)`
Create a new dispatcher with optional dimensions and caching.

- `dimensions` (list, optional): List of dimension names for routing
- `enable_cache` (bool, optional): Enable LRU cache for handler lookups (default: False)
- `cache_maxsize` (int, optional): Maximum size of the LRU cache (default: 256)

#### `@handler(action, **kwargs)`
Decorator to register a handler for specific action and dimensions.
Expand All @@ -225,6 +283,31 @@ Dispatch an action based on context.
- `action_name` (str): Action to dispatch
- `**kwargs`: Additional parameters passed to handler

#### Cache Methods

##### `enable_cache(maxsize=None)`
Enable LRU cache for handler lookups at runtime.

- `maxsize` (int, optional): Maximum cache size (uses existing value if None)

##### `disable_cache()`
Disable cache and clear cached data.

##### `clear_cache()`
Clear all cached handler lookups.

##### `cache_info()`
Get cache statistics. Returns `None` if cache is disabled.

Returns a dict with:
- `hits`: Number of cache hits
- `misses`: Number of cache misses
- `maxsize`: Maximum cache size
- `currsize`: Current number of cached items

##### `is_cache_enabled`
Property that returns `True` if cache is currently enabled.

### Exceptions

- `ActionDispatchError`: Base exception class
Expand Down
2 changes: 2 additions & 0 deletions action_dispatch/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,13 +36,15 @@
InvalidActionError,
InvalidDimensionError,
)
from .mixins import CacheMixin

__version__ = "0.1.1"
__author__ = "Eowl"
__email__ = "eowl@me.com"

__all__ = [
"ActionDispatcher",
"CacheMixin",
"ActionDispatchError",
"InvalidDimensionError",
"HandlerNotFoundError",
Expand Down
39 changes: 37 additions & 2 deletions action_dispatch/action_dispatcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
from functools import partial
from typing import Any, Callable, Optional, Union

from .mixins import CacheMixin
from .mixins.cache import DEFAULT_CACHE_MAXSIZE

try:
from .exceptions import (
HandlerNotFoundError,
Expand All @@ -13,12 +16,27 @@
pass


class ActionDispatcher:
class ActionDispatcher(CacheMixin):
"""Action dispatcher with multi-dimensional routing and optional LRU cache."""

dimensions: list[str]
registry: dict[str, Any]
global_handlers: dict[str, Callable[[dict[str, Any]], Any]]

def __init__(self, dimensions: Optional[list[str]] = None) -> None:
def __init__(
self,
dimensions: Optional[list[str]] = None,
enable_cache: bool = False,
cache_maxsize: int = DEFAULT_CACHE_MAXSIZE,
) -> None:
"""
Initialize ActionDispatcher.

Args:
dimensions: List of dimension names for routing.
enable_cache: Whether to enable LRU cache (default: False).
cache_maxsize: Maximum cache size (default: 256).
"""
if dimensions is not None and not isinstance(dimensions, list):
warnings.warn(
f"ActionDispatcher dimensions should be a list, got "
Expand All @@ -31,6 +49,9 @@ def __init__(self, dimensions: Optional[list[str]] = None) -> None:
self.registry = self._create_nested_dict(len(self.dimensions))
self.global_handlers = {}

# Initialize cache from mixin
self._init_cache(enable_cache, cache_maxsize)

self._create_dynamic_methods()

def _create_nested_dict(
Expand Down Expand Up @@ -116,9 +137,19 @@ def _register_handler(

current_level[action] = handler

# Invalidate cache when new handler is registered
self._invalidate_cache()

def _find_handler(
    self, action: str, scope_kwargs: dict[str, Any]
) -> Optional[Callable[[dict[str, Any]], Any]]:
    """Find a handler for *action*; delegates to CacheMixin's cached lookup.

    Args:
        action: Action name to resolve.
        scope_kwargs: Dimension name -> value pairs restricting the match.

    Returns:
        The matched handler callable, or None if nothing matches.
    """
    return self._find_handler_with_cache(action, scope_kwargs)

def _match_handler(
self, action: str, scope_kwargs: dict[str, Any]
) -> Optional[Callable[[dict[str, Any]], Any]]:
"""Match handler based on action and scope dimensions."""
if action in self.global_handlers:
return self.global_handlers[action]
if not self.dimensions:
Expand Down Expand Up @@ -148,8 +179,12 @@ def _find_handler(
def register_global(
    self, action: str, handler: Callable[[dict[str, Any]], Any]
) -> None:
    """Register a global handler for an action.

    Global handlers are checked before dimension-based routing in
    _match_handler, so they apply regardless of scope.

    Args:
        action: Action name to register.
        handler: Callable invoked with a params dict.
    """
    self.global_handlers[action] = handler

    # Previously cached lookups may now be stale — drop them.
    self._invalidate_cache()

def global_handler(
self, action: str
) -> Callable[[Callable[[dict[str, Any]], Any]], Callable[[dict[str, Any]], Any]]:
Expand Down
5 changes: 5 additions & 0 deletions action_dispatch/mixins/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
"""Mixins for ActionDispatcher."""

from .cache import CacheMixin

__all__ = ["CacheMixin"]
129 changes: 129 additions & 0 deletions action_dispatch/mixins/cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
"""Cache mixin for ActionDispatcher."""

from functools import lru_cache
from typing import Any, Callable, Optional

# Default maximum size for the LRU handler-lookup cache.
DEFAULT_CACHE_MAXSIZE = 256


class CacheMixin:
    """
    Mixin class that adds LRU cache support to ActionDispatcher.

    This mixin provides optional caching for handler lookups,
    which can improve performance when:
    - You have many dimensions (10+)
    - You have high-frequency repeated lookups
    - The same action + scope combinations are called frequently

    Usage:
        dispatcher = ActionDispatcher(
            dimensions=["region", "platform", "version"],
            enable_cache=True,
            cache_maxsize=512  # or use default: DEFAULT_CACHE_MAXSIZE
        )
    """

    # Class-level defaults keep the mixin safe (no AttributeError from
    # is_cache_enabled / clear_cache / cache_info) even if a subclass
    # forgets to call _init_cache() in its __init__.
    _cache_enabled: bool = False
    _cache_maxsize: int = DEFAULT_CACHE_MAXSIZE
    _cached_find_handler: Any = None  # lru_cache-wrapped closure, or None

    def _init_cache(
        self,
        enable_cache: bool = False,
        cache_maxsize: int = DEFAULT_CACHE_MAXSIZE,
    ) -> None:
        """
        Initialize cache configuration.

        Args:
            enable_cache: Whether to build the LRU cache immediately.
            cache_maxsize: Maximum number of cached lookups.
        """
        self._cache_enabled = False
        self._cache_maxsize = cache_maxsize
        self._cached_find_handler = None

        if enable_cache:
            self._setup_cache()
            self._cache_enabled = True

    def _setup_cache(self) -> None:
        """(Re)build the LRU-cached lookup closure; any existing entries
        and hit/miss statistics are discarded."""

        @lru_cache(maxsize=self._cache_maxsize)
        def cached_find(
            action: str, scope_tuple: tuple[tuple[str, Any], ...]
        ) -> Optional[Callable[[dict[str, Any]], Any]]:
            # Rehydrate the dict form expected by _match_handler.
            return self._match_handler(action, dict(scope_tuple))

        self._cached_find_handler = cached_find

    def _find_handler_with_cache(
        self, action: str, scope_kwargs: dict[str, Any]
    ) -> Optional[Callable[[dict[str, Any]], Any]]:
        """
        Find a handler, consulting the LRU cache when enabled.

        Falls back to an uncached _match_handler call when the cache is
        disabled, or when a scope value is unhashable (lru_cache keys
        must be hashable, so e.g. a list value would otherwise raise
        TypeError from inside the cache machinery).
        """
        if self._cache_enabled and self._cached_find_handler is not None:
            # Sort items so {"a": 1, "b": 2} and {"b": 2, "a": 1}
            # produce the same cache key.
            scope_tuple = tuple(sorted(scope_kwargs.items()))
            try:
                hash(scope_tuple)
            except TypeError:
                # Unhashable scope value — bypass the cache for this call.
                return self._match_handler(action, scope_kwargs)
            result: Optional[Callable[[dict[str, Any]], Any]] = (
                self._cached_find_handler(action, scope_tuple)
            )
            return result

        return self._match_handler(action, scope_kwargs)

    def _match_handler(
        self, action: str, scope_kwargs: dict[str, Any]
    ) -> Optional[Callable[[dict[str, Any]], Any]]:
        """
        Match handler based on action and scope dimensions.

        This method should be overridden by the main class.
        """
        raise NotImplementedError("Subclass must implement _match_handler")

    def _invalidate_cache(self) -> None:
        """Drop cached lookups; called when handler registration changes."""
        if self._cache_enabled:
            self.clear_cache()

    def enable_cache(self, maxsize: Optional[int] = None) -> None:
        """
        Enable LRU cache for handler lookup.

        Note: always rebuilds the cache, discarding existing entries
        and statistics.

        Args:
            maxsize: Maximum cache size (uses existing value if None).
        """
        if maxsize is not None:
            self._cache_maxsize = maxsize
        self._setup_cache()
        self._cache_enabled = True

    def disable_cache(self) -> None:
        """Disable cache; cached data is released for garbage collection."""
        self._cache_enabled = False
        self._cached_find_handler = None

    def clear_cache(self) -> None:
        """Clear all cached handler lookups (no-op if cache never built)."""
        if self._cached_find_handler is not None:
            self._cached_find_handler.cache_clear()

    def cache_info(self) -> Optional[dict[str, int]]:
        """
        Get cache statistics.

        Returns:
            Dict with hits, misses, maxsize, currsize, or None if disabled.
        """
        if not self._cache_enabled or self._cached_find_handler is None:
            return None

        info = self._cached_find_handler.cache_info()
        return {
            "hits": info.hits,
            "misses": info.misses,
            "maxsize": info.maxsize or 0,
            "currsize": info.currsize,
        }

    @property
    def is_cache_enabled(self) -> bool:
        """True if the LRU cache is currently active."""
        return self._cache_enabled
Loading