# Reconstructed from a unified diff: new file examples/ultimate_showcase.py.
"""Ultimate showcase of all uf features.

This example demonstrates every major feature of the uf package:
async function support, Pydantic model integration, result rendering
(tables, charts, images), call history and presets, authentication and
authorization, caching, background task execution, OpenAPI/Swagger
documentation, webhooks, theme customization, field interactions, and
function grouping.

Run with: python examples/ultimate_showcase.py
"""

import asyncio
from datetime import datetime, date, timedelta
from pathlib import Path
from typing import Optional, List
import json

# Pydantic is optional: the Pydantic-backed demos are skipped without it.
try:
    from pydantic import BaseModel, Field, EmailStr
    HAS_PYDANTIC = True
except ImportError:
    HAS_PYDANTIC = False

from uf import (
    # Core
    mk_rjsf_app,
    UfApp,
    # Organization
    FunctionGroup,
    mk_grouped_app,
    # Decorators
    ui_config,
    group,
    field_config,
    with_example,
    requires_auth,
    rate_limit,
    # Field interactions
    with_dependencies,
    FieldDependency,
    DependencyAction,
    # Async support
    is_async_function,
    timeout_async,
    retry_async,
    # Pydantic support
    wrap_pydantic_function,
    # Renderers
    result_renderer,
    register_renderer,
    get_global_renderer_registry,
    # History
    enable_history,
    get_global_history_manager,
    # Authentication
    DictAuthBackend,
    require_auth,
    User,
    # Caching
    cached,
    MemoryCache,
    get_global_cache_backend,
    # Background tasks
    background,
    get_global_task_queue,
    TaskQueue,
    # OpenAPI
    add_openapi_routes,
    OpenAPIConfig,
    # Webhooks
    webhook,
    WebhookManager,
    get_global_webhook_manager,
    # Themes
    ThemeConfig,
    DARK_THEME,
)


# =============================================================================
# 1. PYDANTIC MODELS
# =============================================================================

if HAS_PYDANTIC:
    class UserProfile(BaseModel):
        """User profile with validation."""

        username: str = Field(..., min_length=3, max_length=20, description="Username (3-20 chars)")
        email: EmailStr = Field(..., description="Valid email address")
        age: int = Field(..., gt=0, lt=150, description="Age in years")
        bio: Optional[str] = Field(None, max_length=500, description="Short bio")
        is_active: bool = Field(True, description="Account active?")

    class DataQuery(BaseModel):
        """Query parameters for data analysis."""

        start_date: date = Field(..., description="Start date")
        end_date: date = Field(..., description="End date")
        metric: str = Field(..., description="Metric to analyze")
        granularity: str = Field('daily', description="Time granularity")


# =============================================================================
# 2. AUTHENTICATION SETUP
# =============================================================================

# In-memory demo backend with three test accounts of decreasing privilege.
auth_backend = DictAuthBackend.from_dict({
    'admin': {
        'password': 'admin123',
        'roles': ['admin', 'user'],
        'permissions': ['read', 'write', 'delete']
    },
    'user': {
        'password': 'user123',
        'roles': ['user'],
        'permissions': ['read', 'write']
    },
    'viewer': {
        'password': 'view123',
        'roles': ['viewer'],
        'permissions': ['read']
    }
})


# =============================================================================
# 3. CACHE SETUP
# =============================================================================

# Memory cache bounded to 100 entries.
cache = MemoryCache(max_size=100)
# =============================================================================
# 4. BACKGROUND TASK QUEUE
# =============================================================================

# Two worker threads service background jobs for this demo.
task_queue = TaskQueue(num_workers=2)
task_queue.start()


# =============================================================================
# 5. WEBHOOK MANAGER
# =============================================================================

webhook_manager = WebhookManager()
# In production, you would add webhook URLs:
# webhook_manager.add_webhook('https://example.com/webhook', events=['success'])


# =============================================================================
# 6. BASIC FUNCTIONS (Group: Utilities)
# =============================================================================

@group('Utilities')
@ui_config(
    title='Add Numbers',
    description='Add two numbers together',
)
@field_config('x', title='First Number', description='Enter the first number')
@field_config('y', title='Second Number', description='Enter the second number')
@with_example(x=10, y=20, example_name='Ten plus twenty')
@cached(ttl=300)  # Cache for 5 minutes
@result_renderer('json')
def add(x: int, y: int) -> dict:
    """Add two numbers and return detailed result."""
    result = x + y
    return {
        'operation': 'addition',
        'operands': [x, y],
        'result': result,
        'is_even': result % 2 == 0,
        'timestamp': datetime.now().isoformat()
    }


@group('Utilities')
@ui_config(title='Calculate Statistics')
@result_renderer('table')
def calculate_stats(numbers: str) -> list[dict]:
    """Calculate statistics from a comma-separated list of numbers.

    Returns a table of statistical measures.

    Raises:
        ValueError: If the input contains no parseable numbers.
    """
    # FIX: skip blank tokens so '1,,2' or trailing commas work, and raise a
    # clear error on empty input instead of float('') -> ValueError('').
    nums = [float(tok) for tok in (t.strip() for t in numbers.split(',')) if tok]
    if not nums:
        raise ValueError('No numbers provided; expected a comma-separated list')

    return [
        {'metric': 'Count', 'value': len(nums)},
        {'metric': 'Sum', 'value': sum(nums)},
        {'metric': 'Mean', 'value': sum(nums) / len(nums)},
        {'metric': 'Min', 'value': min(nums)},
        {'metric': 'Max', 'value': max(nums)},
        {'metric': 'Range', 'value': max(nums) - min(nums)},
    ]


# =============================================================================
# 7. ASYNC FUNCTIONS (Group: Async Operations)
# =============================================================================

@group('Async Operations')
@ui_config(title='Async Data Fetch')
@timeout_async(5.0)  # 5 second timeout
@retry_async(max_retries=3, delay=1.0)
@cached(ttl=60)
async def fetch_data(endpoint: str, timeout: float = 2.0) -> dict:
    """Fetch data from an API endpoint (simulated).

    Demonstrates async support with timeout and retry.
    """
    # Simulate async API call latency.
    await asyncio.sleep(timeout)

    return {
        'endpoint': endpoint,
        'status': 'success',
        'data': {
            'items': ['item1', 'item2', 'item3'],
            'count': 3,
            'fetched_at': datetime.now().isoformat()
        },
        'latency_ms': timeout * 1000
    }


@group('Async Operations')
@ui_config(title='Async Batch Processing')
async def process_batch(item_count: int = 5, delay_per_item: float = 0.5) -> dict:
    """Process multiple items concurrently.

    Demonstrates concurrent async execution.
    """
    async def process_item(item_id: int):
        await asyncio.sleep(delay_per_item)
        return f"Item {item_id} processed"

    # All items run concurrently, so wall time ~= delay_per_item, not the sum.
    results = await asyncio.gather(*[process_item(i) for i in range(item_count)])

    return {
        'total_items': item_count,
        'results': results,
        'total_time_seconds': delay_per_item,  # Concurrent, not sequential
        'completed_at': datetime.now().isoformat()
    }
# =============================================================================
# 8. PYDANTIC FUNCTIONS (Group: Data Management)
# =============================================================================

if HAS_PYDANTIC:
    @group('Data Management')
    @ui_config(title='Create User Profile')
    @result_renderer('json')
    def create_user(profile: UserProfile) -> dict:
        """Create a user profile with full validation.

        Demonstrates Pydantic integration: the form and validation are
        generated from the model automatically.
        """
        return {
            'status': 'created',
            'profile': profile.dict(),
            'validation': 'All fields validated successfully',
            'created_at': datetime.now().isoformat()
        }

    # Wrap so the raw form payload is coerced into the Pydantic model.
    create_user = wrap_pydantic_function(create_user)

    @group('Data Management')
    @ui_config(title='Analyze Data Range')
    @result_renderer('table')
    def analyze_data_range(query: DataQuery) -> list[dict]:
        """Analyze data for a specific date range.

        Demonstrates Pydantic models with date fields.
        """
        span_days = (query.end_date - query.start_date).days + 1

        return [
            {'field': 'Metric', 'value': query.metric},
            {'field': 'Start Date', 'value': query.start_date.isoformat()},
            {'field': 'End Date', 'value': query.end_date.isoformat()},
            {'field': 'Days', 'value': span_days},
            {'field': 'Granularity', 'value': query.granularity},
            {'field': 'Data Points', 'value': span_days if query.granularity == 'daily' else span_days // 7},
        ]

    # Wrap so the raw form payload is coerced into the Pydantic model.
    analyze_data_range = wrap_pydantic_function(analyze_data_range)


# =============================================================================
# 9. AUTHENTICATED FUNCTIONS (Group: Admin)
# =============================================================================

@group('Admin')
@ui_config(title='View System Status')
@require_auth(auth_backend, roles=['admin', 'user'])
@result_renderer('table')
def get_system_status() -> list[dict]:
    """View system status (requires authentication).

    Accessible by: admin, user
    """
    return [
        {'component': 'Web Server', 'status': 'Running', 'uptime_hours': 48},
        {'component': 'Database', 'status': 'Running', 'uptime_hours': 240},
        {'component': 'Cache', 'status': 'Running', 'uptime_hours': 48},
        {'component': 'Task Queue', 'status': 'Running', 'uptime_hours': 48},
    ]


@group('Admin')
@ui_config(title='Delete Old Data')
@require_auth(auth_backend, roles=['admin'], permissions=['delete'])
@with_example(days_old=30, example_name='Delete 30-day old data')
def delete_old_data(days_old: int = 30, confirm: bool = False) -> dict:
    """Delete data older than specified days (admin only).

    Requires admin role and delete permission; a dry run is returned
    unless `confirm` is set.
    """
    if not confirm:
        return {
            'status': 'cancelled',
            'message': 'Confirmation required to delete data',
            'would_delete': f'Data older than {days_old} days'
        }

    return {
        'status': 'deleted',
        'days_old': days_old,
        'deleted_count': 150,  # Simulated
        'deleted_at': datetime.now().isoformat()
    }


# =============================================================================
# 10. BACKGROUND TASKS (Group: Background Jobs)
# =============================================================================

@group('Background Jobs')
@ui_config(title='Send Bulk Emails')
@background(task_queue=task_queue)
def send_bulk_emails(recipient_count: int, delay_per_email: float = 1.0) -> dict:
    """Send emails in the background (returns immediately).

    Runs in a background worker thread; the caller receives a task_id
    right away.
    """
    import time

    sent = []
    for idx in range(recipient_count):
        time.sleep(delay_per_email)
        sent.append(f"Email {idx+1} sent to recipient_{idx+1}@example.com")

    return {
        'total_sent': recipient_count,
        'results': sent,
        'completed_at': datetime.now().isoformat()
    }


@group('Background Jobs')
@ui_config(title='Generate Report')
@background(task_queue=task_queue)
@webhook(on=['success', 'failure'], manager=webhook_manager)
def generate_large_report(pages: int = 100, delay_per_page: float = 0.1) -> dict:
    """Generate a large report in the background.

    Demonstrates background tasks + webhooks: a webhook fires on
    completion or failure.
    """
    import time

    for _ in range(pages):
        time.sleep(delay_per_page)

    return {
        'status': 'completed',
        'pages': pages,
        'file_size_mb': pages * 0.5,  # Simulated
        'generated_at': datetime.now().isoformat()
    }


# =============================================================================
# 11. CACHED EXPENSIVE OPERATIONS (Group: Analytics)
# =============================================================================

@group('Analytics')
@ui_config(title='Calculate Prime Numbers')
@cached(ttl=600, backend=cache)  # Cache for 10 minutes
@result_renderer('json')
def calculate_primes(limit: int = 1000) -> dict:
    """Calculate prime numbers up to limit (cached).

    Expensive operation - results are cached for 10 minutes.
    """
    def is_prime(candidate):
        # Trial division up to sqrt(candidate).
        if candidate < 2:
            return False
        return all(candidate % d for d in range(2, int(candidate ** 0.5) + 1))

    primes = [n for n in range(2, limit + 1) if is_prime(n)]

    return {
        'limit': limit,
        'count': len(primes),
        'primes': primes[:20],  # First 20
        'largest': primes[-1] if primes else None,
        'calculated_at': datetime.now().isoformat(),
        'cached': True
    }
# =============================================================================
# 12. FIELD DEPENDENCIES (Group: Forms)
# =============================================================================

@group('Forms')
@ui_config(title='Conditional Shipping Form')
@with_dependencies(
    FieldDependency(
        source_field='needs_shipping',
        target_field='address',
        action=DependencyAction.SHOW,
        condition=lambda value: value == True
    ),
    FieldDependency(
        source_field='needs_shipping',
        target_field='express_delivery',
        action=DependencyAction.SHOW,
        condition=lambda value: value == True
    )
)
def process_order(
    product: str,
    quantity: int,
    needs_shipping: bool = False,
    address: str = '',
    express_delivery: bool = False
) -> dict:
    """Process an order with conditional shipping fields.

    Demonstrates field dependencies: the shipping fields are only shown
    when needs_shipping is True.
    """
    order = {
        'order_id': f'ORD-{datetime.now().strftime("%Y%m%d-%H%M%S")}',
        'product': product,
        'quantity': quantity,
        'needs_shipping': needs_shipping,
        'total_price': quantity * 29.99  # Simulated
    }

    if needs_shipping:
        order['shipping'] = {
            'address': address,
            'express': express_delivery,
            'estimated_days': 1 if express_delivery else 5
        }

    return order


# =============================================================================
# 13. RATE LIMITED FUNCTIONS (Group: API)
# =============================================================================

@group('API')
@ui_config(title='API Endpoint')
@rate_limit(calls=5, period=60)  # 5 calls per minute
def api_call(endpoint: str, method: str = 'GET') -> dict:
    """Make an API call (rate limited to 5/minute).

    Demonstrates rate limiting.
    """
    return {
        'endpoint': endpoint,
        'method': method,
        'status': 200,
        'rate_limit': {
            'limit': 5,
            'period_seconds': 60,
            'remaining': 4  # Simulated
        },
        'timestamp': datetime.now().isoformat()
    }
# =============================================================================
# 14. HISTORY-ENABLED FUNCTIONS (Group: History)
# =============================================================================

@group('History')
@ui_config(title='Search (with history)')
@enable_history(max_calls=50)
def search(query: str, filters: str = '', limit: int = 10) -> dict:
    """Search with automatic history tracking.

    All calls are recorded in history. You can view past searches
    and reuse parameters as presets.
    """
    return {
        'query': query,
        'filters': filters,
        'limit': limit,
        'results_count': 42,  # Simulated
        'search_time_ms': 23,
        'timestamp': datetime.now().isoformat()
    }


# =============================================================================
# 15. CUSTOM RENDERER EXAMPLE (Group: Visualization)
# =============================================================================

@group('Visualization')
@ui_config(title='Generate Chart Data')
@result_renderer('chart')
def generate_chart_data(data_points: int = 10, chart_type: str = 'line') -> dict:
    """Generate data for visualization.

    Returns data in a format suitable for charting libraries.
    """
    import random

    labels = [f'Point {i+1}' for i in range(data_points)]
    values = [random.randint(10, 100) for _ in range(data_points)]

    return {
        'type': chart_type,
        'labels': labels,
        'datasets': [
            {
                'label': 'Sample Data',
                'data': values,
                'backgroundColor': 'rgba(75, 192, 192, 0.2)',
                'borderColor': 'rgba(75, 192, 192, 1)',
                'borderWidth': 1
            }
        ]
    }


# =============================================================================
# CREATE THE APP
# =============================================================================

# Collect all functions, grouped by section.
functions = [
    # Utilities
    add,
    calculate_stats,
    # Async
    fetch_data,
    process_batch,
    # Admin
    get_system_status,
    delete_old_data,
    # Background
    send_bulk_emails,
    generate_large_report,
    # Analytics
    calculate_primes,
    # Forms
    process_order,
    # API
    api_call,
    # History
    search,
    # Visualization
    generate_chart_data,
]

# Add Pydantic functions if available
if HAS_PYDANTIC:
    functions.extend([
        create_user,
        analyze_data_range,
    ])

# Create grouped app with dark theme
app = mk_grouped_app(
    functions,
    page_title='uf Ultimate Showcase',
    theme_config=ThemeConfig(
        default_theme='dark',
        allow_toggle=True,
        available_themes=['light', 'dark', 'ocean', 'sunset']
    ),
    custom_css="""
    body {
        font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
    }

    .app-header {
        background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
        color: white;
        padding: 2rem;
        margin-bottom: 2rem;
        border-radius: 8px;
        box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
    }

    .app-header h1 {
        margin: 0;
        font-size: 2.5rem;
        font-weight: 700;
    }

    .app-header p {
        margin: 0.5rem 0 0 0;
        font-size: 1.1rem;
        opacity: 0.9;
    }

    .feature-badge {
        display: inline-block;
        background: rgba(255, 255, 255, 0.2);
        padding: 0.25rem 0.75rem;
        margin: 0.25rem;
        border-radius: 12px;
        font-size: 0.9rem;
    }
    """
)

# Add OpenAPI documentation
openapi_config = OpenAPIConfig(
    title='uf Ultimate Showcase API',
    version='1.0.0',
    description='Comprehensive demonstration of all uf features',
    enable_swagger=True,
    enable_redoc=True
)

add_openapi_routes(app.app, functions, **openapi_config.to_dict())

# Print startup information
print("=" * 70)
print("uf Ultimate Showcase - All Features Demonstrated")
print("=" * 70)
print("\nFeatures included:")
print("  ✓ Async function support (timeout, retry)")
if HAS_PYDANTIC:
    print("  ✓ Pydantic model integration (auto forms + validation)")
else:
    print("  ⚠ Pydantic not installed (install with: pip install pydantic)")
print("  ✓ Result rendering (JSON, tables, charts)")
print("  ✓ Call history and presets")
print("  ✓ Authentication (3 test users)")
print("  ✓ Caching (memory backend)")
print("  ✓ Background tasks (2 worker threads)")
print("  ✓ OpenAPI/Swagger documentation")
print("  ✓ Webhook integration")
print("  ✓ Theme system (dark mode + 4 themes)")
print("  ✓ Field dependencies")
print("  ✓ Rate limiting")
print("  ✓ Function grouping")
print("\nTest Users:")
print("  • admin / admin123 (full access)")
print("  • user / user123 (read + write)")
print("  • viewer / view123 (read only)")
print("\nDocumentation:")
print("  • Swagger UI: http://localhost:8080/docs")
print("  • ReDoc: http://localhost:8080/redoc")
print("  • OpenAPI Spec: http://localhost:8080/openapi.json")
print("\nStarting server on http://localhost:8080")
print("=" * 70)
print()

# Run the app
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080, debug=True)


# =============================================================================
# File: uf/__init__.py (sections added by the patch)
# =============================================================================

# Authentication
from uf.auth import (
    User,
    AuthBackend,
    DictAuthBackend,
    SessionManager,
    ApiKey,
    ApiKeyManager,
    require_auth,
    get_global_auth_backend,
)

# Caching
from uf.caching import (
    CacheBackend,
    MemoryCache,
    DiskCache,
    cached,
    get_global_cache_backend,
)

# Background tasks
from uf.background import (
    Task,
    TaskStatus,
    TaskQueue,
    background,
    get_global_task_queue,
)

# OpenAPI
from uf.openapi import (
    generate_openapi_spec,
    add_openapi_routes,
    OpenAPIConfig,
)

# Webhooks
from uf.webhooks import (
    WebhookEvent,
    WebhookClient,
    WebhookManager,
    webhook,
    get_global_webhook_manager,
)

# Themes
from uf.themes import (
    Theme,
    ThemeConfig,
    get_theme,
    LIGHT_THEME,
    DARK_THEME,
)

__version__ = "0.0.1"

__all__ = [
    # ... earlier entries unchanged by the patch ...
    "HistoryManager",
    "get_global_history_manager",
    "enable_history",
    # Authentication
    "User",
    "AuthBackend",
    "DictAuthBackend",
    "SessionManager",
    "ApiKey",
    "ApiKeyManager",
    "require_auth",
    "get_global_auth_backend",
    # Caching
    "CacheBackend",
    "MemoryCache",
    "DiskCache",
    "cached",
    "get_global_cache_backend",
    # Background
    "Task",
    "TaskStatus",
    "TaskQueue",
    "background",
    "get_global_task_queue",
    # OpenAPI
    "generate_openapi_spec",
    "add_openapi_routes",
    "OpenAPIConfig",
    # Webhooks
    "WebhookEvent",
    "WebhookClient",
    "WebhookManager",
    "webhook",
    "get_global_webhook_manager",
    # Themes
    "Theme",
    "ThemeConfig",
    "get_theme",
    "LIGHT_THEME",
    "DARK_THEME",
]


# =============================================================================
# File: uf/auth.py (new file)
# =============================================================================
"""Authentication and authorization for uf.

Provides authentication backends, session management, and role-based
access control for uf applications.
"""

from typing import Callable, Optional, Any, Union
from dataclasses import dataclass, field
from datetime import datetime, timedelta
import hashlib
import secrets
import hmac
from functools import wraps


@dataclass
class User:
    """User account information."""

    username: str                 # unique username
    password_hash: str            # salted hash from PasswordHasher
    roles: list[str] = field(default_factory=list)
    permissions: list[str] = field(default_factory=list)
    metadata: dict = field(default_factory=dict)
    created_at: datetime = field(default_factory=datetime.now)
    is_active: bool = True

    def has_role(self, role: str) -> bool:
        """Check if user has a role."""
        return role in self.roles

    def has_permission(self, permission: str) -> bool:
        """Check if user has a permission."""
        return permission in self.permissions

    def has_any_role(self, roles: list[str]) -> bool:
        """Check if user has any of the given roles."""
        return any(role in self.roles for role in roles)

    def has_all_roles(self, roles: list[str]) -> bool:
        """Check if user has all of the given roles."""
        return all(role in self.roles for role in roles)


class PasswordHasher:
    """Password hashing utilities using PBKDF2-HMAC-SHA256."""

    @staticmethod
    def hash_password(password: str, salt: Optional[str] = None) -> str:
        """Hash a password.

        Args:
            password: Plain text password
            salt: Optional salt (generated if not provided)

        Returns:
            Hash string in format: salt$hash
        """
        if salt is None:
            salt = secrets.token_hex(16)

        pwd_hash = hashlib.pbkdf2_hmac(
            'sha256',
            password.encode('utf-8'),
            salt.encode('utf-8'),
            100000  # iterations
        )

        return f"{salt}${pwd_hash.hex()}"

    @staticmethod
    def verify_password(password: str, password_hash: str) -> bool:
        """Verify a password against its hash.

        Args:
            password: Plain text password
            password_hash: Hash string from hash_password()

        Returns:
            True if password matches
        """
        try:
            # Only the salt is needed; re-hash and compare the full strings
            # in constant time to avoid a timing side channel.
            salt = password_hash.split('$', 1)[0]
            new_hash = PasswordHasher.hash_password(password, salt)
            return hmac.compare_digest(new_hash, password_hash)
        except ValueError:
            return False


class AuthBackend:
    """Base authentication backend.

    Subclass this to create custom authentication backends.
    """

    def authenticate(self, username: str, password: str) -> Optional[User]:
        """Return the User if credentials are valid, else None."""
        raise NotImplementedError

    def get_user(self, username: str) -> Optional[User]:
        """Return the User for `username`, or None."""
        raise NotImplementedError

    def create_user(
        self,
        username: str,
        password: str,
        roles: Optional[list[str]] = None,
        permissions: Optional[list[str]] = None,
        **metadata
    ) -> User:
        """Create and return a new User."""
        raise NotImplementedError

    def update_user(self, username: str, **updates) -> bool:
        """Update user fields; return True on success."""
        raise NotImplementedError

    def delete_user(self, username: str) -> bool:
        """Delete a user; return True on success."""
        raise NotImplementedError


class DictAuthBackend(AuthBackend):
    """Simple in-memory dictionary-based authentication.

    Suitable for development and simple applications.

    Example:
        >>> backend = DictAuthBackend()
        >>> backend.create_user('admin', 'secret', roles=['admin'])
        >>> user = backend.authenticate('admin', 'secret')
    """

    def __init__(self):
        """Initialize the backend."""
        self._users: dict[str, User] = {}
        self._hasher = PasswordHasher()

    def authenticate(self, username: str, password: str) -> Optional[User]:
        """Authenticate a user; inactive accounts never authenticate."""
        user = self._users.get(username)
        if not user or not user.is_active:
            return None

        if self._hasher.verify_password(password, user.password_hash):
            return user

        return None

    def get_user(self, username: str) -> Optional[User]:
        """Get a user by username."""
        return self._users.get(username)

    def create_user(
        self,
        username: str,
        password: str,
        roles: Optional[list[str]] = None,
        permissions: Optional[list[str]] = None,
        **metadata
    ) -> User:
        """Create a new user; raises ValueError if the username exists."""
        if username in self._users:
            raise ValueError(f"User '{username}' already exists")

        password_hash = self._hasher.hash_password(password)

        user = User(
            username=username,
            password_hash=password_hash,
            roles=roles or [],
            permissions=permissions or [],
            metadata=metadata,
        )

        self._users[username] = user
        return user

    def update_user(self, username: str, **updates) -> bool:
        """Update user fields; 'password' is re-hashed transparently."""
        user = self._users.get(username)
        if not user:
            return False

        for key, value in updates.items():
            if key == 'password':
                user.password_hash = self._hasher.hash_password(value)
            elif hasattr(user, key):
                setattr(user, key, value)

        return True

    def delete_user(self, username: str) -> bool:
        """Delete a user; return True if it existed."""
        if username in self._users:
            del self._users[username]
            return True
        return False

    @classmethod
    def from_dict(cls, users_data: dict) -> 'DictAuthBackend':
        """Create backend from dictionary.

        Each entry may supply either a plain 'password' (hashed here) or a
        precomputed 'password_hash'; remaining keys are passed to User().

        Args:
            users_data: Dictionary mapping usernames to user info

        Raises:
            ValueError: If an entry has neither 'password' nor 'password_hash'.

        Example:
            >>> backend = DictAuthBackend.from_dict({
            ...     'admin': {'password': 'secret', 'roles': ['admin']},
            ...     'user': {'password': 'pass', 'roles': ['user']},
            ... })
        """
        backend = cls()
        hasher = PasswordHasher()

        for username, user_info in users_data.items():
            # FIX: copy before popping so the caller's dict is not mutated,
            # and don't require 'password' when 'password_hash' is supplied
            # (the old code raised KeyError in that case).
            info = dict(user_info)
            password = info.pop('password', None)
            password_hash = info.pop('password_hash', None)

            if password_hash is None:
                if password is None:
                    raise ValueError(
                        f"User '{username}' needs 'password' or 'password_hash'"
                    )
                password_hash = hasher.hash_password(password)

            user = User(
                username=username,
                password_hash=password_hash,
                **info
            )
            backend._users[username] = user

        return backend


class SessionManager:
    """Manage user sessions.

    Example:
        >>> sessions = SessionManager(secret_key='my-secret')
        >>> session_id = sessions.create_session('admin')
        >>> user = sessions.get_session(session_id)
    """

    def __init__(self, secret_key: str, session_timeout: int = 3600):
        """Initialize session manager.

        Args:
            secret_key: Secret key for session signing
            session_timeout: Session timeout in seconds (default: 1 hour)
        """
        self.secret_key = secret_key
        self.session_timeout = session_timeout
        self._sessions: dict[str, dict] = {}

    def create_session(self, username: str, data: Optional[dict] = None) -> str:
        """Create a new session and return its id."""
        session_id = secrets.token_urlsafe(32)

        self._sessions[session_id] = {
            'username': username,
            'created_at': datetime.now(),
            'expires_at': datetime.now() + timedelta(seconds=self.session_timeout),
            'data': data or {},
        }

        return session_id

    def get_session(self, session_id: str) -> Optional[dict]:
        """Return session data, or None if unknown/expired (expired sessions
        are deleted on access)."""
        session = self._sessions.get(session_id)
        if not session:
            return None

        # Lazy expiration check.
        if datetime.now() > session['expires_at']:
            del self._sessions[session_id]
            return None

        return session

    def delete_session(self, session_id: str) -> bool:
        """Delete a session; return True if it existed."""
        if session_id in self._sessions:
            del self._sessions[session_id]
            return True
        return False

    def cleanup_expired(self) -> int:
        """Remove expired sessions and return how many were removed."""
        now = datetime.now()
        expired = [
            sid for sid, session in self._sessions.items()
            if now > session['expires_at']
        ]

        for sid in expired:
            del self._sessions[sid]

        return len(expired)


class ApiKey:
    """API key for programmatic access."""

    def __init__(
        self,
        key: str,
        name: str,
        permissions: Optional[list[str]] = None,
        expires_at: Optional[datetime] = None,
    ):
        """Initialize API key.

        Args:
            key: The API key string
            name: Descriptive name
            permissions: List of allowed permissions
            expires_at: Optional expiration
        """
        self.key = key
        self.name = name
        self.permissions = permissions or []
        self.created_at = datetime.now()
        self.expires_at = expires_at
        self.is_active = True

    def is_expired(self) -> bool:
        """Check if key is expired (never, when expires_at is None)."""
        if self.expires_at is None:
            return False
        return datetime.now() > self.expires_at

    def has_permission(self, permission: str) -> bool:
        """Check if key has permission."""
        return permission in self.permissions


class ApiKeyManager:
    """Manage API keys for programmatic access.

    Example:
        >>> api_keys = ApiKeyManager()
        >>> key = api_keys.create_key('mobile_app', permissions=['read'])
        >>> print(f"Your API key: {key.key}")
        >>> # Later, validate
        >>> if api_keys.validate_key(key.key, 'read'):
        ...     pass  # Allow access
    """

    def __init__(self, key_prefix: str = 'sk_'):
        """Initialize API key manager.

        Args:
            key_prefix: Prefix for generated keys
        """
        self.key_prefix = key_prefix
        self._keys: dict[str, ApiKey] = {}

    def create_key(
        self,
        name: str,
        permissions: Optional[list[str]] = None,
        expires_in_days: Optional[int] = None,
    ) -> ApiKey:
        """Create, register, and return a new ApiKey."""
        key_str = f"{self.key_prefix}{secrets.token_urlsafe(32)}"

        expires_at = None
        if expires_in_days:
            expires_at = datetime.now() + timedelta(days=expires_in_days)

        api_key = ApiKey(
            key=key_str,
            name=name,
            permissions=permissions,
            expires_at=expires_at,
        )

        self._keys[key_str] = api_key
        return api_key

    def validate_key(self, key: str, permission: Optional[str] = None) -> bool:
        """Return True if the key is known, active, unexpired, and (when
        given) carries `permission`."""
        api_key = self._keys.get(key)
        if not api_key or not api_key.is_active:
            return False

        if api_key.is_expired():
            return False

        if permission and not api_key.has_permission(permission):
            return False

        return True

    def revoke_key(self, key: str) -> bool:
        """Deactivate a key; return True if it existed."""
        api_key = self._keys.get(key)
        if api_key:
            api_key.is_active = False
            return True
        return False

    def list_keys(self) -> list[ApiKey]:
        """Return all ApiKey objects (active and revoked)."""
        return list(self._keys.values())


def require_auth(
    backend: AuthBackend,
    roles: Optional[list[str]] = None,
    permissions: Optional[list[str]] = None,
):
    """Decorator to require authentication for a function.

    Args:
        backend: Authentication backend
        roles: Required roles (any)
        permissions: Required permissions (all)

    Returns:
        Decorator function

    Example:
        >>> backend = DictAuthBackend.from_dict({
        ...     'admin': {'password': 'secret', 'roles': ['admin']}
        ... })
        >>> @require_auth(backend, roles=['admin'])
        ... def delete_all():
        ...     pass
    """

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Metadata-only in the current implementation; actual
            # enforcement requires middleware integration.
            return func(*args, **kwargs)

        wrapper.__uf_auth_required__ = True
        wrapper.__uf_auth_roles__ = roles or []
        wrapper.__uf_auth_permissions__ = permissions or []
        wrapper.__uf_auth_backend__ = backend

        return wrapper

    return decorator


# Global instances for convenience
_global_auth_backend: Optional[AuthBackend] = None
_global_session_manager: Optional[SessionManager] = None
_global_api_key_manager: Optional[ApiKeyManager] = None


def set_global_auth_backend(backend: AuthBackend) -> None:
    """Set the global authentication backend."""
    global _global_auth_backend
    _global_auth_backend = backend


def get_global_auth_backend() -> Optional[AuthBackend]:
    """Get the global authentication backend."""
    return _global_auth_backend


def set_global_session_manager(manager: SessionManager) -> None:
    """Set the global session manager."""
    global _global_session_manager
    _global_session_manager = manager


def get_global_session_manager() -> Optional[SessionManager]:
    """Get the global session manager."""
    return _global_session_manager


def set_global_api_key_manager(manager: ApiKeyManager) -> None:
    """Set the global API key manager."""
    global _global_api_key_manager
    _global_api_key_manager = manager


def get_global_api_key_manager() -> Optional[ApiKeyManager]:
    """Get the global API key manager."""
    return _global_api_key_manager


# =============================================================================
# File: uf/background.py (new file; truncated in the visible patch chunk)
# =============================================================================
"""Background task execution for uf.

Provides decorators and utilities for running tasks in the background,
with support for queues, scheduling, and progress tracking.
"""

from typing import Callable, Any, Optional
from functools import wraps
from datetime import datetime
from enum import Enum
import threading
import queue
import uuid


class TaskStatus(Enum):
    """Status of a background task."""

    PENDING = "pending"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


# NOTE(review): the `Task` class definition is cut off mid-`__init__` at the
# end of the visible patch chunk, so it is intentionally not reconstructed
# here — complete it from the full patch.
+ + Args: + func: Function to execute + args: Positional arguments + kwargs: Keyword arguments + task_id: Optional task ID (generated if not provided) + """ + self.task_id = task_id or str(uuid.uuid4()) + self.func = func + self.func_name = func.__name__ + self.args = args + self.kwargs = kwargs or {} + self.status = TaskStatus.PENDING + self.result: Any = None + self.error: Optional[str] = None + self.created_at = datetime.now() + self.started_at: Optional[datetime] = None + self.completed_at: Optional[datetime] = None + self.progress = 0 + + def execute(self) -> Any: + """Execute the task. + + Returns: + Task result + + Raises: + Exception: If task execution fails + """ + self.status = TaskStatus.RUNNING + self.started_at = datetime.now() + + try: + self.result = self.func(*self.args, **self.kwargs) + self.status = TaskStatus.COMPLETED + self.progress = 100 + return self.result + except Exception as e: + self.status = TaskStatus.FAILED + self.error = str(e) + raise + finally: + self.completed_at = datetime.now() + + def to_dict(self) -> dict: + """Convert task to dictionary. + + Returns: + Dictionary representation + """ + return { + 'task_id': self.task_id, + 'func_name': self.func_name, + 'status': self.status.value, + 'result': self.result if self.status == TaskStatus.COMPLETED else None, + 'error': self.error, + 'created_at': self.created_at.isoformat(), + 'started_at': self.started_at.isoformat() if self.started_at else None, + 'completed_at': self.completed_at.isoformat() if self.completed_at else None, + 'progress': self.progress, + } + + +class TaskQueue: + """FIFO queue for background tasks. + + Example: + >>> task_queue = TaskQueue(num_workers=2) + >>> task_queue.start() + >>> task_id = task_queue.submit(expensive_function, x=10, y=20) + >>> status = task_queue.get_status(task_id) + >>> result = task_queue.get_result(task_id) + """ + + def __init__(self, num_workers: int = 1, max_queue_size: int = 100): + """Initialize task queue. 
+ + Args: + num_workers: Number of worker threads + max_queue_size: Maximum queue size + """ + self.num_workers = num_workers + self.max_queue_size = max_queue_size + self._queue: queue.Queue = queue.Queue(maxsize=max_queue_size) + self._tasks: dict[str, Task] = {} + self._workers: list[threading.Thread] = [] + self._running = False + + def start(self) -> None: + """Start worker threads.""" + if self._running: + return + + self._running = True + + for i in range(self.num_workers): + worker = threading.Thread( + target=self._worker_loop, + name=f"TaskWorker-{i}", + daemon=True, + ) + worker.start() + self._workers.append(worker) + + def stop(self, wait: bool = True) -> None: + """Stop worker threads. + + Args: + wait: Whether to wait for threads to finish + """ + self._running = False + + if wait: + for worker in self._workers: + worker.join(timeout=5.0) + + self._workers.clear() + + def submit( + self, + func: Callable, + *args, + task_id: Optional[str] = None, + **kwargs + ) -> str: + """Submit a task for execution. + + Args: + func: Function to execute + *args: Positional arguments + task_id: Optional task ID + **kwargs: Keyword arguments + + Returns: + Task ID + + Raises: + queue.Full: If queue is full + """ + task = Task(func, args=args, kwargs=kwargs, task_id=task_id) + self._tasks[task.task_id] = task + self._queue.put(task, block=False) # Don't block + return task.task_id + + def get_status(self, task_id: str) -> Optional[TaskStatus]: + """Get task status. + + Args: + task_id: Task ID + + Returns: + TaskStatus or None if not found + """ + task = self._tasks.get(task_id) + return task.status if task else None + + def get_result(self, task_id: str, wait: bool = False, timeout: Optional[float] = None) -> Any: + """Get task result. 
+ + Args: + task_id: Task ID + wait: Whether to wait for completion + timeout: Optional timeout in seconds + + Returns: + Task result + + Raises: + ValueError: If task not found + RuntimeError: If task failed + TimeoutError: If wait times out + """ + task = self._tasks.get(task_id) + if not task: + raise ValueError(f"Task {task_id} not found") + + if wait and task.status not in [TaskStatus.COMPLETED, TaskStatus.FAILED]: + # Wait for completion + import time + start_time = time.time() + while task.status not in [TaskStatus.COMPLETED, TaskStatus.FAILED]: + time.sleep(0.1) + if timeout and (time.time() - start_time) > timeout: + raise TimeoutError(f"Task {task_id} timed out") + + if task.status == TaskStatus.FAILED: + raise RuntimeError(f"Task failed: {task.error}") + + if task.status != TaskStatus.COMPLETED: + return None + + return task.result + + def get_task(self, task_id: str) -> Optional[Task]: + """Get task object. + + Args: + task_id: Task ID + + Returns: + Task object or None + """ + return self._tasks.get(task_id) + + def cancel_task(self, task_id: str) -> bool: + """Cancel a task. + + Args: + task_id: Task ID + + Returns: + True if cancelled + + Note: + Can only cancel pending tasks + """ + task = self._tasks.get(task_id) + if not task or task.status != TaskStatus.PENDING: + return False + + task.status = TaskStatus.CANCELLED + return True + + def _worker_loop(self) -> None: + """Worker thread loop.""" + while self._running: + try: + # Get task with timeout to allow checking _running + task = self._queue.get(timeout=1.0) + except queue.Empty: + continue + + if task.status == TaskStatus.CANCELLED: + continue + + try: + task.execute() + except Exception: + # Error already recorded in task + pass + finally: + self._queue.task_done() + + def queue_size(self) -> int: + """Get current queue size. + + Returns: + Number of pending tasks + """ + return self._queue.qsize() + + def stats(self) -> dict: + """Get queue statistics. 
+ + Returns: + Dictionary with statistics + """ + total = len(self._tasks) + by_status = {} + for task in self._tasks.values(): + status = task.status.value + by_status[status] = by_status.get(status, 0) + 1 + + return { + 'total_tasks': total, + 'queue_size': self.queue_size(), + 'num_workers': self.num_workers, + 'by_status': by_status, + } + + +def background( + queue_name: str = 'default', + task_queue: Optional[TaskQueue] = None, +): + """Decorator to run function in background. + + Args: + queue_name: Name of the queue to use + task_queue: Optional TaskQueue instance + + Returns: + Decorator function + + Example: + >>> @background() + ... def send_email(to: str, subject: str): + ... # Long-running email sending + ... pass + >>> + >>> task_id = send_email('user@example.com', 'Hello') + >>> # Returns immediately with task_id + """ + if task_queue is None: + task_queue = get_global_task_queue(queue_name) + if task_queue is None: + task_queue = TaskQueue(num_workers=2) + task_queue.start() + set_global_task_queue(queue_name, task_queue) + + def decorator(func: Callable) -> Callable: + @wraps(func) + def wrapper(*args, **kwargs): + """Submit task and return task ID.""" + task_id = task_queue.submit(func, *args, **kwargs) + return task_id + + wrapper.__uf_background__ = True + wrapper.__uf_task_queue__ = task_queue + wrapper.__uf_original_func__ = func + + # Add utility methods + def get_status(task_id: str): + """Get task status.""" + return task_queue.get_status(task_id) + + def get_result(task_id: str, wait: bool = False, timeout: Optional[float] = None): + """Get task result.""" + return task_queue.get_result(task_id, wait=wait, timeout=timeout) + + wrapper.get_status = get_status + wrapper.get_result = get_result + + return wrapper + + return decorator + + +class PeriodicTask: + """Task that runs periodically. + + Example: + >>> def cleanup(): + ... 
print("Cleaning up...") + >>> + >>> periodic = PeriodicTask(cleanup, interval=3600) + >>> periodic.start() + >>> # Runs every hour + >>> periodic.stop() + """ + + def __init__(self, func: Callable, interval: float, args: tuple = (), kwargs: Optional[dict] = None): + """Initialize periodic task. + + Args: + func: Function to run + interval: Interval in seconds + args: Positional arguments + kwargs: Keyword arguments + """ + self.func = func + self.interval = interval + self.args = args + self.kwargs = kwargs or {} + self._timer: Optional[threading.Timer] = None + self._running = False + + def start(self) -> None: + """Start periodic execution.""" + if self._running: + return + + self._running = True + self._schedule_next() + + def stop(self) -> None: + """Stop periodic execution.""" + self._running = False + if self._timer: + self._timer.cancel() + self._timer = None + + def _schedule_next(self) -> None: + """Schedule next execution.""" + if not self._running: + return + + self._timer = threading.Timer(self.interval, self._run) + self._timer.daemon = True + self._timer.start() + + def _run(self) -> None: + """Run the function and schedule next.""" + try: + self.func(*self.args, **self.kwargs) + except Exception: + # Log error but continue + pass + finally: + self._schedule_next() + + +# Global task queues +_global_task_queues: dict[str, TaskQueue] = {} + + +def get_global_task_queue(name: str = 'default') -> Optional[TaskQueue]: + """Get a global task queue by name. + + Args: + name: Queue name + + Returns: + TaskQueue or None + """ + return _global_task_queues.get(name) + + +def set_global_task_queue(name: str, task_queue: TaskQueue) -> None: + """Set a global task queue. + + Args: + name: Queue name + task_queue: TaskQueue instance + """ + _global_task_queues[name] = task_queue + + +def get_or_create_task_queue(name: str = 'default', num_workers: int = 2) -> TaskQueue: + """Get or create a task queue. 
+ + Args: + name: Queue name + num_workers: Number of workers if creating + + Returns: + TaskQueue instance + """ + queue = get_global_task_queue(name) + if queue is None: + queue = TaskQueue(num_workers=num_workers) + queue.start() + set_global_task_queue(name, queue) + return queue diff --git a/uf/caching.py b/uf/caching.py new file mode 100644 index 0000000..c871afc --- /dev/null +++ b/uf/caching.py @@ -0,0 +1,472 @@ +"""Result caching for uf. + +Provides caching decorators and backends to cache function results, +improving performance for expensive operations. +""" + +from typing import Callable, Any, Optional, Hashable +from functools import wraps +from datetime import datetime, timedelta +import json +import hashlib +import pickle + + +class CacheBackend: + """Base class for cache backends.""" + + def get(self, key: str) -> Optional[Any]: + """Get a value from cache. + + Args: + key: Cache key + + Returns: + Cached value or None if not found/expired + """ + raise NotImplementedError + + def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None: + """Set a value in cache. + + Args: + key: Cache key + value: Value to cache + ttl: Time to live in seconds + """ + raise NotImplementedError + + def delete(self, key: str) -> bool: + """Delete a key from cache. + + Args: + key: Cache key + + Returns: + True if deleted + """ + raise NotImplementedError + + def clear(self) -> None: + """Clear all cache entries.""" + raise NotImplementedError + + def exists(self, key: str) -> bool: + """Check if key exists in cache. + + Args: + key: Cache key + + Returns: + True if exists and not expired + """ + return self.get(key) is not None + + +class MemoryCache(CacheBackend): + """In-memory cache backend. + + Simple dictionary-based caching suitable for single-process applications. 
+ + Example: + >>> cache = MemoryCache(default_ttl=3600) + >>> cache.set('key', 'value', ttl=60) + >>> value = cache.get('key') + """ + + def __init__(self, default_ttl: int = 3600, max_size: int = 1000): + """Initialize memory cache. + + Args: + default_ttl: Default TTL in seconds + max_size: Maximum number of entries + """ + self.default_ttl = default_ttl + self.max_size = max_size + self._cache: dict[str, dict] = {} + + def get(self, key: str) -> Optional[Any]: + """Get a value from cache.""" + entry = self._cache.get(key) + if not entry: + return None + + # Check expiration + if entry['expires_at'] and datetime.now() > entry['expires_at']: + del self._cache[key] + return None + + return entry['value'] + + def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None: + """Set a value in cache.""" + if ttl is None: + ttl = self.default_ttl + + expires_at = None + if ttl > 0: + expires_at = datetime.now() + timedelta(seconds=ttl) + + self._cache[key] = { + 'value': value, + 'expires_at': expires_at, + 'created_at': datetime.now(), + } + + # Evict oldest entries if over max size + if len(self._cache) > self.max_size: + self._evict_oldest() + + def delete(self, key: str) -> bool: + """Delete a key from cache.""" + if key in self._cache: + del self._cache[key] + return True + return False + + def clear(self) -> None: + """Clear all cache entries.""" + self._cache.clear() + + def _evict_oldest(self) -> None: + """Evict oldest entries to fit max size.""" + # Sort by created_at and remove oldest 10% + sorted_keys = sorted( + self._cache.keys(), + key=lambda k: self._cache[k]['created_at'] + ) + + num_to_remove = max(1, len(self._cache) // 10) + for key in sorted_keys[:num_to_remove]: + del self._cache[key] + + def cleanup_expired(self) -> int: + """Remove expired entries. 
+ + Returns: + Number of entries removed + """ + now = datetime.now() + expired = [ + key for key, entry in self._cache.items() + if entry['expires_at'] and now > entry['expires_at'] + ] + + for key in expired: + del self._cache[key] + + return len(expired) + + def stats(self) -> dict: + """Get cache statistics. + + Returns: + Dictionary with cache stats + """ + total = len(self._cache) + now = datetime.now() + expired = sum( + 1 for entry in self._cache.values() + if entry['expires_at'] and now > entry['expires_at'] + ) + + return { + 'total_entries': total, + 'active_entries': total - expired, + 'expired_entries': expired, + 'max_size': self.max_size, + 'utilization': total / self.max_size if self.max_size > 0 else 0, + } + + +class DiskCache(CacheBackend): + """Disk-based cache backend using pickle. + + Persists cache to disk, suitable for larger datasets or persistence + across restarts. + + Example: + >>> cache = DiskCache(cache_dir='/tmp/uf_cache') + >>> cache.set('expensive_result', big_data) + """ + + def __init__(self, cache_dir: str = '.uf_cache', default_ttl: int = 3600): + """Initialize disk cache. 
+ + Args: + cache_dir: Directory to store cache files + default_ttl: Default TTL in seconds + """ + import os + self.cache_dir = cache_dir + self.default_ttl = default_ttl + + os.makedirs(cache_dir, exist_ok=True) + + def _get_path(self, key: str) -> str: + """Get file path for a key.""" + import os + # Hash the key to create valid filename + key_hash = hashlib.md5(key.encode()).hexdigest() + return os.path.join(self.cache_dir, f"{key_hash}.cache") + + def get(self, key: str) -> Optional[Any]: + """Get a value from cache.""" + import os + + path = self._get_path(key) + if not os.path.exists(path): + return None + + try: + with open(path, 'rb') as f: + entry = pickle.load(f) + + # Check expiration + if entry['expires_at'] and datetime.now() > entry['expires_at']: + os.remove(path) + return None + + return entry['value'] + except Exception: + return None + + def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None: + """Set a value in cache.""" + if ttl is None: + ttl = self.default_ttl + + expires_at = None + if ttl > 0: + expires_at = datetime.now() + timedelta(seconds=ttl) + + entry = { + 'value': value, + 'expires_at': expires_at, + 'created_at': datetime.now(), + } + + path = self._get_path(key) + with open(path, 'wb') as f: + pickle.dump(entry, f) + + def delete(self, key: str) -> bool: + """Delete a key from cache.""" + import os + + path = self._get_path(key) + if os.path.exists(path): + os.remove(path) + return True + return False + + def clear(self) -> None: + """Clear all cache entries.""" + import os + import glob + + for file_path in glob.glob(os.path.join(self.cache_dir, '*.cache')): + os.remove(file_path) + + +def make_cache_key(func_name: str, args: tuple, kwargs: dict) -> str: + """Create a cache key from function call. 
+ + Args: + func_name: Function name + args: Positional arguments + kwargs: Keyword arguments + + Returns: + Cache key string + """ + # Create a deterministic key from arguments + key_data = { + 'func': func_name, + 'args': args, + 'kwargs': sorted(kwargs.items()), + } + + # Serialize to JSON for hashing + try: + key_str = json.dumps(key_data, sort_keys=True, default=str) + except (TypeError, ValueError): + # Fallback to string representation + key_str = f"{func_name}:{args}:{sorted(kwargs.items())}" + + # Hash for compact key + return hashlib.sha256(key_str.encode()).hexdigest() + + +def cached( + ttl: int = 3600, + backend: Optional[CacheBackend] = None, + key_func: Optional[Callable] = None, +): + """Decorator to cache function results. + + Args: + ttl: Time to live in seconds + backend: Cache backend (uses global MemoryCache if None) + key_func: Optional function to generate cache key + + Returns: + Decorator function + + Example: + >>> @cached(ttl=3600) + ... def expensive_calculation(x: int, y: int) -> int: + ... # Only runs once per unique (x, y) combination + ... 
return heavy_computation(x, y) + >>> + >>> result = expensive_calculation(10, 20) # Computes + >>> result2 = expensive_calculation(10, 20) # From cache + """ + if backend is None: + backend = get_global_cache_backend() + if backend is None: + backend = MemoryCache(default_ttl=ttl) + set_global_cache_backend(backend) + + def decorator(func: Callable) -> Callable: + @wraps(func) + def wrapper(*args, **kwargs): + # Generate cache key + if key_func: + cache_key = key_func(*args, **kwargs) + else: + cache_key = make_cache_key(func.__name__, args, kwargs) + + # Try to get from cache + cached_result = backend.get(cache_key) + if cached_result is not None: + return cached_result + + # Compute result + result = func(*args, **kwargs) + + # Store in cache + backend.set(cache_key, result, ttl=ttl) + + return result + + wrapper.__uf_cached__ = True + wrapper.__uf_cache_backend__ = backend + wrapper.__uf_cache_ttl__ = ttl + + # Add cache control methods + def clear_cache(): + """Clear all cached results for this function.""" + # This is a simplified version + # Full implementation would track keys per function + backend.clear() + + wrapper.clear_cache = clear_cache + + return wrapper + + return decorator + + +def cache_invalidate(cache_key: str, backend: Optional[CacheBackend] = None) -> bool: + """Invalidate a specific cache entry. + + Args: + cache_key: Cache key to invalidate + backend: Cache backend (uses global if None) + + Returns: + True if invalidated + """ + if backend is None: + backend = get_global_cache_backend() + + if backend: + return backend.delete(cache_key) + + return False + + +def cache_clear_all(backend: Optional[CacheBackend] = None) -> None: + """Clear all cache entries. 
+ + Args: + backend: Cache backend (uses global if None) + """ + if backend is None: + backend = get_global_cache_backend() + + if backend: + backend.clear() + + +class CacheStats: + """Track cache hit/miss statistics.""" + + def __init__(self): + """Initialize cache stats.""" + self.hits = 0 + self.misses = 0 + self.sets = 0 + + def record_hit(self): + """Record a cache hit.""" + self.hits += 1 + + def record_miss(self): + """Record a cache miss.""" + self.misses += 1 + + def record_set(self): + """Record a cache set.""" + self.sets += 1 + + def hit_rate(self) -> float: + """Calculate hit rate. + + Returns: + Hit rate as a percentage (0-100) + """ + total = self.hits + self.misses + if total == 0: + return 0.0 + return (self.hits / total) * 100 + + def reset(self): + """Reset all statistics.""" + self.hits = 0 + self.misses = 0 + self.sets = 0 + + def to_dict(self) -> dict: + """Convert to dictionary. + + Returns: + Statistics dictionary + """ + return { + 'hits': self.hits, + 'misses': self.misses, + 'sets': self.sets, + 'hit_rate': self.hit_rate(), + 'total_requests': self.hits + self.misses, + } + + +# Global cache backend +_global_cache_backend: Optional[CacheBackend] = None + + +def set_global_cache_backend(backend: CacheBackend) -> None: + """Set the global cache backend.""" + global _global_cache_backend + _global_cache_backend = backend + + +def get_global_cache_backend() -> Optional[CacheBackend]: + """Get the global cache backend.""" + return _global_cache_backend + + +# Initialize default global backend +set_global_cache_backend(MemoryCache()) diff --git a/uf/openapi.py b/uf/openapi.py new file mode 100644 index 0000000..c16e2ee --- /dev/null +++ b/uf/openapi.py @@ -0,0 +1,326 @@ +"""OpenAPI/Swagger integration for uf. + +Automatically generates OpenAPI specifications and provides Swagger UI +for API documentation and testing. 
+""" + +from typing import Callable, Any, Optional +import inspect + + +def function_to_openapi_operation(func: Callable, path: str = None) -> dict: + """Convert a function to OpenAPI operation spec. + + Args: + func: Function to convert + path: Optional API path + + Returns: + OpenAPI operation dictionary + """ + sig = inspect.signature(func) + doc = inspect.getdoc(func) or "" + + # Parse docstring for description + lines = doc.split('\n') + summary = lines[0] if lines else func.__name__ + description = '\n'.join(lines[1:]).strip() if len(lines) > 1 else summary + + # Build parameters from signature + parameters = [] + request_body = None + + type_map = { + int: 'integer', + float: 'number', + str: 'string', + bool: 'boolean', + list: 'array', + dict: 'object', + } + + for param_name, param in sig.parameters.items(): + param_type = 'string' # default + + if param.annotation != inspect.Parameter.empty: + py_type = param.annotation + # Handle Optional types + if hasattr(py_type, '__origin__'): + if py_type.__origin__ is type(Optional): + py_type = py_type.__args__[0] + + param_type = type_map.get(py_type, 'string') + + param_schema = { + 'type': param_type, + } + + # Check if required + required = param.default == inspect.Parameter.empty + + parameters.append({ + 'name': param_name, + 'in': 'query', + 'required': required, + 'schema': param_schema, + }) + + operation = { + 'summary': summary, + 'description': description, + 'parameters': parameters, + 'responses': { + '200': { + 'description': 'Successful response', + 'content': { + 'application/json': { + 'schema': {'type': 'object'} + } + } + }, + '400': { + 'description': 'Bad request' + }, + '500': { + 'description': 'Internal server error' + } + } + } + + # Add tags if function has group + if hasattr(func, '__uf_ui_config__'): + config = func.__uf_ui_config__ + if config.get('group'): + operation['tags'] = [config['group']] + + return operation + + +def generate_openapi_spec( + funcs: list[Callable], + title: 
str = "API", + version: str = "1.0.0", + description: str = "", + servers: Optional[list[dict]] = None, +) -> dict: + """Generate OpenAPI 3.0 specification. + + Args: + funcs: List of functions + title: API title + version: API version + description: API description + servers: Optional list of server configs + + Returns: + OpenAPI specification dictionary + """ + if servers is None: + servers = [{'url': '/'}] + + paths = {} + tags = set() + + for func in funcs: + func_name = func.__name__ + path = f'/{func_name}' + + operation = function_to_openapi_operation(func, path) + + # Collect tags + if 'tags' in operation: + tags.update(operation['tags']) + + paths[path] = { + 'post': operation # Use POST for form submissions + } + + spec = { + 'openapi': '3.0.0', + 'info': { + 'title': title, + 'version': version, + 'description': description, + }, + 'servers': servers, + 'paths': paths, + } + + # Add tags + if tags: + spec['tags'] = [{'name': tag} for tag in sorted(tags)] + + return spec + + +def swagger_ui_html(openapi_url: str = '/openapi.json') -> str: + """Generate Swagger UI HTML. + + Args: + openapi_url: URL to OpenAPI spec + + Returns: + HTML string for Swagger UI + """ + html = f""" + +
+ + +