From bc915c61e8eef202fc8f213516fcbc06e1344368 Mon Sep 17 00:00:00 2001 From: / Date: Mon, 2 Feb 2026 13:50:13 +0100 Subject: [PATCH 001/122] feat: Complete Phase 2 - Users, Activity & Library Detail Views - Add PlaybackActivity model for detailed playback session tracking - Add activity API routes (/api/activity/) with list, stats, active sessions - Add users API routes (/api/users/) with list, detail, activity endpoints - Extend Emby sync with _sync_active_sessions() for real-time session tracking - Add Users.tsx page with search, pagination, user stats - Add UserDetail.tsx page with Overview/Activity tabs and time-based stats - Add Activity.tsx page with global activity log, active sessions, stats cards - Add LibraryDetail.tsx with Overview/Media/Activity tabs - Extend libraries API with /details, /media, /activity endpoints - Update navigation with Users and Activity menu items - Library cards now link to detail view Completes Phase 2 of PLANNED_FEATURES.md roadmap (Jellystat-style features) --- PLANNED_FEATURES.md | 17 +- backend/app/api/routes/activity.py | 267 ++++++++++++ backend/app/api/routes/libraries.py | 408 +++++++++++++++++- backend/app/api/routes/users.py | 295 +++++++++++++ backend/app/main.py | 4 +- backend/app/models/__init__.py | 2 +- backend/app/models/database.py | 55 +++ backend/app/services/sync.py | 122 +++++- frontend/src/App.tsx | 8 + frontend/src/components/Layout.tsx | 6 +- frontend/src/pages/Activity.tsx | 401 ++++++++++++++++++ frontend/src/pages/Libraries.tsx | 11 +- frontend/src/pages/LibraryDetail.tsx | 599 +++++++++++++++++++++++++++ frontend/src/pages/UserDetail.tsx | 425 +++++++++++++++++++ frontend/src/pages/Users.tsx | 273 ++++++++++++ 15 files changed, 2874 insertions(+), 19 deletions(-) create mode 100644 backend/app/api/routes/activity.py create mode 100644 backend/app/api/routes/users.py create mode 100644 frontend/src/pages/Activity.tsx create mode 100644 frontend/src/pages/LibraryDetail.tsx create mode 100644 
frontend/src/pages/UserDetail.tsx create mode 100644 frontend/src/pages/Users.tsx diff --git a/PLANNED_FEATURES.md b/PLANNED_FEATURES.md index c708b16..615056e 100644 --- a/PLANNED_FEATURES.md +++ b/PLANNED_FEATURES.md @@ -256,17 +256,20 @@ Delete movie if: ## Implementation Priority -### Phase 1 - Foundation (Current) +### Phase 1 - Foundation ✅ - [x] MediaServerUser model - [x] UserWatchHistory model - [x] Basic user stats on Dashboard - [x] Library stats API - -### Phase 2 - Views & Navigation -- [ ] Library Detail View (Overview, Media, Activity tabs) -- [ ] Users Page (list with stats) -- [ ] User Detail View (Overview, Activity, Timeline tabs) -- [ ] Global Activity Log +- [x] PlaybackActivity model + +### Phase 2 - Views & Navigation ✅ +- [x] Library Detail View (Overview, Media, Activity tabs) +- [x] Users Page (list with stats) +- [x] User Detail View (Overview, Activity tabs) +- [x] Global Activity Log +- [x] Activity stats API (plays by day/hour/week) +- [x] Active sessions tracking ### Phase 3 - Statistics & Charts - [ ] Daily Play Count Chart diff --git a/backend/app/api/routes/activity.py b/backend/app/api/routes/activity.py new file mode 100644 index 0000000..de44404 --- /dev/null +++ b/backend/app/api/routes/activity.py @@ -0,0 +1,267 @@ +""" +Activity API routes - Global activity log and playback history. 
+""" +from fastapi import APIRouter, Depends, Query, HTTPException +from sqlalchemy import select, func, desc, and_, or_ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload +from typing import Optional +from datetime import datetime, timedelta + +from ...core.database import get_db +from ...api.deps import get_current_user +from ...models import ( + User, PlaybackActivity, MediaServerUser, MediaItem, Library +) + +router = APIRouter() + + +@router.get("/") +async def get_activities( + page: int = Query(1, ge=1), + page_size: int = Query(25, ge=1, le=100), + library_id: Optional[int] = None, + user_id: Optional[int] = None, + media_type: Optional[str] = None, + search: Optional[str] = None, + start_date: Optional[datetime] = None, + end_date: Optional[datetime] = None, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Get paginated activity log with filtering options.""" + + # Base query + query = ( + select(PlaybackActivity) + .options( + selectinload(PlaybackActivity.user), + selectinload(PlaybackActivity.media_item), + selectinload(PlaybackActivity.library) + ) + .order_by(desc(PlaybackActivity.started_at)) + ) + + # Apply filters + conditions = [] + + if library_id: + conditions.append(PlaybackActivity.library_id == library_id) + + if user_id: + conditions.append(PlaybackActivity.user_id == user_id) + + if media_type: + # Join with media_item to filter by type + query = query.join(MediaItem, MediaItem.id == PlaybackActivity.media_item_id, isouter=True) + conditions.append(MediaItem.media_type == media_type) + + if search: + conditions.append( + or_( + PlaybackActivity.media_title.ilike(f"%{search}%"), + PlaybackActivity.client_name.ilike(f"%{search}%"), + PlaybackActivity.device_name.ilike(f"%{search}%") + ) + ) + + if start_date: + conditions.append(PlaybackActivity.started_at >= start_date) + + if end_date: + conditions.append(PlaybackActivity.started_at <= end_date) + + if 
conditions: + query = query.where(and_(*conditions)) + + # Count total + count_query = select(func.count()).select_from( + query.subquery() + ) + total_result = await db.execute(count_query) + total = total_result.scalar() or 0 + + # Paginate + offset = (page - 1) * page_size + query = query.offset(offset).limit(page_size) + + result = await db.execute(query) + activities = result.scalars().all() + + return { + "items": [ + { + "id": a.id, + "user": { + "id": a.user.id, + "name": a.user.name, + "is_admin": a.user.is_admin + } if a.user else None, + "media_title": a.media_title, + "media_item_id": a.media_item_id, + "media_type": a.media_item.media_type if a.media_item else None, + "library": { + "id": a.library.id, + "name": a.library.name + } if a.library else None, + "client_name": a.client_name, + "device_name": a.device_name, + "ip_address": a.ip_address, + "play_method": a.play_method, + "is_transcoding": a.is_transcoding, + "transcode_video": a.transcode_video, + "transcode_audio": a.transcode_audio, + "started_at": a.started_at.isoformat() if a.started_at else None, + "ended_at": a.ended_at.isoformat() if a.ended_at else None, + "duration_seconds": a.duration_seconds, + "played_percentage": a.played_percentage, + "is_active": a.is_active + } + for a in activities + ], + "total": total, + "page": page, + "page_size": page_size, + "total_pages": (total + page_size - 1) // page_size + } + + +@router.get("/stats") +async def get_activity_stats( + days: int = Query(30, ge=1, le=365), + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Get activity statistics for the specified time period.""" + + since = datetime.utcnow() - timedelta(days=days) + + # Total plays + total_plays_result = await db.execute( + select(func.count(PlaybackActivity.id)) + .where(PlaybackActivity.started_at >= since) + ) + total_plays = total_plays_result.scalar() or 0 + + # Total watch time + total_time_result = await db.execute( + 
select(func.sum(PlaybackActivity.duration_seconds)) + .where(PlaybackActivity.started_at >= since) + ) + total_watch_seconds = total_time_result.scalar() or 0 + + # Unique users + unique_users_result = await db.execute( + select(func.count(func.distinct(PlaybackActivity.user_id))) + .where(PlaybackActivity.started_at >= since) + ) + unique_users = unique_users_result.scalar() or 0 + + # Active sessions (currently playing) + active_sessions_result = await db.execute( + select(func.count(PlaybackActivity.id)) + .where(PlaybackActivity.is_active == True) + ) + active_sessions = active_sessions_result.scalar() or 0 + + # Plays by day (for chart) + plays_by_day_result = await db.execute( + select( + func.date(PlaybackActivity.started_at).label('date'), + func.count(PlaybackActivity.id).label('plays'), + func.sum(PlaybackActivity.duration_seconds).label('duration') + ) + .where(PlaybackActivity.started_at >= since) + .group_by(func.date(PlaybackActivity.started_at)) + .order_by(func.date(PlaybackActivity.started_at)) + ) + plays_by_day = [ + { + "date": str(row.date), + "plays": row.plays, + "duration_seconds": row.duration or 0 + } + for row in plays_by_day_result + ] + + # Plays by hour (for chart) + plays_by_hour_result = await db.execute( + select( + func.extract('hour', PlaybackActivity.started_at).label('hour'), + func.count(PlaybackActivity.id).label('plays') + ) + .where(PlaybackActivity.started_at >= since) + .group_by(func.extract('hour', PlaybackActivity.started_at)) + .order_by(func.extract('hour', PlaybackActivity.started_at)) + ) + plays_by_hour = [ + {"hour": int(row.hour), "plays": row.plays} + for row in plays_by_hour_result + ] + + # Plays by day of week + plays_by_dow_result = await db.execute( + select( + func.extract('dow', PlaybackActivity.started_at).label('dow'), + func.count(PlaybackActivity.id).label('plays') + ) + .where(PlaybackActivity.started_at >= since) + .group_by(func.extract('dow', PlaybackActivity.started_at)) + 
.order_by(func.extract('dow', PlaybackActivity.started_at)) + ) + plays_by_dow = [ + {"day_of_week": int(row.dow), "plays": row.plays} + for row in plays_by_dow_result + ] + + return { + "period_days": days, + "total_plays": total_plays, + "total_watch_seconds": total_watch_seconds, + "unique_users": unique_users, + "active_sessions": active_sessions, + "plays_by_day": plays_by_day, + "plays_by_hour": plays_by_hour, + "plays_by_day_of_week": plays_by_dow + } + + +@router.get("/active") +async def get_active_sessions( + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Get currently active playback sessions.""" + + result = await db.execute( + select(PlaybackActivity) + .options( + selectinload(PlaybackActivity.user), + selectinload(PlaybackActivity.media_item), + selectinload(PlaybackActivity.library) + ) + .where(PlaybackActivity.is_active == True) + .order_by(desc(PlaybackActivity.started_at)) + ) + sessions = result.scalars().all() + + return [ + { + "id": s.id, + "user": { + "id": s.user.id, + "name": s.user.name + } if s.user else None, + "media_title": s.media_title, + "media_type": s.media_item.media_type if s.media_item else None, + "library": s.library.name if s.library else None, + "client_name": s.client_name, + "device_name": s.device_name, + "play_method": s.play_method, + "is_transcoding": s.is_transcoding, + "started_at": s.started_at.isoformat() if s.started_at else None, + "played_percentage": s.played_percentage + } + for s in sessions + ] diff --git a/backend/app/api/routes/libraries.py b/backend/app/api/routes/libraries.py index ecf6a5c..554b884 100644 --- a/backend/app/api/routes/libraries.py +++ b/backend/app/api/routes/libraries.py @@ -1,15 +1,15 @@ """ Libraries API routes - auto-synced from Emby/Jellyfin. 
""" -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends, HTTPException, status, Query from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy import select, and_, func -from typing import List, Dict, Any +from sqlalchemy import select, and_, func, or_ +from typing import List, Dict, Any, Optional from datetime import datetime, timedelta from loguru import logger from ...core.database import get_db -from ...models import Library, ServiceConnection, ServiceType, MediaType, MediaItem, UserWatchHistory +from ...models import Library, ServiceConnection, ServiceType, MediaType, MediaItem, UserWatchHistory, PlaybackActivity from ...schemas import LibraryUpdate, LibraryResponse, LibrarySyncResponse from ...services import EmbyClient from ..deps import get_current_user @@ -219,6 +219,406 @@ async def get_library( return library +@router.get("/{library_id}/details") +async def get_library_details( + library_id: int, + db: AsyncSession = Depends(get_db), + current_user = Depends(get_current_user) +) -> Dict[str, Any]: + """Get detailed statistics for a specific library (Jellystat-style).""" + result = await db.execute( + select(Library).where(Library.id == library_id) + ) + library = result.scalar_one_or_none() + if not library: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Library not found" + ) + + # Get service info + service_result = await db.execute( + select(ServiceConnection).where(ServiceConnection.id == library.service_connection_id) + ) + service = service_result.scalar_one_or_none() + + # Time periods + now = datetime.utcnow() + day_ago = now - timedelta(days=1) + week_ago = now - timedelta(days=7) + month_ago = now - timedelta(days=30) + + # Build base stats + if library.media_type == MediaType.MOVIE: + # Movie library + total_items = await db.execute( + select(func.count(MediaItem.id)).where( + and_(MediaItem.library_id == library.id, MediaItem.media_type == MediaType.MOVIE) + 
) + ) + total_items_count = total_items.scalar() or 0 + + # Total size + total_size = await db.execute( + select(func.sum(MediaItem.size_bytes)).where( + and_(MediaItem.library_id == library.id, MediaItem.media_type == MediaType.MOVIE) + ) + ) + total_size_bytes = int(total_size.scalar() or 0) + + # Total plays + total_plays = await db.execute( + select(func.sum(MediaItem.watch_count)).where( + and_(MediaItem.library_id == library.id, MediaItem.media_type == MediaType.MOVIE) + ) + ) + total_plays_count = int(total_plays.scalar() or 0) + + item_breakdown = { + "movies": total_items_count, + "series": 0, + "seasons": 0, + "episodes": 0 + } + else: + # Series library + series_count = await db.execute( + select(func.count(MediaItem.id)).where( + and_(MediaItem.library_id == library.id, MediaItem.media_type == MediaType.SERIES) + ) + ) + series_count_val = series_count.scalar() or 0 + + season_count = await db.execute( + select(func.count(MediaItem.id)).where( + and_(MediaItem.library_id == library.id, MediaItem.media_type == MediaType.SEASON) + ) + ) + season_count_val = season_count.scalar() or 0 + + episode_count = await db.execute( + select(func.count(MediaItem.id)).where( + and_(MediaItem.library_id == library.id, MediaItem.media_type == MediaType.EPISODE) + ) + ) + total_items_count = episode_count.scalar() or 0 + + # Total size + total_size = await db.execute( + select(func.sum(MediaItem.size_bytes)).where(MediaItem.library_id == library.id) + ) + total_size_bytes = int(total_size.scalar() or 0) + + # Total plays (from episodes) + total_plays = await db.execute( + select(func.sum(MediaItem.watch_count)).where( + and_(MediaItem.library_id == library.id, MediaItem.media_type == MediaType.EPISODE) + ) + ) + total_plays_count = int(total_plays.scalar() or 0) + + item_breakdown = { + "movies": 0, + "series": series_count_val, + "seasons": season_count_val, + "episodes": total_items_count + } + + # Activity stats by time period + plays_24h = await db.execute( + 
select(func.count(PlaybackActivity.id)).where( + and_( + PlaybackActivity.library_id == library.id, + PlaybackActivity.started_at >= day_ago + ) + ) + ) + plays_7d = await db.execute( + select(func.count(PlaybackActivity.id)).where( + and_( + PlaybackActivity.library_id == library.id, + PlaybackActivity.started_at >= week_ago + ) + ) + ) + plays_30d = await db.execute( + select(func.count(PlaybackActivity.id)).where( + and_( + PlaybackActivity.library_id == library.id, + PlaybackActivity.started_at >= month_ago + ) + ) + ) + + # Watch time by period + watch_time_24h = await db.execute( + select(func.coalesce(func.sum(PlaybackActivity.duration_seconds), 0)).where( + and_( + PlaybackActivity.library_id == library.id, + PlaybackActivity.started_at >= day_ago + ) + ) + ) + watch_time_7d = await db.execute( + select(func.coalesce(func.sum(PlaybackActivity.duration_seconds), 0)).where( + and_( + PlaybackActivity.library_id == library.id, + PlaybackActivity.started_at >= week_ago + ) + ) + ) + watch_time_30d = await db.execute( + select(func.coalesce(func.sum(PlaybackActivity.duration_seconds), 0)).where( + and_( + PlaybackActivity.library_id == library.id, + PlaybackActivity.started_at >= month_ago + ) + ) + ) + + # Top users for this library (last 30 days) + top_users = await db.execute( + select( + PlaybackActivity.user_id, + func.count(PlaybackActivity.id).label("plays"), + func.sum(PlaybackActivity.duration_seconds).label("watch_time") + ) + .where( + and_( + PlaybackActivity.library_id == library.id, + PlaybackActivity.started_at >= month_ago + ) + ) + .group_by(PlaybackActivity.user_id) + .order_by(func.count(PlaybackActivity.id).desc()) + .limit(5) + ) + top_users_list = [ + { + "user_id": row.user_id, + "plays": row.plays, + "watch_time_seconds": int(row.watch_time or 0) + } + for row in top_users.all() + ] + + # Recently watched items + recently_watched = await db.execute( + select(MediaItem) + .where( + and_( + MediaItem.library_id == library.id, + 
MediaItem.last_watched_at.isnot(None) + ) + ) + .order_by(MediaItem.last_watched_at.desc()) + .limit(10) + ) + recently_watched_list = [ + { + "id": item.id, + "title": item.title, + "media_type": item.media_type.value, + "last_watched_at": item.last_watched_at.isoformat() if item.last_watched_at else None, + "watch_count": item.watch_count + } + for item in recently_watched.scalars().all() + ] + + # Active sessions in this library + active_sessions = await db.execute( + select(func.count(PlaybackActivity.id)).where( + and_( + PlaybackActivity.library_id == library.id, + PlaybackActivity.is_active == True + ) + ) + ) + + return { + "id": library.id, + "name": library.name, + "type": "Movies" if library.media_type == MediaType.MOVIE else "Series", + "media_type": library.media_type.value, + "is_enabled": library.is_enabled, + "path": library.path, + "service_name": service.name if service else None, + "external_id": library.external_id, + "last_synced_at": library.last_synced_at.isoformat() if library.last_synced_at else None, + + # Item counts + "total_items": total_items_count, + "total_size_bytes": total_size_bytes, + "total_plays": total_plays_count, + "item_breakdown": item_breakdown, + + # Time-based stats + "stats": { + "plays_24h": plays_24h.scalar() or 0, + "plays_7d": plays_7d.scalar() or 0, + "plays_30d": plays_30d.scalar() or 0, + "watch_time_24h": int(watch_time_24h.scalar() or 0), + "watch_time_7d": int(watch_time_7d.scalar() or 0), + "watch_time_30d": int(watch_time_30d.scalar() or 0) + }, + + # Top users + "top_users": top_users_list, + + # Recently watched + "recently_watched": recently_watched_list, + + # Active sessions + "active_sessions": active_sessions.scalar() or 0 + } + + +@router.get("/{library_id}/media") +async def get_library_media( + library_id: int, + page: int = Query(1, ge=1), + page_size: int = Query(50, ge=1, le=100), + search: Optional[str] = None, + sort_by: str = Query("title", 
regex="^(title|added_at|last_watched_at|watch_count|size_bytes)$"), + sort_order: str = Query("asc", regex="^(asc|desc)$"), + media_type: Optional[str] = None, + db: AsyncSession = Depends(get_db), + current_user = Depends(get_current_user) +) -> Dict[str, Any]: + """Get paginated media items for a library.""" + result = await db.execute( + select(Library).where(Library.id == library_id) + ) + library = result.scalar_one_or_none() + if not library: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Library not found" + ) + + # Build query + query = select(MediaItem).where(MediaItem.library_id == library_id) + count_query = select(func.count(MediaItem.id)).where(MediaItem.library_id == library_id) + + # Filter by media type (for Series library, show only series or episodes) + if media_type: + media_type_enum = MediaType(media_type) + query = query.where(MediaItem.media_type == media_type_enum) + count_query = count_query.where(MediaItem.media_type == media_type_enum) + elif library.media_type == MediaType.SERIES: + # By default show only series (not seasons/episodes) + query = query.where(MediaItem.media_type == MediaType.SERIES) + count_query = count_query.where(MediaItem.media_type == MediaType.SERIES) + + # Search filter + if search: + search_pattern = f"%{search}%" + query = query.where(MediaItem.title.ilike(search_pattern)) + count_query = count_query.where(MediaItem.title.ilike(search_pattern)) + + # Get total count + total_result = await db.execute(count_query) + total = total_result.scalar() or 0 + + # Sorting + sort_column = getattr(MediaItem, sort_by, MediaItem.title) + if sort_order == "desc": + query = query.order_by(sort_column.desc().nullslast()) + else: + query = query.order_by(sort_column.asc().nullsfirst()) + + # Pagination + offset = (page - 1) * page_size + query = query.offset(offset).limit(page_size) + + result = await db.execute(query) + items = result.scalars().all() + + return { + "items": [ + { + "id": item.id, + 
"title": item.title, + "media_type": item.media_type.value, + "external_id": item.external_id, + "added_at": item.added_at.isoformat() if item.added_at else None, + "last_watched_at": item.last_watched_at.isoformat() if item.last_watched_at else None, + "watch_count": item.watch_count, + "size_bytes": item.size_bytes, + "year": item.year + } + for item in items + ], + "total": total, + "page": page, + "page_size": page_size, + "total_pages": (total + page_size - 1) // page_size if total > 0 else 1 + } + + +@router.get("/{library_id}/activity") +async def get_library_activity( + library_id: int, + page: int = Query(1, ge=1), + page_size: int = Query(50, ge=1, le=100), + db: AsyncSession = Depends(get_db), + current_user = Depends(get_current_user) +) -> Dict[str, Any]: + """Get playback activity for a specific library.""" + result = await db.execute( + select(Library).where(Library.id == library_id) + ) + library = result.scalar_one_or_none() + if not library: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Library not found" + ) + + # Get total count + count_query = select(func.count(PlaybackActivity.id)).where( + PlaybackActivity.library_id == library_id + ) + total_result = await db.execute(count_query) + total = total_result.scalar() or 0 + + # Get activity + offset = (page - 1) * page_size + query = ( + select(PlaybackActivity) + .where(PlaybackActivity.library_id == library_id) + .order_by(PlaybackActivity.started_at.desc()) + .offset(offset) + .limit(page_size) + ) + result = await db.execute(query) + activities = result.scalars().all() + + return { + "items": [ + { + "id": activity.id, + "user_id": activity.user_id, + "media_title": activity.media_title, + "client_name": activity.client_name, + "device_name": activity.device_name, + "play_method": activity.play_method, + "is_transcoding": activity.is_transcoding, + "started_at": activity.started_at.isoformat() if activity.started_at else None, + "ended_at": 
activity.ended_at.isoformat() if activity.ended_at else None, + "duration_seconds": activity.duration_seconds, + "played_percentage": activity.played_percentage, + "is_active": activity.is_active + } + for activity in activities + ], + "total": total, + "page": page, + "page_size": page_size, + "total_pages": (total + page_size - 1) // page_size if total > 0 else 1 + } + + @router.patch("/{library_id}", response_model=LibraryResponse) async def update_library( library_id: int, diff --git a/backend/app/api/routes/users.py b/backend/app/api/routes/users.py new file mode 100644 index 0000000..a9de6a5 --- /dev/null +++ b/backend/app/api/routes/users.py @@ -0,0 +1,295 @@ +""" +Users API routes - Media server users and their statistics. +""" +from fastapi import APIRouter, Depends, Query, HTTPException +from sqlalchemy import select, func, desc, and_ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload +from typing import Optional +from datetime import datetime, timedelta + +from ...core.database import get_db +from ...api.deps import get_current_user +from ...models import ( + User, MediaServerUser, UserWatchHistory, PlaybackActivity, MediaItem +) + +router = APIRouter() + + +@router.get("/") +async def get_users( + page: int = Query(1, ge=1), + page_size: int = Query(25, ge=1, le=100), + search: Optional[str] = None, + include_hidden: bool = False, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Get paginated list of media server users with statistics.""" + + # Base query + query = select(MediaServerUser).order_by(desc(MediaServerUser.last_activity_at)) + + # Apply filters + conditions = [] + + if not include_hidden: + conditions.append(MediaServerUser.is_hidden == False) + + if search: + conditions.append(MediaServerUser.name.ilike(f"%{search}%")) + + if conditions: + query = query.where(and_(*conditions)) + + # Count total + count_query = 
select(func.count()).select_from(query.subquery()) + total_result = await db.execute(count_query) + total = total_result.scalar() or 0 + + # Paginate + offset = (page - 1) * page_size + query = query.offset(offset).limit(page_size) + + result = await db.execute(query) + users = result.scalars().all() + + # Get last watched for each user + user_data = [] + for u in users: + # Get last activity + last_activity_result = await db.execute( + select(PlaybackActivity) + .where(PlaybackActivity.user_id == u.id) + .order_by(desc(PlaybackActivity.started_at)) + .limit(1) + ) + last_activity = last_activity_result.scalar_one_or_none() + + user_data.append({ + "id": u.id, + "external_id": u.external_id, + "name": u.name, + "is_admin": u.is_admin, + "is_hidden": u.is_hidden, + "total_plays": u.total_plays, + "total_watch_time_seconds": u.total_watch_time_seconds, + "last_activity_at": u.last_activity_at.isoformat() if u.last_activity_at else None, + "last_watched": { + "title": last_activity.media_title, + "client": last_activity.client_name, + "device": last_activity.device_name + } if last_activity else None + }) + + return { + "items": user_data, + "total": total, + "page": page, + "page_size": page_size, + "total_pages": (total + page_size - 1) // page_size + } + + +@router.get("/{user_id}") +async def get_user_detail( + user_id: int, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Get detailed information about a specific user.""" + + result = await db.execute( + select(MediaServerUser) + .options(selectinload(MediaServerUser.service_connection)) + .where(MediaServerUser.id == user_id) + ) + user = result.scalar_one_or_none() + + if not user: + raise HTTPException(status_code=404, detail="User not found") + + # Get time-based stats + now = datetime.utcnow() + periods = { + "last_24h": now - timedelta(hours=24), + "last_7d": now - timedelta(days=7), + "last_30d": now - timedelta(days=30) + } + + stats = {} + for period_name, 
since in periods.items(): + plays_result = await db.execute( + select( + func.count(PlaybackActivity.id), + func.sum(PlaybackActivity.duration_seconds) + ) + .where( + and_( + PlaybackActivity.user_id == user_id, + PlaybackActivity.started_at >= since + ) + ) + ) + row = plays_result.one() + stats[period_name] = { + "plays": row[0] or 0, + "watch_seconds": row[1] or 0 + } + + # Get recently watched + recent_result = await db.execute( + select(PlaybackActivity) + .where(PlaybackActivity.user_id == user_id) + .order_by(desc(PlaybackActivity.started_at)) + .limit(10) + ) + recent_activities = recent_result.scalars().all() + + return { + "id": user.id, + "external_id": user.external_id, + "name": user.name, + "is_admin": user.is_admin, + "is_hidden": user.is_hidden, + "service": user.service_connection.name if user.service_connection else None, + "total_plays": user.total_plays, + "total_watch_time_seconds": user.total_watch_time_seconds, + "last_activity_at": user.last_activity_at.isoformat() if user.last_activity_at else None, + "created_at": user.created_at.isoformat() if user.created_at else None, + "stats": stats, + "recently_watched": [ + { + "id": a.id, + "media_title": a.media_title, + "client_name": a.client_name, + "device_name": a.device_name, + "started_at": a.started_at.isoformat() if a.started_at else None, + "duration_seconds": a.duration_seconds, + "played_percentage": a.played_percentage + } + for a in recent_activities + ] + } + + +@router.get("/{user_id}/activity") +async def get_user_activity( + user_id: int, + page: int = Query(1, ge=1), + page_size: int = Query(25, ge=1, le=100), + library_id: Optional[int] = None, + media_type: Optional[str] = None, + search: Optional[str] = None, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Get paginated activity history for a specific user.""" + + # Verify user exists + user_result = await db.execute( + select(MediaServerUser).where(MediaServerUser.id == 
user_id) + ) + if not user_result.scalar_one_or_none(): + raise HTTPException(status_code=404, detail="User not found") + + # Base query + query = ( + select(PlaybackActivity) + .options( + selectinload(PlaybackActivity.media_item), + selectinload(PlaybackActivity.library) + ) + .where(PlaybackActivity.user_id == user_id) + .order_by(desc(PlaybackActivity.started_at)) + ) + + # Apply filters + conditions = [PlaybackActivity.user_id == user_id] + + if library_id: + conditions.append(PlaybackActivity.library_id == library_id) + + if media_type: + query = query.join(MediaItem, MediaItem.id == PlaybackActivity.media_item_id, isouter=True) + conditions.append(MediaItem.media_type == media_type) + + if search: + conditions.append(PlaybackActivity.media_title.ilike(f"%{search}%")) + + query = query.where(and_(*conditions)) + + # Count total + count_query = select(func.count()).select_from(query.subquery()) + total_result = await db.execute(count_query) + total = total_result.scalar() or 0 + + # Paginate + offset = (page - 1) * page_size + query = query.offset(offset).limit(page_size) + + result = await db.execute(query) + activities = result.scalars().all() + + return { + "items": [ + { + "id": a.id, + "media_title": a.media_title, + "media_item_id": a.media_item_id, + "media_type": a.media_item.media_type if a.media_item else None, + "library": { + "id": a.library.id, + "name": a.library.name + } if a.library else None, + "client_name": a.client_name, + "device_name": a.device_name, + "ip_address": a.ip_address, + "play_method": a.play_method, + "is_transcoding": a.is_transcoding, + "transcode_video": a.transcode_video, + "transcode_audio": a.transcode_audio, + "started_at": a.started_at.isoformat() if a.started_at else None, + "ended_at": a.ended_at.isoformat() if a.ended_at else None, + "duration_seconds": a.duration_seconds, + "played_percentage": a.played_percentage + } + for a in activities + ], + "total": total, + "page": page, + "page_size": page_size, + 
"total_pages": (total + page_size - 1) // page_size + } + + +@router.patch("/{user_id}") +async def update_user( + user_id: int, + is_hidden: Optional[bool] = None, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """Update user settings (e.g., hide from statistics).""" + + result = await db.execute( + select(MediaServerUser).where(MediaServerUser.id == user_id) + ) + user = result.scalar_one_or_none() + + if not user: + raise HTTPException(status_code=404, detail="User not found") + + if is_hidden is not None: + user.is_hidden = is_hidden + + await db.commit() + await db.refresh(user) + + return { + "id": user.id, + "name": user.name, + "is_hidden": user.is_hidden, + "message": "User updated successfully" + } diff --git a/backend/app/main.py b/backend/app/main.py index 9362084..c1cbed8 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -13,7 +13,7 @@ from .core.config import get_settings from .core.database import init_db, close_db from .core.rate_limit import setup_rate_limiting, limiter, RateLimits -from .api.routes import auth, services, rules, libraries, notifications, system, jobs, media, staging, audit +from .api.routes import auth, services, rules, libraries, notifications, system, jobs, media, staging, audit, activity, users settings = get_settings() @@ -95,6 +95,8 @@ async def lifespan(app: FastAPI): app.include_router(jobs.router, prefix="/api") app.include_router(media.router, prefix="/api") app.include_router(staging.router, prefix="/api/staging", tags=["staging"]) +app.include_router(activity.router, prefix="/api/activity", tags=["activity"]) +app.include_router(users.router, prefix="/api/users", tags=["users"]) @app.get("/api/health") diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 9e26f2f..41c21c0 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -5,5 +5,5 @@ ServiceType, MediaType, RuleActionType, NotificationType, 
class PlaybackActivity(Base):
    """Detailed playback activity log: one row per playback session.

    Records what a media-server user watched, when, on which client/device,
    and whether the stream was transcoded.  Live sessions carry
    ``is_active=True`` and are refreshed by the session-sync job
    (``_sync_active_sessions``); once a session disappears it is closed
    (``is_active=False`` and ``ended_at`` set).
    """
    __tablename__ = "playback_activities"

    id = Column(Integer, primary_key=True, index=True)

    # User who watched; activity rows are removed with the user (CASCADE).
    user_id = Column(Integer, ForeignKey("media_server_users.id", ondelete="CASCADE"), nullable=False, index=True)

    # What was watched (can be null if media item was deleted)
    media_item_id = Column(Integer, ForeignKey("media_items.id", ondelete="SET NULL"), nullable=True, index=True)
    media_title = Column(String(500), nullable=False)  # Store title in case item is deleted

    # Library context; nullable so history survives library removal.
    library_id = Column(Integer, ForeignKey("libraries.id", ondelete="SET NULL"), nullable=True, index=True)

    # Playback session details.  NOTE(review): Emby session ids identify a
    # device connection, not a single playback — they repeat across rows.
    session_id = Column(String(100), nullable=True)  # External session ID from Emby
    play_method = Column(String(50), nullable=True)  # DirectPlay, DirectStream, Transcode

    # Client information
    client_name = Column(String(100), nullable=True)  # "Emby Web", "Emby for iOS", etc.
    device_name = Column(String(200), nullable=True)  # "iPhone", "Samsung Smart TV", etc.
    device_id = Column(String(200), nullable=True)  # Device unique ID
    ip_address = Column(String(45), nullable=True)  # IPv4 or IPv6 (45 chars fits mapped IPv6)

    # Transcode information
    is_transcoding = Column(Boolean, default=False)
    transcode_video = Column(Boolean, default=False)
    transcode_audio = Column(Boolean, default=False)
    transcode_reason = Column(String(500), nullable=True)

    # Playback timing
    started_at = Column(DateTime(timezone=True), nullable=False, index=True)
    ended_at = Column(DateTime(timezone=True), nullable=True)  # Null while session is active
    duration_seconds = Column(Integer, default=0)  # How long they watched (approximated from position)

    # Media position in Emby ticks (1 tick = 100 ns; 10,000,000 per second).
    # NOTE(review): `Integer` maps to 32-bit int4 on PostgreSQL; a 2-hour
    # runtime is ~7.2e10 ticks and overflows it — these two columns should
    # become sqlalchemy.BigInteger before PostgreSQL deployments. TODO confirm.
    position_ticks = Column(Integer, nullable=True)  # Current position in ticks
    runtime_ticks = Column(Integer, nullable=True)  # Total runtime in ticks
    played_percentage = Column(Float, default=0)  # 0-100

    # Is this an active/live session?
    is_active = Column(Boolean, default=False, index=True)

    # Metadata (DB-side timestamps)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())

    # Relationships
    user = relationship("MediaServerUser", backref="playback_activities")
    media_item = relationship("MediaItem", backref="playback_activities")
    library = relationship("Library", backref="playback_activities")
async def _sync_active_sessions(
    db: AsyncSession,
    client: EmbyClient,
    service: ServiceConnection,
    user_id_map: Dict[str, int],
    path_to_item: Dict[str, MediaItem]
):
    """Sync active playback sessions from Emby into ``PlaybackActivity`` rows.

    Marks every previously-active row inactive, then re-activates (or creates)
    one row per session that is currently playing something.  Best-effort:
    any failure is logged and swallowed so it cannot break the main sync.

    Args:
        db: Open async DB session; committed here on success.
        client: Connected Emby API client.
        service: The media-server connection being synced (kept for parity
            with the other sync helpers; not read here).
        user_id_map: Emby user id -> local ``MediaServerUser.id``.
        path_to_item: File path -> ``MediaItem`` for linking sessions to items.
    """
    try:
        sessions = await client.get_sessions()

        # Close out everything previously considered active; sessions that
        # are still live get re-activated below.
        result = await db.execute(
            select(PlaybackActivity).where(PlaybackActivity.is_active == True)
        )
        for activity in result.scalars().all():
            activity.is_active = False
            if not activity.ended_at:
                activity.ended_at = datetime.now(timezone.utc)

        active_count = 0

        for session in sessions:
            now_playing = session.get("NowPlayingItem")
            if not now_playing:
                continue

            db_user_id = user_id_map.get(session.get("UserId"))
            if not db_user_id:
                continue

            # Link to a local MediaItem by file path when possible.
            item_path = now_playing.get("Path")
            media_item = path_to_item.get(item_path) if item_path else None

            # Build a display title; episodes become "Series : S1E2 - Name".
            title = now_playing.get("Name", "Unknown")
            series_name = now_playing.get("SeriesName")
            if series_name:
                season_num = now_playing.get("ParentIndexNumber", 0)
                episode_num = now_playing.get("IndexNumber", 0)
                title = f"{series_name} : S{season_num}E{episode_num} - {title}"

            play_state = session.get("PlayState", {})
            transcode_info = session.get("TranscodingInfo", {})

            # BUG FIX: prefer Emby's own PlayState.PlayMethod (DirectPlay /
            # DirectStream / Transcode).  The old fallback re-checked
            # TranscodingInfo in the branch where it was already known to be
            # empty, so "DirectStream" could never be reported.
            play_method = play_state.get("PlayMethod") or (
                "Transcode" if transcode_info else "DirectPlay"
            )
            is_transcoding = bool(transcode_info)
            transcode_video = is_transcoding and transcode_info.get("IsVideoDirect", True) is False
            transcode_audio = is_transcoding and transcode_info.get("IsAudioDirect", True) is False

            session_id = session.get("Id")

            # BUG FIX: Emby session ids identify a device connection and
            # survive item changes, so match on title too — otherwise playing
            # a second item would overwrite the first item's record.  Use
            # .first() instead of scalar_one_or_none() so replays of the same
            # title on the same session don't raise MultipleResultsFound.
            result = await db.execute(
                select(PlaybackActivity)
                .where(
                    PlaybackActivity.user_id == db_user_id,
                    PlaybackActivity.session_id == session_id,
                    PlaybackActivity.media_title == title,
                )
                .order_by(PlaybackActivity.started_at.desc())
            )
            existing = result.scalars().first()

            position_ticks = play_state.get("PositionTicks", 0)
            runtime_ticks = now_playing.get("RunTimeTicks", 1) or 1
            played_percentage = (position_ticks / runtime_ticks * 100) if runtime_ticks else 0

            if existing:
                # Same user/session/item: refresh progress in place.
                existing.is_active = True
                existing.ended_at = None
                existing.play_method = play_method
                existing.is_transcoding = is_transcoding
                existing.transcode_video = transcode_video
                existing.transcode_audio = transcode_audio
                existing.position_ticks = position_ticks
                existing.played_percentage = played_percentage
                # Approximates watch time by current position (10,000,000
                # ticks per second); seeks and pauses are not accounted for.
                existing.duration_seconds = int(position_ticks / 10_000_000)
            else:
                db.add(PlaybackActivity(
                    user_id=db_user_id,
                    media_item_id=media_item.id if media_item else None,
                    media_title=title,
                    library_id=media_item.library_id if media_item else None,
                    session_id=session_id,
                    play_method=play_method,
                    client_name=session.get("Client"),
                    device_name=session.get("DeviceName"),
                    device_id=session.get("DeviceId"),
                    ip_address=session.get("RemoteEndPoint"),
                    is_transcoding=is_transcoding,
                    transcode_video=transcode_video,
                    transcode_audio=transcode_audio,
                    started_at=datetime.now(timezone.utc),
                    position_ticks=position_ticks,
                    runtime_ticks=runtime_ticks,
                    played_percentage=played_percentage,
                    is_active=True
                ))

            active_count += 1

        await db.commit()
        logger.info(f"Synced {active_count} active playback sessions")

    except Exception as e:
        # Best-effort: session tracking must never break the main sync run.
        logger.warning(f"Failed to sync active sessions: {e}")
ArchiveBoxIcon }, { name: 'Jobs', href: '/jobs', icon: CpuChipIcon }, diff --git a/frontend/src/pages/Activity.tsx b/frontend/src/pages/Activity.tsx new file mode 100644 index 0000000..a3bd434 --- /dev/null +++ b/frontend/src/pages/Activity.tsx @@ -0,0 +1,401 @@ +import { useQuery } from '@tanstack/react-query' +import { useState } from 'react' +import { Link } from 'react-router-dom' +import { + PlayIcon, + MagnifyingGlassIcon, + FilmIcon, + TvIcon, + UserIcon, + FunnelIcon, + SignalIcon +} from '@heroicons/react/24/outline' +import api from '../lib/api' +import { formatRelativeTime } from '../lib/utils' + +interface ActivityItem { + id: number + user: { + id: number + name: string + is_admin: boolean + } | null + media_title: string + media_item_id: number | null + media_type: string | null + library: { id: number; name: string } | null + client_name: string + device_name: string + ip_address: string + play_method: string + is_transcoding: boolean + transcode_video: boolean + transcode_audio: boolean + started_at: string + ended_at: string | null + duration_seconds: number + played_percentage: number + is_active: boolean +} + +interface ActivityResponse { + items: ActivityItem[] + total: number + page: number + page_size: number + total_pages: number +} + +interface ActivityStats { + period_days: number + total_plays: number + total_watch_seconds: number + unique_users: number + active_sessions: number + plays_by_day: { date: string; plays: number; duration_seconds: number }[] + plays_by_hour: { hour: number; plays: number }[] + plays_by_day_of_week: { day_of_week: number; plays: number }[] +} + +function formatDuration(seconds: number): string { + if (!seconds || seconds === 0) return '0s' + + const hours = Math.floor(seconds / 3600) + const minutes = Math.floor((seconds % 3600) / 60) + const secs = seconds % 60 + + if (hours > 0) { + return `${hours}h ${minutes}m ${secs}s` + } else if (minutes > 0) { + return `${minutes}m ${secs}s` + } + return `${secs}s` +} + 
+function formatWatchTime(seconds: number): string { + if (!seconds || seconds === 0) return '0 Minutes' + + const hours = Math.floor(seconds / 3600) + const minutes = Math.floor((seconds % 3600) / 60) + + if (hours > 0) { + return `${hours} Hours ${minutes} Minutes` + } + return `${minutes} Minutes` +} + +export default function Activity() { + const [page, setPage] = useState(1) + const [pageSize] = useState(25) + const [search, setSearch] = useState('') + const [debouncedSearch, setDebouncedSearch] = useState('') + const [mediaTypeFilter, setMediaTypeFilter] = useState('') + + const handleSearchChange = (value: string) => { + setSearch(value) + setTimeout(() => { + setDebouncedSearch(value) + setPage(1) + }, 300) + } + + // Fetch activity stats + const { data: stats } = useQuery({ + queryKey: ['activityStats'], + queryFn: async () => { + const res = await api.get('/activity/stats?days=30') + return res.data + } + }) + + // Fetch active sessions + const { data: activeSessions } = useQuery({ + queryKey: ['activeSessions'], + queryFn: async () => { + const res = await api.get('/activity/active') + return res.data + }, + refetchInterval: 30000 // Refresh every 30 seconds + }) + + // Fetch activity list + const { data, isLoading } = useQuery({ + queryKey: ['activities', page, pageSize, debouncedSearch, mediaTypeFilter], + queryFn: async () => { + const params = new URLSearchParams({ + page: page.toString(), + page_size: pageSize.toString() + }) + if (debouncedSearch) params.append('search', debouncedSearch) + if (mediaTypeFilter) params.append('media_type', mediaTypeFilter) + + const res = await api.get(`/activity/?${params}`) + return res.data + } + }) + + return ( +
+ {/* Header */} +
+

Activity

+

+ Playback history and active sessions +

+
+ + {/* Stats Cards */} + {stats && ( +
+
+
+
+ +
+
+
+ {activeSessions?.length || 0} +
+
Now Playing
+
+
+
+ +
+
+
+ +
+
+
+ {stats.total_plays.toLocaleString()} +
+
Plays (30 days)
+
+
+
+ +
+
+
+ +
+
+
+ {stats.unique_users} +
+
Active Users
+
+
+
+ +
+
+ {formatWatchTime(stats.total_watch_seconds)} +
+
Total Watch Time
+
+
+ )} + + {/* Active Sessions */} + {activeSessions && activeSessions.length > 0 && ( +
+

+ + Now Playing + + ({activeSessions.length} active) + +

+
+ {activeSessions.map((session: any) => ( +
+
+

+ {session.media_title} +

+

+ {session.user?.name || 'Unknown'} • {session.client_name} +

+
+
+
+ {Math.round(session.played_percentage)}% +
+
+ {session.is_transcoding ? 'Transcode' : 'Direct'} +
+
+
+ ))} +
+
+ )} + + {/* Filters */} +
+
+ {/* Search */} +
+ + handleSearchChange(e.target.value)} + placeholder="Search by title, client, device..." + className="w-full pl-10 pr-4 py-2 bg-gray-50 dark:bg-dark-700 border border-gray-200 dark:border-dark-600 rounded-lg text-gray-900 dark:text-white placeholder-gray-400 focus:outline-none focus:ring-2 focus:ring-primary-500" + /> +
+ + {/* Type Filter */} + +
+
+ + {/* Activity Table */} +
+
+ + + + + + + + + + + + + {isLoading ? ( + [...Array(10)].map((_, i) => ( + + + + + + + + + )) + ) : data?.items.length === 0 ? ( + + + + ) : ( + data?.items.map((item) => ( + + + + + + + + + )) + )} + +
UserTitleClientTranscodeDateDuration
+ +

No activity found

+

+ Activity will appear after syncing with your media server +

+
+ {item.user ? ( + + {item.user.name} + + ) : ( + Unknown + )} + +
+ {item.media_type === 'movie' ? ( + + ) : ( + + )} + + {item.media_title} + + {item.is_active && ( + + LIVE + + )} +
+
+ {item.client_name} + {item.device_name} + + {item.is_transcoding ? ( + + Transcode + {item.transcode_video && item.transcode_audio ? ' (V+A)' : + item.transcode_video ? ' (Video)' : + item.transcode_audio ? ' (Audio)' : ''} + + ) : ( + {item.play_method || 'Direct'} + )} + + + {new Date(item.started_at).toLocaleString()} + + + + {formatDuration(item.duration_seconds)} + +
+
+ + {/* Pagination */} + {data && data.total_pages > 1 && ( +
+
+ Showing {((page - 1) * pageSize) + 1} to {Math.min(page * pageSize, data.total)} of {data.total} +
+
+ + + {page} of {data.total_pages} + + +
+
+ )} +
+
+ ) +} diff --git a/frontend/src/pages/Libraries.tsx b/frontend/src/pages/Libraries.tsx index 7dc8afa..5af0613 100644 --- a/frontend/src/pages/Libraries.tsx +++ b/frontend/src/pages/Libraries.tsx @@ -1,4 +1,5 @@ import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { Link } from 'react-router-dom' import { FolderIcon, ArrowPathIcon, @@ -8,7 +9,8 @@ import { FilmIcon, TvIcon, PlayIcon, - ClockIcon + ClockIcon, + ArrowTopRightOnSquareIcon } from '@heroicons/react/24/outline' import api from '../lib/api' import toast from 'react-hot-toast' @@ -78,17 +80,18 @@ function LibraryCard({ const isMovie = stat.media_type === 'movie' return ( -
+
{/* Header with gradient */}
-
+ {isMovie ? ( ) : ( )}

{stat.name}

-
+ + + ))} + +
+ + {/* Tab Content */} + {activeTab === 'overview' && ( +
+ {/* Stats Grid */} +
+
+
+ + Total Items +
+

{details.total_items.toLocaleString()}

+
+
+
+ + Total Plays +
+

{details.total_plays.toLocaleString()}

+
+
+
+ Size +
+

{formatBytes(details.total_size_bytes)}

+
+
+
+ Status +
+

+ {details.is_enabled ? 'Enabled' : 'Disabled'} +

+
+
+ + {/* Time-based Stats */} +
+

Activity Summary

+
+
+

Last 24 Hours

+

{details.stats.plays_24h} plays

+

{formatDuration(details.stats.watch_time_24h)}

+
+
+

Last 7 Days

+

{details.stats.plays_7d} plays

+

{formatDuration(details.stats.watch_time_7d)}

+
+
+

Last 30 Days

+

{details.stats.plays_30d} plays

+

{formatDuration(details.stats.watch_time_30d)}

+
+
+
+ + {/* Item Breakdown (for Series) */} + {details.media_type === 'series' && ( +
+

Content Breakdown

+
+
+

{details.item_breakdown.series}

+

Series

+
+
+

{details.item_breakdown.seasons}

+

Seasons

+
+
+

{details.item_breakdown.episodes}

+

Episodes

+
+
+
+ )} + + {/* Recently Watched */} + {details.recently_watched.length > 0 && ( +
+

Recently Watched

+
+ {details.recently_watched.map((item) => ( +
+
+ {item.media_type === 'movie' ? ( + + ) : ( + + )} + {item.title} +
+
+ {item.watch_count} plays + {formatRelativeTime(item.last_watched_at)} +
+
+ ))} +
+
+ )} + + {/* Library Info */} +
+

Library Information

+
+
+ Path + {details.path} +
+
+ External ID + {details.external_id} +
+
+ Last Synced + {formatDate(details.last_synced_at)} +
+
+
+
+ )} + + {activeTab === 'media' && ( +
+ {/* Search and Sort */} +
+
+ + { + setMediaSearch(e.target.value); + setMediaPage(1); + }} + className="w-full pl-10 pr-4 py-2 bg-dark-800 border border-dark-600 rounded-lg focus:ring-2 focus:ring-primary-500 focus:border-primary-500" + /> +
+ +
+ + {/* Media Table */} +
+
+ + + + + + + + + + + + {media.map((item) => ( + + + + + + + + ))} + +
TitleYearSizePlaysLast Watched
+
+ {item.media_type === 'movie' ? ( + + ) : ( + + )} + {item.title} +
+
{item.year || '-'}{formatBytes(item.size_bytes)}{item.watch_count} + {formatRelativeTime(item.last_watched_at)} +
+
+
+ + {/* Media Pagination */} + {mediaTotalPages > 1 && ( +
+ + + Page {mediaPage} of {mediaTotalPages} + + +
+ )} +
+ )} + + {activeTab === 'activity' && ( +
+ {/* Activity Table */} +
+
+ + + + + + + + + + + + + {activity.length === 0 ? ( + + + + ) : ( + activity.map((item) => ( + + + + + + + + + )) + )} + +
TitleUserClientPlayedDurationStatus
+ No activity recorded yet +
{item.media_title} + + {item.user_id.slice(0, 8)}... + + + {item.client_name || item.device_name || 'Unknown'} + + {formatRelativeTime(item.started_at)} + + {formatDuration(item.duration_seconds)} + {item.played_percentage > 0 && ( + + ({Math.round(item.played_percentage)}%) + + )} + + {item.is_active ? ( + + + Playing + + ) : ( + + {item.is_transcoding ? 'Transcoded' : 'Direct'} + + )} +
+
+
+ + {/* Activity Pagination */} + {activityTotalPages > 1 && ( +
+ + + Page {activityPage} of {activityTotalPages} + + +
+ )} +
+ )} +
+ ); +} diff --git a/frontend/src/pages/UserDetail.tsx b/frontend/src/pages/UserDetail.tsx new file mode 100644 index 0000000..0120c16 --- /dev/null +++ b/frontend/src/pages/UserDetail.tsx @@ -0,0 +1,425 @@ +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' +import { useState } from 'react' +import { useParams, Link } from 'react-router-dom' +import { + UserIcon, + ArrowLeftIcon, + PlayIcon, + ClockIcon, + ShieldCheckIcon, + EyeSlashIcon, + EyeIcon, + FilmIcon, + TvIcon +} from '@heroicons/react/24/outline' +import api from '../lib/api' +import { formatRelativeTime } from '../lib/utils' +import toast from 'react-hot-toast' + +interface UserStats { + plays: number + watch_seconds: number +} + +interface RecentActivity { + id: number + media_title: string + client_name: string + device_name: string + started_at: string + duration_seconds: number + played_percentage: number +} + +interface UserDetail { + id: number + external_id: string + name: string + is_admin: boolean + is_hidden: boolean + service: string | null + total_plays: number + total_watch_time_seconds: number + last_activity_at: string | null + created_at: string | null + stats: { + last_24h: UserStats + last_7d: UserStats + last_30d: UserStats + } + recently_watched: RecentActivity[] +} + +interface ActivityItem { + id: number + media_title: string + media_item_id: number | null + media_type: string | null + library: { id: number; name: string } | null + client_name: string + device_name: string + ip_address: string + play_method: string + is_transcoding: boolean + transcode_video: boolean + transcode_audio: boolean + started_at: string + ended_at: string | null + duration_seconds: number + played_percentage: number +} + +interface ActivityResponse { + items: ActivityItem[] + total: number + page: number + page_size: number + total_pages: number +} + +function formatDuration(seconds: number): string { + if (!seconds || seconds === 0) return '0s' + + const hours = 
Math.floor(seconds / 3600) + const minutes = Math.floor((seconds % 3600) / 60) + const secs = seconds % 60 + + if (hours > 0) { + return `${hours}h ${minutes}m ${secs}s` + } else if (minutes > 0) { + return `${minutes}m ${secs}s` + } + return `${secs}s` +} + +function formatWatchTime(seconds: number): string { + if (!seconds || seconds === 0) return '0 Minutes' + + const days = Math.floor(seconds / 86400) + const hours = Math.floor((seconds % 86400) / 3600) + const minutes = Math.floor((seconds % 3600) / 60) + + const parts = [] + if (days > 0) parts.push(`${days} Days`) + if (hours > 0) parts.push(`${hours} Hours`) + if (minutes > 0 || parts.length === 0) parts.push(`${minutes} Minutes`) + + return parts.join(' ') +} + +// Stats Box Component +function StatsBox({ title, stats }: { title: string; stats: UserStats }) { + return ( +
+

{title}

+
{stats.plays}
+
Plays
+
+ {formatWatchTime(stats.watch_seconds)} +
+
+ ) +} + +export default function UserDetail() { + const { userId } = useParams<{ userId: string }>() + const queryClient = useQueryClient() + const [activeTab, setActiveTab] = useState<'overview' | 'activity'>('overview') + const [activityPage, setActivityPage] = useState(1) + + const { data: user, isLoading } = useQuery({ + queryKey: ['mediaUser', userId], + queryFn: async () => { + const res = await api.get(`/users/${userId}`) + return res.data + }, + enabled: !!userId + }) + + const { data: activityData, isLoading: activityLoading } = useQuery({ + queryKey: ['userActivity', userId, activityPage], + queryFn: async () => { + const res = await api.get(`/users/${userId}/activity?page=${activityPage}&page_size=25`) + return res.data + }, + enabled: !!userId && activeTab === 'activity' + }) + + const toggleHiddenMutation = useMutation({ + mutationFn: async () => { + const res = await api.patch(`/users/${userId}`, { is_hidden: !user?.is_hidden }) + return res.data + }, + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ['mediaUser', userId] }) + queryClient.invalidateQueries({ queryKey: ['mediaUsers'] }) + toast.success(user?.is_hidden ? 'User is now visible' : 'User is now hidden') + } + }) + + if (isLoading) { + return ( +
+
+
+
+
+
+ ) + } + + if (!user) { + return ( +
+ +

User not found

+ + Back to Users + +
+ ) + } + + return ( +
+ {/* Header */} +
+ + + + +
+
+
+ +
+
+
+

{user.name}

+ {user.is_admin && ( + + )} + {user.is_hidden && ( + + )} +
+

+ {user.service || 'Unknown service'} +

+
+
+
+ + +
+ + {/* Tabs */} +
+ + +
+ + {/* Tab Content */} + {activeTab === 'overview' ? ( +
+ {/* User Stats */} +
+

User Stats

+
+ + + +
+

All Time

+
{user.total_plays}
+
Plays
+
+ {formatWatchTime(user.total_watch_time_seconds)} +
+
+
+
+ + {/* Recently Watched */} +
+

Last Watched

+ {user.recently_watched.length > 0 ? ( +
+ {user.recently_watched.map((item) => ( +
+
+

+ {item.media_title} +

+

+ {item.client_name} • {item.device_name} +

+
+
+

+ {formatDuration(item.duration_seconds)} +

+

+ {formatRelativeTime(item.started_at)} +

+
+
+ ))} +
+ ) : ( +

No watch history yet

+ )} +
+
+ ) : ( +
+
+ + + + + + + + + + + + {activityLoading ? ( + [...Array(5)].map((_, i) => ( + + + + + + + + )) + ) : activityData?.items.length === 0 ? ( + + + + ) : ( + activityData?.items.map((item) => ( + + + + + + + + )) + )} + +
TitleClientTranscodeDateDuration
+ No activity found +
+
+ {item.media_type === 'movie' ? ( + + ) : ( + + )} + + {item.media_title} + +
+
+ + {item.client_name} + + + {item.device_name} + + + {item.is_transcoding ? ( + + Transcode + {item.transcode_video && item.transcode_audio ? ' (V+A)' : + item.transcode_video ? ' (Video)' : + item.transcode_audio ? ' (Audio)' : ''} + + ) : ( + + {item.play_method || 'Direct'} + + )} + + + {new Date(item.started_at).toLocaleString()} + + + + {formatDuration(item.duration_seconds)} + +
+
+ + {/* Pagination */} + {activityData && activityData.total_pages > 1 && ( +
+
+ Page {activityPage} of {activityData.total_pages} +
+
+ + +
+
+ )} +
+ )} +
+ ) +} diff --git a/frontend/src/pages/Users.tsx b/frontend/src/pages/Users.tsx new file mode 100644 index 0000000..a6c18bc --- /dev/null +++ b/frontend/src/pages/Users.tsx @@ -0,0 +1,273 @@ +import { useQuery } from '@tanstack/react-query' +import { useState } from 'react' +import { Link } from 'react-router-dom' +import { + UserIcon, + MagnifyingGlassIcon, + PlayIcon, + ClockIcon, + ShieldCheckIcon, + EyeSlashIcon +} from '@heroicons/react/24/outline' +import api from '../lib/api' +import { formatRelativeTime } from '../lib/utils' + +interface MediaServerUser { + id: number + external_id: string + name: string + is_admin: boolean + is_hidden: boolean + total_plays: number + total_watch_time_seconds: number + last_activity_at: string | null + last_watched: { + title: string + client: string + device: string + } | null +} + +interface UsersResponse { + items: MediaServerUser[] + total: number + page: number + page_size: number + total_pages: number +} + +// Format seconds to readable time +function formatWatchTime(seconds: number): string { + if (!seconds || seconds === 0) return '0 Minutes' + + const hours = Math.floor(seconds / 3600) + const minutes = Math.floor((seconds % 3600) / 60) + + if (hours > 0) { + return `${hours} Hours ${minutes} Minutes` + } + return `${minutes} Minutes` +} + +export default function Users() { + const [page, setPage] = useState(1) + const [pageSize] = useState(25) + const [search, setSearch] = useState('') + const [debouncedSearch, setDebouncedSearch] = useState('') + const [includeHidden, setIncludeHidden] = useState(false) + + // Debounce search + const handleSearchChange = (value: string) => { + setSearch(value) + setTimeout(() => { + setDebouncedSearch(value) + setPage(1) + }, 300) + } + + const { data, isLoading } = useQuery({ + queryKey: ['mediaUsers', page, pageSize, debouncedSearch, includeHidden], + queryFn: async () => { + const params = new URLSearchParams({ + page: page.toString(), + page_size: pageSize.toString(), + 
include_hidden: includeHidden.toString() + }) + if (debouncedSearch) { + params.append('search', debouncedSearch) + } + const res = await api.get(`/users/?${params}`) + return res.data + } + }) + + return ( +
+ {/* Header */} +
+
+

Users

+

+ Media server users and their watch statistics +

+
+
+ + {/* Filters */} +
+
+ {/* Search */} +
+ + handleSearchChange(e.target.value)} + placeholder="Search users..." + className="w-full pl-10 pr-4 py-2 bg-gray-50 dark:bg-dark-700 border border-gray-200 dark:border-dark-600 rounded-lg text-gray-900 dark:text-white placeholder-gray-400 dark:placeholder-dark-500 focus:outline-none focus:ring-2 focus:ring-primary-500" + /> +
+ + {/* Include Hidden Toggle */} + +
+
+ + {/* Users Table */} +
+
+ + + + + + + + + + + + + {isLoading ? ( + [...Array(5)].map((_, i) => ( + + + + + + + + + )) + ) : data?.items.length === 0 ? ( + + + + ) : ( + data?.items.map((user) => ( + + + + + + + + + )) + )} + +
+ User + + Last Watched + + Last Client + + Plays + + Watch Time + + Last Seen +
+ +

No users found

+

+ Users will appear after syncing with your media server +

+
+ +
+ +
+
+
+ + {user.name} + + {user.is_admin && ( + + )} + {user.is_hidden && ( + + )} +
+
+ +
+ + {user.last_watched?.title || 'Never'} + + + + {user.last_watched ? ( + <> + {user.last_watched.client} + {user.last_watched.device && ` - ${user.last_watched.device}`} + + ) : ( + 'N/A' + )} + + + + {user.total_plays.toLocaleString()} + + + + {formatWatchTime(user.total_watch_time_seconds)} + + + + {user.last_activity_at + ? formatRelativeTime(user.last_activity_at) + : 'Never' + } + +
+
+ + {/* Pagination */} + {data && data.total_pages > 1 && ( +
+
+ Showing {((page - 1) * pageSize) + 1} to {Math.min(page * pageSize, data.total)} of {data.total} +
+
+ + + {page} of {data.total_pages} + + +
+
+ )} +
+
+ ) +} From 372876caa5cfea3edb5d82fcfec883d90a8a804b Mon Sep 17 00:00:00 2001 From: / Date: Mon, 2 Feb 2026 14:24:10 +0100 Subject: [PATCH 002/122] feat: Add PostgreSQL support and fix sync issues Database: - Add PostgreSQL support via POSTGRES_* environment variables - Add asyncpg driver to requirements - Create docker-compose.postgres.yml for PostgreSQL setup - Keep SQLite as default for simple deployments - Add connection pooling for PostgreSQL Bug Fixes: - Fix MediaItem.parent_id AttributeError by using series_id (external_id) - Add parent_id column to MediaItem model for future use - Fix Users API to fallback to UserWatchHistory when PlaybackActivity is empty - Fix deprecation warning: regex -> pattern in Query parameters - Remove 'Jellystat-style' comments from codebase Improvements: - Sync jobs now visible in Jobs view with execution logs - Manual sync shows progress/errors in Jobs history - Users page shows last watched from UserWatchHistory fallback --- .env.example | 17 +++++ backend/app/api/routes/libraries.py | 8 +- backend/app/api/routes/media.py | 21 ++++-- backend/app/api/routes/services.py | 46 +++++++++++- backend/app/api/routes/users.py | 111 ++++++++++++++++++++++------ backend/app/core/config.py | 18 ++++- backend/app/core/database.py | 42 +++++++++-- backend/app/models/database.py | 1 + backend/requirements.txt | 1 + docker-compose.postgres.yml | 81 ++++++++++++++++++++ frontend/src/pages/Dashboard.tsx | 8 +- frontend/src/pages/Libraries.tsx | 2 +- 12 files changed, 306 insertions(+), 50 deletions(-) create mode 100644 docker-compose.postgres.yml diff --git a/.env.example b/.env.example index 103b1ac..be5c2e0 100644 --- a/.env.example +++ b/.env.example @@ -14,3 +14,20 @@ MEDIA_PATH=/path/to/your/media # Optional: Debug mode # DEBUG=false + +# ═══════════════════════════════════════════════════════════════════ +# DATABASE CONFIGURATION +# ═══════════════════════════════════════════════════════════════════ +# By default, MediaCurator uses SQLite 
(no configuration needed). +# For larger installations with many users/activity, use PostgreSQL. + +# Option 1: SQLite (default, no configuration needed) +# DATABASE_URL=sqlite+aiosqlite:////app/config/mediacurator.db + +# Option 2: PostgreSQL (recommended for production with many users) +# Use docker-compose.postgres.yml instead of docker-compose.yml +# POSTGRES_HOST=postgres +# POSTGRES_PORT=5432 +# POSTGRES_USER=mediacurator +# POSTGRES_PASSWORD=your-secure-password +# POSTGRES_DB=mediacurator diff --git a/backend/app/api/routes/libraries.py b/backend/app/api/routes/libraries.py index 554b884..a7dcf2d 100644 --- a/backend/app/api/routes/libraries.py +++ b/backend/app/api/routes/libraries.py @@ -22,7 +22,7 @@ async def get_library_stats( db: AsyncSession = Depends(get_db), current_user = Depends(get_current_user) ) -> List[Dict[str, Any]]: - """Get detailed statistics for all libraries (Jellystat-style).""" + """Get detailed statistics for all libraries.""" result = await db.execute( select(Library).order_by(Library.media_type, Library.name) @@ -225,7 +225,7 @@ async def get_library_details( db: AsyncSession = Depends(get_db), current_user = Depends(get_current_user) ) -> Dict[str, Any]: - """Get detailed statistics for a specific library (Jellystat-style).""" + """Get detailed statistics for a specific library.""" result = await db.execute( select(Library).where(Library.id == library_id) ) @@ -479,8 +479,8 @@ async def get_library_media( page: int = Query(1, ge=1), page_size: int = Query(50, ge=1, le=100), search: Optional[str] = None, - sort_by: str = Query("title", regex="^(title|added_at|last_watched_at|watch_count|size_bytes)$"), - sort_order: str = Query("asc", regex="^(asc|desc)$"), + sort_by: str = Query("title", pattern="^(title|added_at|last_watched_at|watch_count|size_bytes)$"), + sort_order: str = Query("asc", pattern="^(asc|desc)$"), media_type: Optional[str] = None, db: AsyncSession = Depends(get_db), current_user = Depends(get_current_user) diff 
--git a/backend/app/api/routes/media.py b/backend/app/api/routes/media.py index e542afa..eb455ea 100644 --- a/backend/app/api/routes/media.py +++ b/backend/app/api/routes/media.py @@ -340,7 +340,7 @@ async def get_dashboard_stats( current_user = Depends(get_current_user) ): """ - Get comprehensive dashboard statistics similar to Jellystat. + Get comprehensive dashboard statistics. Includes: most viewed movies/series, library overview, watch trends. """ @@ -381,33 +381,38 @@ def format_item(item: MediaItem) -> dict: most_viewed_movies = [format_item(m) for m in most_viewed_movies_result.scalars().all()] # === MOST VIEWED SERIES (aggregate episode plays per series) === - # Get series with most total episode plays + # Get series with most total episode plays using series_id (external ID) series_plays_result = await db.execute( select( - MediaItem.parent_id, + MediaItem.series_id, func.sum(MediaItem.watch_count).label('total_plays') ) .where( and_( MediaItem.media_type == MediaType.EPISODE, - MediaItem.parent_id.isnot(None), + MediaItem.series_id.isnot(None), MediaItem.watch_count > 0 ) ) - .group_by(MediaItem.parent_id) + .group_by(MediaItem.series_id) .order_by(func.sum(MediaItem.watch_count).desc()) .limit(limit) ) series_plays = series_plays_result.all() - # Get the actual series info + # Get the actual series info using external_id most_viewed_series = [] for row in series_plays: - if row.parent_id: + if row.series_id: series_result = await db.execute( select(MediaItem) .options(joinedload(MediaItem.service_connection)) - .where(MediaItem.id == row.parent_id) + .where( + and_( + MediaItem.external_id == row.series_id, + MediaItem.media_type == MediaType.SERIES + ) + ) ) series = series_result.scalar_one_or_none() if series: diff --git a/backend/app/api/routes/services.py b/backend/app/api/routes/services.py index 2df3175..3af56a2 100644 --- a/backend/app/api/routes/services.py +++ b/backend/app/api/routes/services.py @@ -215,6 +215,8 @@ async def sync_service( ): 
"""Sync media items from a service.""" from ...services.sync import sync_service_media + from ...models import JobExecutionLog + from datetime import timezone result = await db.execute( select(ServiceConnection).where(ServiceConnection.id == service_id) @@ -226,5 +228,45 @@ async def sync_service( detail="Service connection not found" ) - sync_result = await sync_service_media(db, service) - return sync_result + # Create execution log for visibility in Jobs view + start_time = datetime.now(timezone.utc) + execution_log = JobExecutionLog( + job_id=f"sync_service_{service_id}", + job_name=f"Sync: {service.name}", + status="running", + started_at=start_time + ) + db.add(execution_log) + await db.commit() + await db.refresh(execution_log) + + try: + sync_result = await sync_service_media(db, service) + + # Update execution log + end_time = datetime.now(timezone.utc) + execution_log.status = "success" if sync_result.get("success", True) else "error" + execution_log.completed_at = end_time + execution_log.duration_seconds = (end_time - start_time).total_seconds() + execution_log.details = { + "service_name": service.name, + "service_type": service.service_type.value, + "added": sync_result.get("added", 0), + "updated": sync_result.get("updated", 0), + "users_synced": sync_result.get("users_synced", 0) + } + if not sync_result.get("success", True): + execution_log.error_message = sync_result.get("message", "Unknown error") + + await db.commit() + return sync_result + + except Exception as e: + # Update execution log with error + end_time = datetime.now(timezone.utc) + execution_log.status = "error" + execution_log.completed_at = end_time + execution_log.duration_seconds = (end_time - start_time).total_seconds() + execution_log.error_message = str(e) + await db.commit() + raise diff --git a/backend/app/api/routes/users.py b/backend/app/api/routes/users.py index a9de6a5..d6ccdf1 100644 --- a/backend/app/api/routes/users.py +++ b/backend/app/api/routes/users.py @@ -4,7 +4,7 
@@ from fastapi import APIRouter, Depends, Query, HTTPException from sqlalchemy import select, func, desc, and_ from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.orm import selectinload +from sqlalchemy.orm import selectinload, joinedload from typing import Optional from datetime import datetime, timedelta @@ -55,10 +55,10 @@ async def get_users( result = await db.execute(query) users = result.scalars().all() - # Get last watched for each user + # Get last watched for each user from UserWatchHistory (since PlaybackActivity may be empty) user_data = [] for u in users: - # Get last activity + # Try to get last activity from PlaybackActivity first last_activity_result = await db.execute( select(PlaybackActivity) .where(PlaybackActivity.user_id == u.id) @@ -67,6 +67,40 @@ async def get_users( ) last_activity = last_activity_result.scalar_one_or_none() + # If no PlaybackActivity, try UserWatchHistory + last_watched_info = None + last_client = None + + if last_activity: + last_watched_info = { + "title": last_activity.media_title, + "client": last_activity.client_name, + "device": last_activity.device_name + } + last_client = last_activity.client_name or last_activity.device_name + else: + # Fallback to UserWatchHistory + last_history_result = await db.execute( + select(UserWatchHistory) + .options(joinedload(UserWatchHistory.media_item)) + .where( + and_( + UserWatchHistory.user_id == u.id, + UserWatchHistory.last_played_at.isnot(None) + ) + ) + .order_by(desc(UserWatchHistory.last_played_at)) + .limit(1) + ) + last_history = last_history_result.scalar_one_or_none() + + if last_history and last_history.media_item: + last_watched_info = { + "title": last_history.media_item.title, + "client": None, + "device": None + } + user_data.append({ "id": u.id, "external_id": u.external_id, @@ -76,11 +110,8 @@ async def get_users( "total_plays": u.total_plays, "total_watch_time_seconds": u.total_watch_time_seconds, "last_activity_at": u.last_activity_at.isoformat() 
if u.last_activity_at else None, - "last_watched": { - "title": last_activity.media_title, - "client": last_activity.client_name, - "device": last_activity.device_name - } if last_activity else None + "last_watched": last_watched_info, + "last_client": last_client }) return { @@ -110,7 +141,7 @@ async def get_user_detail( if not user: raise HTTPException(status_code=404, detail="User not found") - # Get time-based stats + # Get time-based stats from PlaybackActivity now = datetime.utcnow() periods = { "last_24h": now - timedelta(hours=24), @@ -123,7 +154,7 @@ async def get_user_detail( plays_result = await db.execute( select( func.count(PlaybackActivity.id), - func.sum(PlaybackActivity.duration_seconds) + func.coalesce(func.sum(PlaybackActivity.duration_seconds), 0) ) .where( and_( @@ -135,10 +166,10 @@ async def get_user_detail( row = plays_result.one() stats[period_name] = { "plays": row[0] or 0, - "watch_seconds": row[1] or 0 + "watch_seconds": int(row[1] or 0) } - # Get recently watched + # Get recently watched from PlaybackActivity first recent_result = await db.execute( select(PlaybackActivity) .where(PlaybackActivity.user_id == user_id) @@ -147,6 +178,49 @@ async def get_user_detail( ) recent_activities = recent_result.scalars().all() + # If no PlaybackActivity, fallback to UserWatchHistory + recently_watched = [] + if recent_activities: + recently_watched = [ + { + "id": a.id, + "media_title": a.media_title, + "client_name": a.client_name, + "device_name": a.device_name, + "started_at": a.started_at.isoformat() if a.started_at else None, + "duration_seconds": a.duration_seconds, + "played_percentage": a.played_percentage + } + for a in recent_activities + ] + else: + # Fallback to UserWatchHistory + history_result = await db.execute( + select(UserWatchHistory) + .options(joinedload(UserWatchHistory.media_item)) + .where( + and_( + UserWatchHistory.user_id == user_id, + UserWatchHistory.last_played_at.isnot(None) + ) + ) + 
.order_by(desc(UserWatchHistory.last_played_at)) + .limit(10) + ) + history_items = history_result.scalars().all() + recently_watched = [ + { + "id": h.id, + "media_title": h.media_item.title if h.media_item else "Unknown", + "client_name": None, + "device_name": None, + "started_at": h.last_played_at.isoformat() if h.last_played_at else None, + "duration_seconds": 0, + "played_percentage": h.played_percentage or 0 + } + for h in history_items + ] + return { "id": user.id, "external_id": user.external_id, @@ -159,18 +233,7 @@ async def get_user_detail( "last_activity_at": user.last_activity_at.isoformat() if user.last_activity_at else None, "created_at": user.created_at.isoformat() if user.created_at else None, "stats": stats, - "recently_watched": [ - { - "id": a.id, - "media_title": a.media_title, - "client_name": a.client_name, - "device_name": a.device_name, - "started_at": a.started_at.isoformat() if a.started_at else None, - "duration_seconds": a.duration_seconds, - "played_percentage": a.played_percentage - } - for a in recent_activities - ] + "recently_watched": recently_watched } diff --git a/backend/app/core/config.py b/backend/app/core/config.py index bb0c5f3..5aeb176 100644 --- a/backend/app/core/config.py +++ b/backend/app/core/config.py @@ -21,9 +21,25 @@ class Settings(BaseSettings): host: str = "0.0.0.0" port: int = 8080 - # Database + # Database - supports SQLite (default) or PostgreSQL + # SQLite: sqlite+aiosqlite:////data/mediacurator.db + # PostgreSQL: postgresql+asyncpg://user:password@host:5432/dbname database_url: str = "sqlite+aiosqlite:////data/mediacurator.db" + # PostgreSQL specific settings (alternative to database_url) + postgres_host: Optional[str] = None + postgres_port: int = 5432 + postgres_user: Optional[str] = None + postgres_password: Optional[str] = None + postgres_db: Optional[str] = None + + @property + def effective_database_url(self) -> str: + """Get the effective database URL, preferring PostgreSQL if configured.""" + if 
self.postgres_host and self.postgres_user and self.postgres_password and self.postgres_db: + return f"postgresql+asyncpg://{self.postgres_user}:{self.postgres_password}@{self.postgres_host}:{self.postgres_port}/{self.postgres_db}" + return self.database_url + # Security secret_key: str = Field(default_factory=lambda: secrets.token_urlsafe(32)) algorithm: str = "HS256" diff --git a/backend/app/core/database.py b/backend/app/core/database.py index 7a5c87c..d4bd12f 100644 --- a/backend/app/core/database.py +++ b/backend/app/core/database.py @@ -1,18 +1,48 @@ """ Database configuration and session management. +Supports SQLite (default) and PostgreSQL. """ from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.pool import NullPool +from loguru import logger from .config import get_settings settings = get_settings() -# Create async engine -engine = create_async_engine( - settings.database_url, - echo=settings.debug, - future=True, -) +# Get effective database URL (PostgreSQL if configured, else SQLite) +database_url = settings.effective_database_url + +# Detect database type for connection pool settings +is_sqlite = database_url.startswith("sqlite") +is_postgres = database_url.startswith("postgresql") + +# Log database type +if is_postgres: + # Mask password in log + masked_url = database_url.replace(settings.postgres_password or "", "***") if settings.postgres_password else database_url + logger.info(f"Using PostgreSQL database: {masked_url}") +else: + logger.info(f"Using SQLite database: {database_url}") + +# Create async engine with appropriate settings +engine_kwargs = { + "echo": settings.debug, + "future": True, +} + +# PostgreSQL specific: use connection pooling +if is_postgres: + engine_kwargs.update({ + "pool_size": 5, + "max_overflow": 10, + "pool_pre_ping": True, # Check connection health + }) +else: + # SQLite: use NullPool to avoid threading issues + 
engine_kwargs["poolclass"] = NullPool + +engine = create_async_engine(database_url, **engine_kwargs) # Create async session factory async_session_maker = async_sessionmaker( diff --git a/backend/app/models/database.py b/backend/app/models/database.py index 96d9386..13bf16d 100644 --- a/backend/app/models/database.py +++ b/backend/app/models/database.py @@ -327,6 +327,7 @@ class MediaItem(Base): # Series specific series_id = Column(String(100), nullable=True) + parent_id = Column(Integer, ForeignKey("media_items.id"), nullable=True) # For episodes -> series/season season_number = Column(Integer, nullable=True) episode_number = Column(Integer, nullable=True) diff --git a/backend/requirements.txt b/backend/requirements.txt index 0e12c01..0673e6c 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -7,6 +7,7 @@ python-multipart>=0.0.18 sqlalchemy>=2.0.36 alembic>=1.14.0 aiosqlite>=0.20.0 +asyncpg>=0.30.0 # PostgreSQL async driver # Security python-jose[cryptography]>=3.3.0 diff --git a/docker-compose.postgres.yml b/docker-compose.postgres.yml new file mode 100644 index 0000000..d4c08ee --- /dev/null +++ b/docker-compose.postgres.yml @@ -0,0 +1,81 @@ +# Production Docker Compose Configuration with PostgreSQL +# For standard setup with SQLite, see docker-compose.yml +# For development, see docker-compose.dev.yml + +services: + mediacurator: + image: ghcr.io/serph91p/mediacurator:latest + container_name: mediacurator + restart: unless-stopped + user: "1000:1000" + + ports: + - "8080:8080" + + environment: + - TZ=${TZ:-Europe/Berlin} + - SECRET_KEY=${SECRET_KEY:-please-change-this-secret-key-in-production} + + # PostgreSQL Database Configuration + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + - POSTGRES_USER=${POSTGRES_USER:-mediacurator} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-mediacurator} + - POSTGRES_DB=${POSTGRES_DB:-mediacurator} + + volumes: + - mediacurator_config:/app/config + - mediacurator_logs:/app/logs + - 
${MEDIA_PATH:-/data}:/data:ro + + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8080/api/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 30s + + depends_on: + postgres: + condition: service_healthy + + deploy: + resources: + limits: + cpus: '2.0' + memory: 2G + reservations: + cpus: '0.5' + memory: 512M + + postgres: + image: postgres:16-alpine + container_name: mediacurator_postgres + restart: unless-stopped + + environment: + - POSTGRES_USER=${POSTGRES_USER:-mediacurator} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-mediacurator} + - POSTGRES_DB=${POSTGRES_DB:-mediacurator} + + volumes: + - postgres_data:/var/lib/postgresql/data + + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-mediacurator} -d ${POSTGRES_DB:-mediacurator}"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 10s + + # Expose port only if needed for external access + # ports: + # - "5432:5432" + +volumes: + mediacurator_config: + driver: local + mediacurator_logs: + driver: local + postgres_data: + driver: local diff --git a/frontend/src/pages/Dashboard.tsx b/frontend/src/pages/Dashboard.tsx index 2c3e083..ccccd01 100644 --- a/frontend/src/pages/Dashboard.tsx +++ b/frontend/src/pages/Dashboard.tsx @@ -94,7 +94,7 @@ function DiskUsageBar({ ) } -// Top List Item Component (Jellystat-style) +// Top List Item Component function TopListItem({ rank, title, @@ -186,7 +186,7 @@ export default function Dashboard() { }, }) - // Dashboard stats (Jellystat-style with user tracking) + // Dashboard stats with user tracking const { data: dashboardStats } = useQuery({ queryKey: ['dashboardStats', statsDays], queryFn: async () => { @@ -270,7 +270,7 @@ export default function Dashboard() {
)} - {/* Watch Statistics Section (Jellystat-style) */} + {/* Watch Statistics Section */}

@@ -395,7 +395,7 @@ export default function Dashboard() {

- {/* Library Overview (Jellystat-style) */} + {/* Library Overview */}

diff --git a/frontend/src/pages/Libraries.tsx b/frontend/src/pages/Libraries.tsx index 5af0613..f5f016c 100644 --- a/frontend/src/pages/Libraries.tsx +++ b/frontend/src/pages/Libraries.tsx @@ -67,7 +67,7 @@ function formatDuration(seconds: number): string { return parts.join(' ') } -// Library Card Component (Jellystat-style) +// Library Card Component function LibraryCard({ stat, stagingSettings, From 0688ab905b6613dd59a871c90a40c46c08faac5b Mon Sep 17 00:00:00 2001 From: / Date: Mon, 2 Feb 2026 14:40:59 +0100 Subject: [PATCH 003/122] chore: Update PostgreSQL to version 18 (latest stable) --- docker-compose.postgres.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.postgres.yml b/docker-compose.postgres.yml index d4c08ee..052d77c 100644 --- a/docker-compose.postgres.yml +++ b/docker-compose.postgres.yml @@ -49,7 +49,7 @@ services: memory: 512M postgres: - image: postgres:16-alpine + image: postgres:18-alpine container_name: mediacurator_postgres restart: unless-stopped From 8aae3d224daeec69f4c6f7d5314bed47d3486067 Mon Sep 17 00:00:00 2001 From: / Date: Mon, 2 Feb 2026 15:03:27 +0100 Subject: [PATCH 004/122] fix: Add PostgreSQL support to database migrations - Add column_exists() helper for SQLite/PostgreSQL compatibility - Add table_exists() helper for SQLite/PostgreSQL compatibility - Use information_schema for PostgreSQL column/table checks - Use pragma_table_info for SQLite column checks - Add IF NOT EXISTS for PostgreSQL ALTER TABLE commands - Use SERIAL PRIMARY KEY for PostgreSQL auto-increment --- backend/app/core/migrations.py | 362 +++++++++++++++++---------------- 1 file changed, 187 insertions(+), 175 deletions(-) diff --git a/backend/app/core/migrations.py b/backend/app/core/migrations.py index 870b94a..854fd04 100644 --- a/backend/app/core/migrations.py +++ b/backend/app/core/migrations.py @@ -1,106 +1,139 @@ """ Database migrations for schema changes. +Supports both SQLite and PostgreSQL. 
""" from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import text from loguru import logger +from .database import is_postgres + + +async def column_exists(db: AsyncSession, table_name: str, column_name: str) -> bool: + """Check if a column exists in a table. Works with both SQLite and PostgreSQL.""" + if is_postgres: + result = await db.execute(text(""" + SELECT COUNT(*) as count + FROM information_schema.columns + WHERE table_name = :table_name AND column_name = :column_name + """), {"table_name": table_name, "column_name": column_name}) + else: + result = await db.execute(text(f""" + SELECT COUNT(*) as count + FROM pragma_table_info('{table_name}') + WHERE name = :column_name + """), {"column_name": column_name}) + return result.scalar() > 0 + + +async def table_exists(db: AsyncSession, table_name: str) -> bool: + """Check if a table exists. Works with both SQLite and PostgreSQL.""" + if is_postgres: + result = await db.execute(text(""" + SELECT COUNT(*) as count + FROM information_schema.tables + WHERE table_name = :table_name AND table_schema = 'public' + """), {"table_name": table_name}) + else: + result = await db.execute(text(""" + SELECT COUNT(*) as count + FROM sqlite_master + WHERE type='table' AND name = :table_name + """), {"table_name": table_name}) + return result.scalar() > 0 async def migrate_database(db: AsyncSession): """Run all necessary database migrations.""" # Check if old media_type column exists (needs migration) - result = await db.execute(text(""" - SELECT COUNT(*) as count - FROM pragma_table_info('cleanup_rules') - WHERE name='media_type' - """)) - has_old_media_type = result.scalar() > 0 + has_old_media_type = await column_exists(db, 'cleanup_rules', 'media_type') + has_media_types = await column_exists(db, 'cleanup_rules', 'media_types') - if has_old_media_type: + if has_old_media_type and not has_media_types: logger.info("Migrating cleanup_rules: removing old media_type column and adding media_types") - # SQLite doesn't 
support DROP COLUMN directly, so we need to recreate the table - # Step 1: Create new table with correct schema - await db.execute(text(""" - CREATE TABLE cleanup_rules_new ( - id INTEGER PRIMARY KEY, - name VARCHAR(100) NOT NULL, - description TEXT, - is_enabled BOOLEAN DEFAULT 1, - priority INTEGER DEFAULT 0, - media_types TEXT NOT NULL DEFAULT '[]', - library_id INTEGER, - conditions TEXT NOT NULL DEFAULT '{}', - action VARCHAR(50) DEFAULT 'DELETE', - grace_period_days INTEGER DEFAULT 7, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP, - FOREIGN KEY (library_id) REFERENCES libraries(id) - ) - """)) - - # Step 2: Copy data from old table, converting media_type to media_types - await db.execute(text(""" - INSERT INTO cleanup_rules_new - (id, name, description, is_enabled, priority, media_types, library_id, - conditions, action, grace_period_days, created_at, updated_at) - SELECT - id, name, description, is_enabled, priority, - CASE - WHEN media_type = 'movie' THEN '["movie"]' - WHEN media_type = 'series' THEN '["series"]' - WHEN media_type = 'episode' THEN '["episode"]' - ELSE '["movie"]' - END as media_types, - library_id, conditions, action, grace_period_days, created_at, updated_at - FROM cleanup_rules - """)) - - # Step 3: Drop old table - await db.execute(text("DROP TABLE cleanup_rules")) - - # Step 4: Rename new table - await db.execute(text("ALTER TABLE cleanup_rules_new RENAME TO cleanup_rules")) + if is_postgres: + # PostgreSQL supports ADD COLUMN and DROP COLUMN + await db.execute(text(""" + ALTER TABLE cleanup_rules + ADD COLUMN IF NOT EXISTS media_types TEXT NOT NULL DEFAULT '[]' + """)) + + # Migrate data from old column + await db.execute(text(""" + UPDATE cleanup_rules SET media_types = + CASE + WHEN media_type = 'movie' THEN '["movie"]' + WHEN media_type = 'series' THEN '["series"]' + WHEN media_type = 'episode' THEN '["episode"]' + ELSE '["movie"]' + END + """)) + + # Drop old column + await db.execute(text("ALTER TABLE 
cleanup_rules DROP COLUMN IF EXISTS media_type")) + else: + # SQLite doesn't support DROP COLUMN directly, so we need to recreate the table + await db.execute(text(""" + CREATE TABLE cleanup_rules_new ( + id INTEGER PRIMARY KEY, + name VARCHAR(100) NOT NULL, + description TEXT, + is_enabled BOOLEAN DEFAULT 1, + priority INTEGER DEFAULT 0, + media_types TEXT NOT NULL DEFAULT '[]', + library_id INTEGER, + conditions TEXT NOT NULL DEFAULT '{}', + action VARCHAR(50) DEFAULT 'DELETE', + grace_period_days INTEGER DEFAULT 7, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP, + FOREIGN KEY (library_id) REFERENCES libraries(id) + ) + """)) + + await db.execute(text(""" + INSERT INTO cleanup_rules_new + (id, name, description, is_enabled, priority, media_types, library_id, + conditions, action, grace_period_days, created_at, updated_at) + SELECT + id, name, description, is_enabled, priority, + CASE + WHEN media_type = 'movie' THEN '["movie"]' + WHEN media_type = 'series' THEN '["series"]' + WHEN media_type = 'episode' THEN '["episode"]' + ELSE '["movie"]' + END as media_types, + library_id, conditions, action, grace_period_days, created_at, updated_at + FROM cleanup_rules + """)) + + await db.execute(text("DROP TABLE cleanup_rules")) + await db.execute(text("ALTER TABLE cleanup_rules_new RENAME TO cleanup_rules")) await db.commit() logger.info("Migration completed: media_type column removed, media_types added") # Check if staging columns exist in media_items - result = await db.execute(text(""" - SELECT COUNT(*) as count - FROM pragma_table_info('media_items') - WHERE name='is_staged' - """)) - has_staging = result.scalar() > 0 + has_staging = await column_exists(db, 'media_items', 'is_staged') if not has_staging: logger.info("Migrating media_items: adding staging columns") - await db.execute(text(""" - ALTER TABLE media_items - ADD COLUMN is_staged BOOLEAN DEFAULT 0 - """)) - await db.execute(text(""" - ALTER TABLE media_items - ADD COLUMN staged_at 
TIMESTAMP - """)) - await db.execute(text(""" - ALTER TABLE media_items - ADD COLUMN original_path VARCHAR(500) - """)) - await db.execute(text(""" - ALTER TABLE media_items - ADD COLUMN staged_path VARCHAR(500) - """)) - await db.execute(text(""" - ALTER TABLE media_items - ADD COLUMN permanent_delete_at TIMESTAMP - """)) - await db.execute(text(""" - ALTER TABLE media_items - ADD COLUMN staged_library_id VARCHAR(100) - """)) + if is_postgres: + await db.execute(text("ALTER TABLE media_items ADD COLUMN IF NOT EXISTS is_staged BOOLEAN DEFAULT FALSE")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN IF NOT EXISTS staged_at TIMESTAMP")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN IF NOT EXISTS original_path VARCHAR(500)")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN IF NOT EXISTS staged_path VARCHAR(500)")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN IF NOT EXISTS permanent_delete_at TIMESTAMP")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN IF NOT EXISTS staged_library_id VARCHAR(100)")) + else: + await db.execute(text("ALTER TABLE media_items ADD COLUMN is_staged BOOLEAN DEFAULT 0")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN staged_at TIMESTAMP")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN original_path VARCHAR(500)")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN staged_path VARCHAR(500)")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN permanent_delete_at TIMESTAMP")) + await db.execute(text("ALTER TABLE media_items ADD COLUMN staged_library_id VARCHAR(100)")) await db.commit() logger.info("Migration completed: staging columns added") @@ -146,41 +179,51 @@ async def migrate_database(db: AsyncSession): logger.info("Database indexes created successfully") # Check if refresh_tokens table exists - result = await db.execute(text(""" - SELECT COUNT(*) as count - FROM sqlite_master - WHERE type='table' AND name='refresh_tokens' - """)) 
- has_refresh_tokens = result.scalar() > 0 + has_refresh_tokens = await table_exists(db, 'refresh_tokens') if not has_refresh_tokens: logger.info("Creating refresh_tokens table for session management") - await db.execute(text(""" - CREATE TABLE refresh_tokens ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - token VARCHAR(255) NOT NULL UNIQUE, - user_id INTEGER NOT NULL, - expires_at TIMESTAMP NOT NULL, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - revoked_at TIMESTAMP, - device_info VARCHAR(255), - ip_address VARCHAR(45), - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE - ) - """)) + if is_postgres: + await db.execute(text(""" + CREATE TABLE refresh_tokens ( + id SERIAL PRIMARY KEY, + token VARCHAR(255) NOT NULL UNIQUE, + user_id INTEGER NOT NULL, + expires_at TIMESTAMP NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + revoked_at TIMESTAMP, + device_info VARCHAR(255), + ip_address VARCHAR(45), + FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE + ) + """)) + else: + await db.execute(text(""" + CREATE TABLE refresh_tokens ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + token VARCHAR(255) NOT NULL UNIQUE, + user_id INTEGER NOT NULL, + expires_at TIMESTAMP NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + revoked_at TIMESTAMP, + device_info VARCHAR(255), + ip_address VARCHAR(45), + FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE + ) + """)) # Create indexes for performance await db.execute(text(""" - CREATE INDEX idx_refresh_tokens_token + CREATE INDEX IF NOT EXISTS idx_refresh_tokens_token ON refresh_tokens(token) """)) await db.execute(text(""" - CREATE INDEX idx_refresh_tokens_user + CREATE INDEX IF NOT EXISTS idx_refresh_tokens_user ON refresh_tokens(user_id) """)) await db.execute(text(""" - CREATE INDEX idx_refresh_tokens_expires + CREATE INDEX IF NOT EXISTS idx_refresh_tokens_expires ON refresh_tokens(expires_at) """)) @@ -188,20 +231,21 @@ async def migrate_database(db: AsyncSession): 
logger.info("Migration completed: refresh_tokens table created") # Check if library_id column exists in media_items - result = await db.execute(text(""" - SELECT COUNT(*) as count - FROM pragma_table_info('media_items') - WHERE name='library_id' - """)) - has_library_id = result.scalar() > 0 + has_library_id = await column_exists(db, 'media_items', 'library_id') if not has_library_id: logger.info("Migrating media_items: adding library_id column") - await db.execute(text(""" - ALTER TABLE media_items - ADD COLUMN library_id INTEGER REFERENCES libraries(id) - """)) + if is_postgres: + await db.execute(text(""" + ALTER TABLE media_items + ADD COLUMN IF NOT EXISTS library_id INTEGER REFERENCES libraries(id) + """)) + else: + await db.execute(text(""" + ALTER TABLE media_items + ADD COLUMN library_id INTEGER REFERENCES libraries(id) + """)) # Create index for library_id await db.execute(text(""" @@ -213,101 +257,69 @@ async def migrate_database(db: AsyncSession): logger.info("Migration completed: library_id column added to media_items") # Check if staging columns exist in libraries table - result = await db.execute(text(""" - SELECT COUNT(*) as count - FROM pragma_table_info('libraries') - WHERE name='staging_enabled' - """)) - has_library_staging = result.scalar() > 0 + has_library_staging = await column_exists(db, 'libraries', 'staging_enabled') if not has_library_staging: logger.info("Migrating libraries: adding per-library staging columns") - await db.execute(text(""" - ALTER TABLE libraries - ADD COLUMN staging_enabled BOOLEAN DEFAULT NULL - """)) - await db.execute(text(""" - ALTER TABLE libraries - ADD COLUMN staging_path VARCHAR(500) - """)) - await db.execute(text(""" - ALTER TABLE libraries - ADD COLUMN staging_grace_period_days INTEGER - """)) - await db.execute(text(""" - ALTER TABLE libraries - ADD COLUMN staging_auto_restore BOOLEAN DEFAULT NULL - """)) + if is_postgres: + await db.execute(text("ALTER TABLE libraries ADD COLUMN IF NOT EXISTS 
staging_enabled BOOLEAN DEFAULT NULL")) + await db.execute(text("ALTER TABLE libraries ADD COLUMN IF NOT EXISTS staging_path VARCHAR(500)")) + await db.execute(text("ALTER TABLE libraries ADD COLUMN IF NOT EXISTS staging_grace_period_days INTEGER")) + await db.execute(text("ALTER TABLE libraries ADD COLUMN IF NOT EXISTS staging_auto_restore BOOLEAN DEFAULT NULL")) + else: + await db.execute(text("ALTER TABLE libraries ADD COLUMN staging_enabled BOOLEAN DEFAULT NULL")) + await db.execute(text("ALTER TABLE libraries ADD COLUMN staging_path VARCHAR(500)")) + await db.execute(text("ALTER TABLE libraries ADD COLUMN staging_grace_period_days INTEGER")) + await db.execute(text("ALTER TABLE libraries ADD COLUMN staging_auto_restore BOOLEAN DEFAULT NULL")) await db.commit() logger.info("Migration completed: per-library staging columns added") # Check if event_types column exists in notification_channels - result = await db.execute(text(""" - SELECT COUNT(*) as count - FROM pragma_table_info('notification_channels') - WHERE name='event_types' - """)) - has_event_types = result.scalar() > 0 + has_event_types = await column_exists(db, 'notification_channels', 'event_types') if not has_event_types: logger.info("Migrating notification_channels: adding event_types column") - await db.execute(text(""" - ALTER TABLE notification_channels - ADD COLUMN event_types TEXT - """)) + if is_postgres: + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN IF NOT EXISTS event_types TEXT")) + else: + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN event_types TEXT")) await db.commit() logger.info("Migration completed: event_types column added to notification_channels") # Check if title_template column exists in notification_channels - result = await db.execute(text(""" - SELECT COUNT(*) as count - FROM pragma_table_info('notification_channels') - WHERE name='title_template' - """)) - has_title_template = result.scalar() > 0 + has_title_template = await 
column_exists(db, 'notification_channels', 'title_template') if not has_title_template: logger.info("Migrating notification_channels: adding template and retry columns") - await db.execute(text(""" - ALTER TABLE notification_channels - ADD COLUMN title_template VARCHAR(500) - """)) - await db.execute(text(""" - ALTER TABLE notification_channels - ADD COLUMN message_template TEXT - """)) - await db.execute(text(""" - ALTER TABLE notification_channels - ADD COLUMN max_retries INTEGER DEFAULT 3 - """)) - await db.execute(text(""" - ALTER TABLE notification_channels - ADD COLUMN retry_backoff_base INTEGER DEFAULT 2 - """)) + if is_postgres: + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN IF NOT EXISTS title_template VARCHAR(500)")) + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN IF NOT EXISTS message_template TEXT")) + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN IF NOT EXISTS max_retries INTEGER DEFAULT 3")) + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN IF NOT EXISTS retry_backoff_base INTEGER DEFAULT 2")) + else: + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN title_template VARCHAR(500)")) + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN message_template TEXT")) + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN max_retries INTEGER DEFAULT 3")) + await db.execute(text("ALTER TABLE notification_channels ADD COLUMN retry_backoff_base INTEGER DEFAULT 2")) await db.commit() logger.info("Migration completed: template and retry columns added to notification_channels") # Check if staging_library_name column exists in libraries table - result = await db.execute(text(""" - SELECT COUNT(*) as count - FROM pragma_table_info('libraries') - WHERE name='staging_library_name' - """)) - has_staging_library_name = result.scalar() > 0 + has_staging_library_name = await column_exists(db, 'libraries', 'staging_library_name') if not 
has_staging_library_name: logger.info("Migrating libraries: adding staging_library_name column") - await db.execute(text(""" - ALTER TABLE libraries - ADD COLUMN staging_library_name VARCHAR(200) - """)) + if is_postgres: + await db.execute(text("ALTER TABLE libraries ADD COLUMN IF NOT EXISTS staging_library_name VARCHAR(200)")) + else: + await db.execute(text("ALTER TABLE libraries ADD COLUMN staging_library_name VARCHAR(200)")) await db.commit() logger.info("Migration completed: staging_library_name column added to libraries") From 3f97c11887fb3ea9bdd1ea5a6e6de9b7776dd19a Mon Sep 17 00:00:00 2001 From: / Date: Mon, 2 Feb 2026 15:06:22 +0100 Subject: [PATCH 005/122] chore: Remove ARM64 build to speed up CI - Build only linux/amd64 for faster builds - Remove QEMU setup (not needed without cross-platform builds) - Update release notes to reflect single architecture - Add note about opening feature request for ARM support --- .github/workflows/release.yml | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3dce074..90e8c0b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -83,9 +83,6 @@ jobs: - name: Checkout repository uses: actions/checkout@v6 - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -100,7 +97,7 @@ jobs: uses: docker/build-push-action@v6 with: context: . 
- platforms: linux/amd64,linux/arm64 + platforms: linux/amd64 push: true tags: ${{ needs.prepare.outputs.docker_tags }} labels: ${{ needs.prepare.outputs.docker_labels }} @@ -220,7 +217,7 @@ jobs: ## Docker Images - This release is available as multi-architecture Docker images (amd64, arm64): + This release is available as a Docker image for amd64 architecture: ### Pull the image: \`\`\`bash @@ -250,7 +247,9 @@ jobs: | Registry | GitHub Container Registry (ghcr.io) | | Image | \`ghcr.io/${REPO_OWNER}/mediacurator\` | | Version | \`${VERSION}\` | - | Architectures | linux/amd64, linux/arm64 | + | Architecture | linux/amd64 | + + > **Note:** ARM64 builds are not currently provided. Open a feature request if you need ARM support. ## Documentation From 4b90cdd594d2fdb5fba27d50dcbf24589ee23063 Mon Sep 17 00:00:00 2001 From: / Date: Mon, 2 Feb 2026 15:21:22 +0100 Subject: [PATCH 006/122] refactor: Remove Jellystat service type - MediaCurator now has built-in watch statistics tracking - Jellystat integration no longer needed - Removed from frontend ServiceType, service options - Removed from backend models and schemas --- backend/app/models/database.py | 2 +- backend/app/schemas/__init__.py | 2 +- docker-compose.running.yml | 120 ++++++++++++++++++++++++++++++++ frontend/src/pages/Services.tsx | 1 - frontend/src/types/index.ts | 2 +- 5 files changed, 123 insertions(+), 4 deletions(-) create mode 100644 docker-compose.running.yml diff --git a/backend/app/models/database.py b/backend/app/models/database.py index 13bf16d..7ec15af 100644 --- a/backend/app/models/database.py +++ b/backend/app/models/database.py @@ -18,7 +18,7 @@ class ServiceType(str, Enum): RADARR = "radarr" EMBY = "emby" JELLYFIN = "jellyfin" - JELLYSTAT = "jellystat" + class MediaType(str, Enum): diff --git a/backend/app/schemas/__init__.py b/backend/app/schemas/__init__.py index cf37c2c..ae8052b 100644 --- a/backend/app/schemas/__init__.py +++ b/backend/app/schemas/__init__.py @@ -14,7 +14,7 @@ class 
ServiceType(str, Enum): RADARR = "radarr" EMBY = "emby" JELLYFIN = "jellyfin" - JELLYSTAT = "jellystat" + class MediaType(str, Enum): diff --git a/docker-compose.running.yml b/docker-compose.running.yml new file mode 100644 index 0000000..b8fe6c7 --- /dev/null +++ b/docker-compose.running.yml @@ -0,0 +1,120 @@ +# MediaCurator Production Deployment +# Komodo Stack Configuration for existing media network + +services: + mediacurator: + # renovate: datasource=docker depName=ghcr.io/serph91p/mediacurator + image: ghcr.io/serph91p/mediacurator:dev.0.0.133 + container_name: mediacurator + restart: unless-stopped + + labels: + - "komodo.stack=mediacurator" + - "komodo.enabled=true" + + # Run as non-root user (appuser with UID 1000) + # Note: Named volumes are managed by Docker, no manual permission fixes needed + user: "1000:1000" + + environment: + # Timezone + - TZ=${TZ:-Europe/Berlin} + + # Security - IMPORTANT: Change this in production! + - SECRET_KEY=${SECRET_KEY:-please-change-this-secret-key-in-production} + + # Database + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + - POSTGRES_USER=${POSTGRES_USER:-mediacurator} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-mediacurator} + - POSTGRES_DB=${POSTGRES_DB:-mediacurator} + + # Optional: Pre-create admin user (useful for automated deployments) + # If not set, first registered user via web UI becomes admin automatically + # - INITIAL_ADMIN_USER=${ADMIN_USER:-} + # - INITIAL_ADMIN_PASSWORD=${ADMIN_PASSWORD:-} + + # Optional: Debug mode (not recommended for production) + # - DEBUG=false + + volumes: + # Application data (database, config) - using named volume + - mediacurator_data:/app/config + + # Application logs - using named volume + - mediacurator_logs:/app/logs + + # Media mount - REQUIRED: Point to your actual media location + # Mount as read-only (:ro) for safety + - ${MEDIA_PATH:-/media}:/data + + # Optional: Additional media paths + # - /mnt/movies:/media/movies:ro + # - /mnt/tv:/media/tv:ro + + healthcheck: + 
test: ["CMD", "curl", "-f", "http://localhost:8080/api/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 30s + + depends_on: + postgres: + condition: service_healthy + + # Resource limits (optional but recommended) + deploy: + resources: + limits: + cpus: '2.0' + memory: 2G + reservations: + cpus: '0.5' + memory: 512M + + networks: + - media + - mediacurator + + postgres: + image: postgres:18.1-alpine + container_name: mediacurator_postgres + restart: unless-stopped + + environment: + - POSTGRES_USER=${POSTGRES_USER:-mediacurator} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-mediacurator} + - POSTGRES_DB=${POSTGRES_DB:-mediacurator} + + volumes: + - postgres_data:/var/lib/postgresql/data + + labels: + - "komodo.stack=mediacurator" + - "komodo.enabled=true" + + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-mediacurator} -d ${POSTGRES_DB:-mediacurator}"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 10s + + networks: + - mediacurator + +volumes: + mediacurator_data: + driver: local + mediacurator_logs: + driver: local + postgres_data: + driver: local + +networks: + media: + external: true + name: media + mediacurator: \ No newline at end of file diff --git a/frontend/src/pages/Services.tsx b/frontend/src/pages/Services.tsx index f3a3d68..9483176 100644 --- a/frontend/src/pages/Services.tsx +++ b/frontend/src/pages/Services.tsx @@ -11,7 +11,6 @@ const serviceTypes: { value: ServiceType; label: string; category: string }[] = { value: 'radarr', label: 'Radarr', category: 'Download Manager' }, { value: 'emby', label: 'Emby', category: 'Media Server' }, { value: 'jellyfin', label: 'Jellyfin', category: 'Media Server' }, - { value: 'jellystat', label: 'Jellystat', category: 'Media Server' }, ] const getServiceCategory = (type: ServiceType): string => { diff --git a/frontend/src/types/index.ts b/frontend/src/types/index.ts index 74caa66..ecb4c1d 100644 --- a/frontend/src/types/index.ts +++ b/frontend/src/types/index.ts @@ 
-26,7 +26,7 @@ export interface Session { is_current: boolean } -export type ServiceType = 'sonarr' | 'radarr' | 'emby' | 'jellyfin' | 'jellystat' +export type ServiceType = 'sonarr' | 'radarr' | 'emby' | 'jellyfin' export type MediaType = 'movie' | 'series' | 'episode' | 'season' export type RuleActionType = 'delete' | 'delete_and_unmonitor' | 'notify_only' | 'move_to_trash' | 'unmonitor' export type NotificationType = 'webhook' | 'discord' | 'slack' | 'email' | 'apprise' From 5d6371e88437a7db88f71063264d3d5cc0f1a6b3 Mon Sep 17 00:00:00 2001 From: / Date: Mon, 2 Feb 2026 15:31:17 +0100 Subject: [PATCH 007/122] feat: Add individual service sync jobs in Jobs tab - Add separate sync jobs for each enabled service - Show service sync status with running indicator - Add manual 'Sync Now' button per service - Display last sync result and duration - Auto-refresh job status every 5 seconds - New API endpoint POST /jobs/sync/service/{id} - Service sync jobs show in job history --- backend/app/api/routes/jobs.py | 99 ++++++++++++++++++++- backend/app/scheduler.py | 154 ++++++++++++++++++++++++++++++++- frontend/src/pages/Jobs.tsx | 105 +++++++++++++++++++++- 3 files changed, 353 insertions(+), 5 deletions(-) diff --git a/backend/app/api/routes/jobs.py b/backend/app/api/routes/jobs.py index d581b64..c806bff 100644 --- a/backend/app/api/routes/jobs.py +++ b/backend/app/api/routes/jobs.py @@ -10,8 +10,8 @@ from ...core.database import get_db from ...core.rate_limit import limiter, RateLimits -from ...models import JobExecutionLog, SystemSettings -from ...scheduler import scheduler, reschedule_job +from ...models import JobExecutionLog, SystemSettings, ServiceConnection +from ...scheduler import scheduler, reschedule_job, run_service_sync_job from ..deps import get_current_user router = APIRouter(prefix="/jobs", tags=["Jobs"]) @@ -41,6 +41,13 @@ async def list_jobs( running_logs = result.scalars().all() running_job_ids = {log.job_id for log in running_logs} + # Get all services 
for potential sync jobs + services_result = await db.execute( + select(ServiceConnection).where(ServiceConnection.is_enabled == True) + ) + services = services_result.scalars().all() + service_map = {s.id: s for s in services} + for job in scheduler.get_jobs(): # Extract interval from trigger interval_str = str(job.trigger) @@ -67,6 +74,18 @@ async def list_jobs( if running_log: running_since = running_log.started_at.isoformat() if running_log.started_at else None + # Determine service info for service sync jobs + service_id = None + service_type = None + if job.id.startswith("sync_service_"): + try: + service_id = int(job.id.replace("sync_service_", "")) + if service_id in service_map: + service = service_map[service_id] + service_type = service.service_type.value if hasattr(service.service_type, 'value') else str(service.service_type) + except (ValueError, KeyError): + pass + jobs.append({ "id": job.id, "name": job.name, @@ -76,8 +95,32 @@ async def list_jobs( "interval_hours": interval_hours, "is_running": is_running, "running_since": running_since, + "service_id": service_id, + "service_type": service_type, }) + # Also check for running service syncs that may not have scheduler jobs yet + for log in running_logs: + if log.job_id.startswith("sync_service_") and not any(j["id"] == log.job_id for j in jobs): + try: + service_id = int(log.job_id.replace("sync_service_", "")) + if service_id in service_map: + service = service_map[service_id] + jobs.append({ + "id": log.job_id, + "name": log.job_name, + "next_run_time": None, + "trigger": "manual", + "interval_minutes": None, + "interval_hours": None, + "is_running": True, + "running_since": log.started_at.isoformat() if log.started_at else None, + "service_id": service_id, + "service_type": service.service_type.value if hasattr(service.service_type, 'value') else str(service.service_type), + }) + except (ValueError, KeyError): + pass + return { "running": scheduler.running, "jobs": jobs @@ -115,6 +158,58 @@ async 
def get_job_history( } for log in executions] +@router.post("/sync/service/{service_id}") +@limiter.limit(RateLimits.CLEANUP_OPERATION) +async def trigger_service_sync( + request: Request, + service_id: int, + db: AsyncSession = Depends(get_db), + current_user = Depends(get_current_user) +): + """Manually trigger a sync for a specific service.""" + import asyncio + + # Check if service exists and is enabled + result = await db.execute( + select(ServiceConnection).where(ServiceConnection.id == service_id) + ) + service = result.scalar_one_or_none() + + if not service: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Service with id {service_id} not found" + ) + + if not service.is_enabled: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Service '{service.name}' is disabled" + ) + + # Check if already running + job_id = f"sync_service_{service_id}" + running_result = await db.execute( + select(JobExecutionLog) + .where(JobExecutionLog.job_id == job_id) + .where(JobExecutionLog.status == "running") + ) + if running_result.scalar_one_or_none(): + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=f"Sync for service '{service.name}' is already running" + ) + + # Start the sync in background + asyncio.create_task(run_service_sync_job(service_id)) + + return { + "message": f"Sync triggered for service '{service.name}'", + "service_id": service_id, + "service_name": service.name + } + + @router.get("/history/recent") @limiter.limit(RateLimits.API_READ) async def get_recent_executions( diff --git a/backend/app/scheduler.py b/backend/app/scheduler.py index 93e006c..84002f8 100644 --- a/backend/app/scheduler.py +++ b/backend/app/scheduler.py @@ -12,11 +12,90 @@ from .services.cleanup_engine import CleanupEngine from .services.sync import sync_service_media from datetime import datetime, timezone +from functools import partial settings = get_settings() scheduler = AsyncIOScheduler() +async def 
run_service_sync_job(service_id: int): + """Sync media from a specific service.""" + start_time = datetime.now(timezone.utc) + execution_log = None + + async with async_session_maker() as db: + try: + # Get the service + result = await db.execute( + select(ServiceConnection).where(ServiceConnection.id == service_id) + ) + service = result.scalar_one_or_none() + + if not service: + logger.warning(f"Service {service_id} not found for sync") + return + + if not service.is_enabled: + logger.info(f"Skipping sync for disabled service: {service.name}") + return + + job_id = f"sync_service_{service_id}" + job_name = f"Sync: {service.name}" + + logger.info(f"Running sync job for {service.name}") + + # Create execution log + execution_log = JobExecutionLog( + job_id=job_id, + job_name=job_name, + status="running", + started_at=start_time + ) + db.add(execution_log) + await db.commit() + + try: + sync_result = await sync_service_media(db, service) + + # Update execution log + end_time = datetime.now(timezone.utc) + execution_log.status = "success" + execution_log.completed_at = end_time + execution_log.duration_seconds = (end_time - start_time).total_seconds() + execution_log.details = { + "service_name": service.name, + "service_type": service.service_type.value if hasattr(service.service_type, 'value') else str(service.service_type), + "synced_items": sync_result.get('synced', 0), + "libraries": sync_result.get('libraries', 0), + "result": sync_result + } + await db.commit() + logger.info(f"Sync completed for {service.name}: {sync_result}") + + except Exception as e: + error_msg = f"Sync failed for {service.name}: {e}" + logger.error(error_msg) + + end_time = datetime.now(timezone.utc) + execution_log.status = "error" + execution_log.completed_at = end_time + execution_log.duration_seconds = (end_time - start_time).total_seconds() + execution_log.error_message = str(e) + await db.commit() + + except Exception as e: + error_msg = f"Sync job failed for service {service_id}: 
{e}" + logger.error(error_msg) + + if execution_log: + end_time = datetime.now(timezone.utc) + execution_log.status = "error" + execution_log.completed_at = end_time + execution_log.duration_seconds = (end_time - start_time).total_seconds() + execution_log.error_message = str(e) + await db.commit() + + async def run_sync_job(): """Sync media from all enabled services.""" logger.info("Running scheduled sync job") @@ -296,12 +375,12 @@ async def run_auto_restore_job(): def start_scheduler(): """Start the scheduler with configured jobs.""" - # Sync job - default every 6 hours + # Sync job - default every 6 hours (syncs all services) scheduler.add_job( run_sync_job, IntervalTrigger(hours=6), id="sync_job", - name="Media Sync", + name="Media Sync (All)", replace_existing=True ) @@ -334,6 +413,77 @@ def start_scheduler(): scheduler.start() logger.info("Scheduler started with all jobs (sync, cleanup, staging cleanup, auto-restore)") + + # Schedule dynamic service sync jobs registration + import asyncio + asyncio.create_task(register_service_sync_jobs()) + + +async def register_service_sync_jobs(): + """Register individual sync jobs for each enabled service.""" + try: + async with async_session_maker() as db: + result = await db.execute( + select(ServiceConnection).where(ServiceConnection.is_enabled == True) + ) + services = result.scalars().all() + + for service in services: + job_id = f"sync_service_{service.id}" + # Check if job already exists + if scheduler.get_job(job_id): + continue + + scheduler.add_job( + partial(run_service_sync_job, service.id), + IntervalTrigger(hours=6), + id=job_id, + name=f"Sync: {service.name}", + replace_existing=True + ) + logger.info(f"Registered sync job for service: {service.name}") + + except Exception as e: + logger.error(f"Failed to register service sync jobs: {e}") + + +async def update_service_sync_jobs(): + """Update service sync jobs when services change.""" + async with async_session_maker() as db: + result = await 
db.execute(select(ServiceConnection)) + services = result.scalars().all() + + existing_service_ids = set() + + for service in services: + job_id = f"sync_service_{service.id}" + existing_service_ids.add(job_id) + + if service.is_enabled: + # Add or update job + if not scheduler.get_job(job_id): + scheduler.add_job( + partial(run_service_sync_job, service.id), + IntervalTrigger(hours=6), + id=job_id, + name=f"Sync: {service.name}", + replace_existing=True + ) + logger.info(f"Added sync job for service: {service.name}") + else: + # Remove job for disabled service + if scheduler.get_job(job_id): + scheduler.remove_job(job_id) + logger.info(f"Removed sync job for disabled service: {service.name}") + + # Remove jobs for deleted services + for job in scheduler.get_jobs(): + if job.id.startswith("sync_service_") and job.id not in existing_service_ids: + scheduler.remove_job(job.id) + logger.info(f"Removed sync job for deleted service: {job.id}") + + scheduler.start() + logger.info("Scheduler started with all jobs (sync, cleanup, staging cleanup, auto-restore)") async def load_saved_job_intervals(): diff --git a/frontend/src/pages/Jobs.tsx b/frontend/src/pages/Jobs.tsx index 734f116..6f1ab6d 100644 --- a/frontend/src/pages/Jobs.tsx +++ b/frontend/src/pages/Jobs.tsx @@ -1,6 +1,6 @@ import { useState } from 'react' import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' -import { PlayIcon, ClockIcon, CheckCircleIcon, XCircleIcon, ArrowPathIcon, MinusCircleIcon, PencilIcon } from '@heroicons/react/24/outline' +import { PlayIcon, ClockIcon, CheckCircleIcon, XCircleIcon, ArrowPathIcon, MinusCircleIcon, PencilIcon, ServerIcon } from '@heroicons/react/24/outline' import api from '../lib/api' import toast from 'react-hot-toast' import { formatDateTime } from '../lib/utils' @@ -14,6 +14,8 @@ interface Job { interval_hours: number | null is_running: boolean running_since: string | null + service_id?: number | null + service_type?: string | null } interface 
JobExecution { @@ -28,6 +30,13 @@ interface JobExecution { details: Record } +interface ServiceConnection { + id: number + name: string + service_type: string + is_enabled: boolean +} + export default function Jobs() { const queryClient = useQueryClient() const [selectedJobId, setSelectedJobId] = useState(null) @@ -44,6 +53,14 @@ export default function Jobs() { refetchInterval: 5000, // Refresh every 5 seconds }) + const { data: services } = useQuery({ + queryKey: ['services'], + queryFn: async () => { + const res = await api.get('/services/') + return res.data + }, + }) + const { data: recentExecutions, isLoading: executionsLoading } = useQuery({ queryKey: ['job-executions'], queryFn: async () => { @@ -75,6 +92,22 @@ export default function Jobs() { onError: () => toast.error('Failed to trigger job'), }) + const triggerServiceSyncMutation = useMutation({ + mutationFn: async (serviceId: number) => { + const res = await api.post(`/jobs/sync/service/${serviceId}`) + return res.data + }, + onSuccess: (data) => { + queryClient.invalidateQueries({ queryKey: ['jobs'] }) + queryClient.invalidateQueries({ queryKey: ['job-executions'] }) + toast.success(`Sync started for ${data.service_name}`) + }, + onError: (error: any) => { + const message = error.response?.data?.detail || 'Failed to trigger sync' + toast.error(message) + }, + }) + const updateIntervalMutation = useMutation({ mutationFn: async ({ jobId, intervalMinutes, intervalHours }: { jobId: string, intervalMinutes?: number, intervalHours?: number }) => { const res = await api.put(`/jobs/${jobId}/interval`, { @@ -189,6 +222,76 @@ export default function Jobs() {

)} + {/* Service Sync Jobs */} + {services && services.filter(s => s.is_enabled).length > 0 && ( +
+
+

+ + Service Sync +

+

+ Trigger manual sync for individual services +

+
+
+
+ {services.filter(s => s.is_enabled).map((service) => { + const serviceJobId = `sync_service_${service.id}` + const isRunning = jobsData?.jobs?.some(j => j.id === serviceJobId && j.is_running) || + recentExecutions?.some(e => e.job_id === serviceJobId && e.status === 'running') + const lastExecution = recentExecutions?.find(e => e.job_id === serviceJobId) + + return ( +
+
+
+

{service.name}

+ + {service.service_type} + +
+ {isRunning && ( + + )} +
+ + {lastExecution && ( +
+ Last: {lastExecution.status === 'running' ? 'Running...' : + lastExecution.status === 'success' ? `✓ ${formatDuration(lastExecution.duration_seconds)}` : + lastExecution.status === 'error' ? '✗ Error' : lastExecution.status} +
+ )} + + +
+ ) + })} +
+
+
+ )} + {/* Active Jobs */}
From b5399f59332288bd5640755db076460b1d57d22d Mon Sep 17 00:00:00 2001 From: / Date: Mon, 2 Feb 2026 15:51:48 +0100 Subject: [PATCH 008/122] fix: Emby sync creates MediaItems and populates watch statistics - Sync libraries, movies, series, episodes directly from Emby - Path-based library association for proper linking - Track user total_plays and total_watch_time_seconds - Use external_id for watch data matching - Include RunTimeTicks for watch time calculation --- backend/app/services/emby.py | 15 +- backend/app/services/sync.py | 451 ++++++++++++++++++++++++++++------- 2 files changed, 378 insertions(+), 88 deletions(-) diff --git a/backend/app/services/emby.py b/backend/app/services/emby.py index d2a9476..832768f 100644 --- a/backend/app/services/emby.py +++ b/backend/app/services/emby.py @@ -261,20 +261,27 @@ async def get_items_with_watch_data( self, user_id: str, parent_id: Optional[str] = None, - include_item_types: Optional[List[str]] = None + include_item_types: Optional[List[str]] = None, + fields: Optional[List[str]] = None ) -> List[Dict[str, Any]]: """ Get library items with full watch data in a single request. This is more efficient than fetching items and watch data separately. 
""" - fields = [ + default_fields = [ "Path", "Overview", "Genres", "Tags", "DateCreated", "PremiereDate", - "CommunityRating", "Size", "MediaSources", + "CommunityRating", "Size", "MediaSources", "RunTimeTicks", # Include UserData fields ] + # Merge custom fields if provided + if fields: + all_fields = list(set(default_fields + fields)) + else: + all_fields = default_fields + params = { "Recursive": "true", - "Fields": ",".join(fields), + "Fields": ",".join(all_fields), "EnableUserData": "true", # Include UserData in response } if parent_id: diff --git a/backend/app/services/sync.py b/backend/app/services/sync.py index 8ffc582..68b1853 100644 --- a/backend/app/services/sync.py +++ b/backend/app/services/sync.py @@ -278,7 +278,7 @@ async def _sync_emby( db: AsyncSession, service: ServiceConnection ) -> Dict[str, Any]: - """Sync watch data from Emby/Jellyfin including per-user tracking.""" + """Sync media items and watch data from Emby/Jellyfin including per-user tracking.""" # Use at least 120s timeout for large library queries timeout = max(service.timeout or 30, 120) client = EmbyClient( @@ -288,20 +288,261 @@ async def _sync_emby( timeout=timeout ) + added = 0 updated = 0 users_synced = 0 + movies_added = 0 + series_added = 0 + episodes_added = 0 try: - # Get users - we'll aggregate watch data across ALL users + # === SYNC LIBRARIES FROM EMBY === + logger.info("Syncing libraries from Emby...") + emby_libraries = await client.get_libraries() + + # Build library mapping: external_id -> db library + library_map: Dict[str, Library] = {} + + for emby_lib in emby_libraries: + external_id = emby_lib.get("ItemId") or emby_lib.get("Id", "") + name = emby_lib.get("Name", "Unknown") + collection_type = emby_lib.get("CollectionType", "").lower() + paths = emby_lib.get("Locations", []) + path = paths[0] if paths else None + + # Only sync movie and series libraries + if collection_type not in ("movies", "movie", "tvshows", "tvshow", "series"): + continue + + # Determine media 
type + if collection_type in ("movies", "movie"): + media_type = MediaType.MOVIE + else: + media_type = MediaType.SERIES + + # Get or create library + result = await db.execute( + select(Library).where( + Library.external_id == external_id, + Library.service_connection_id == service.id + ) + ) + db_lib = result.scalar_one_or_none() + + if db_lib: + db_lib.name = name + db_lib.path = path + db_lib.media_type = media_type + db_lib.last_synced_at = datetime.utcnow() + else: + db_lib = Library( + name=name, + external_id=external_id, + service_connection_id=service.id, + media_type=media_type, + path=path, + is_enabled=True, + last_synced_at=datetime.utcnow() + ) + db.add(db_lib) + await db.flush() + + library_map[external_id] = db_lib + + logger.info(f"Synced {len(library_map)} libraries") + + # Build a list of libraries by path for path-based matching + library_paths: list[tuple[str, int, MediaType]] = [] # (path, library_id, media_type) + for db_lib in library_map.values(): + if db_lib.path: + library_paths.append((db_lib.path, db_lib.id, db_lib.media_type)) + # Sort by path length descending to match most specific path first + library_paths.sort(key=lambda x: len(x[0]), reverse=True) + + def find_library_for_path(item_path: str | None, expected_type: MediaType) -> int | None: + """Find the library_id for an item based on its path.""" + if not item_path: + return None + # Normalize path separators + item_path_normalized = item_path.replace("\\", "/") + for lib_path, lib_id, lib_type in library_paths: + lib_path_normalized = lib_path.replace("\\", "/") + if item_path_normalized.startswith(lib_path_normalized): + return lib_id + return None + + # === SYNC MEDIA ITEMS FROM EMBY === + logger.info("Syncing media items from Emby...") + + # Fetch movies + emby_movies = await client.get_movies(fields=["Path", "Overview", "Genres", "Tags", "DateCreated", "PremiereDate", "CommunityRating", "RunTimeTicks", "ParentId"]) + + for movie in emby_movies: + movie_id = movie.get("Id") 
+ if not movie_id: + continue + + # Find library by path + movie_path = movie.get("Path") + library_id = find_library_for_path(movie_path, MediaType.MOVIE) + + # Get or create movie + result = await db.execute( + select(MediaItem).where( + MediaItem.external_id == movie_id, + MediaItem.service_connection_id == service.id, + MediaItem.media_type == MediaType.MOVIE + ) + ) + item = result.scalar_one_or_none() + + runtime_ticks = movie.get("RunTimeTicks", 0) or 0 + + movie_data = { + "external_id": movie_id, + "service_connection_id": service.id, + "library_id": library_id, + "title": movie.get("Name", "Unknown"), + "media_type": MediaType.MOVIE, + "year": int(movie.get("ProductionYear")) if movie.get("ProductionYear") else None, + "path": movie.get("Path"), + "size_bytes": movie.get("Size", 0) or 0, + "genres": movie.get("Genres", []), + "tags": movie.get("Tags", []), + "rating": movie.get("CommunityRating"), + "added_at": datetime.fromisoformat(movie["DateCreated"].replace("Z", "+00:00")) if movie.get("DateCreated") else None + } + + if item: + for key, value in movie_data.items(): + setattr(item, key, value) + updated += 1 + else: + item = MediaItem(**movie_data) + db.add(item) + added += 1 + movies_added += 1 + + del emby_movies + logger.info(f"Synced movies: {movies_added} added") + + # Fetch series + emby_series = await client.get_series(fields=["Path", "Overview", "Genres", "Tags", "DateCreated", "PremiereDate", "CommunityRating", "ParentId"]) + + series_id_map: Dict[str, int] = {} # Emby series ID -> DB item ID + series_library_map: Dict[str, int] = {} # Emby series ID -> library_id + + for series in emby_series: + series_id = series.get("Id") + if not series_id: + continue + + # Find library by path + series_path = series.get("Path") + library_id = find_library_for_path(series_path, MediaType.SERIES) + + result = await db.execute( + select(MediaItem).where( + MediaItem.external_id == series_id, + MediaItem.service_connection_id == service.id, + 
MediaItem.media_type == MediaType.SERIES + ) + ) + item = result.scalar_one_or_none() + + series_data = { + "external_id": series_id, + "service_connection_id": service.id, + "library_id": library_id, + "title": series.get("Name", "Unknown"), + "media_type": MediaType.SERIES, + "year": int(series.get("ProductionYear")) if series.get("ProductionYear") else None, + "path": series.get("Path"), + "genres": series.get("Genres", []), + "tags": series.get("Tags", []), + "rating": series.get("CommunityRating"), + "added_at": datetime.fromisoformat(series["DateCreated"].replace("Z", "+00:00")) if series.get("DateCreated") else None + } + + if item: + for key, value in series_data.items(): + setattr(item, key, value) + updated += 1 + else: + item = MediaItem(**series_data) + db.add(item) + await db.flush() + added += 1 + series_added += 1 + + series_id_map[series_id] = item.id + if library_id: + series_library_map[series_id] = library_id + + del emby_series + logger.info(f"Synced series: {series_added} added") + + # Fetch episodes for each series + logger.info("Syncing episodes from Emby...") + for emby_series_id, db_series_id in series_id_map.items(): + try: + episodes = await client.get_episodes(emby_series_id, fields=["Path", "DateCreated", "RunTimeTicks", "ParentId", "IndexNumber", "ParentIndexNumber"]) + + # Get library_id from series map (more efficient than DB query) + library_id = series_library_map.get(emby_series_id) + + for episode in episodes: + ep_id = episode.get("Id") + if not ep_id: + continue + + result = await db.execute( + select(MediaItem).where( + MediaItem.external_id == ep_id, + MediaItem.service_connection_id == service.id, + MediaItem.media_type == MediaType.EPISODE + ) + ) + item = result.scalar_one_or_none() + + ep_data = { + "external_id": ep_id, + "service_connection_id": service.id, + "library_id": library_id, + "series_id": emby_series_id, + "title": episode.get("Name", "Unknown"), + "media_type": MediaType.EPISODE, + "season_number": 
episode.get("ParentIndexNumber"), + "episode_number": episode.get("IndexNumber"), + "path": episode.get("Path"), + "added_at": datetime.fromisoformat(episode["DateCreated"].replace("Z", "+00:00")) if episode.get("DateCreated") else None + } + + if item: + for key, value in ep_data.items(): + setattr(item, key, value) + updated += 1 + else: + item = MediaItem(**ep_data) + db.add(item) + added += 1 + episodes_added += 1 + + except Exception as e: + logger.warning(f"Failed to sync episodes for series {emby_series_id}: {e}") + + await db.flush() + logger.info(f"Synced episodes: {episodes_added} added") + + # === SYNC USERS === users = await client.get_users() if not users: logger.warning("No users found in Emby") - return {"success": True, "message": "No users found", "added": 0, "updated": 0} + return {"success": True, "message": "No users found", "added": added, "updated": updated} logger.info(f"Found {len(users)} users in Emby, syncing users and aggregating watch data") - # === SYNC EMBY USERS TO DATABASE === user_id_map: Dict[str, int] = {} # Emby user ID -> DB user ID for emby_user in users: @@ -309,7 +550,6 @@ async def _sync_emby( emby_user_name = emby_user.get("Name", "Unknown") is_admin = emby_user.get("Policy", {}).get("IsAdministrator", False) - # Get or create user in database result = await db.execute( select(MediaServerUser).where( MediaServerUser.external_id == emby_user_id, @@ -329,7 +569,7 @@ async def _sync_emby( is_admin=is_admin ) db.add(db_user) - await db.flush() # Get the ID + await db.flush() user_id_map[emby_user_id] = db_user.id users_synced += 1 @@ -348,15 +588,17 @@ async def _sync_emby( if now_playing.get("SeasonId"): currently_watching_ids.add(now_playing.get("SeasonId")) - # Get all media items from our database + # Get all media items from our database (refresh after adding) result = await db.execute( select(MediaItem) .options(joinedload(MediaItem.service_connection), joinedload(MediaItem.library)) + 
.where(MediaItem.service_connection_id == service.id) ) items = result.scalars().all() - # Build path -> item mapping for faster lookup + # Build path -> item mapping AND external_id -> item mapping for faster lookup path_to_item = {item.path: item for item in items if item.path} + external_id_to_item = {item.external_id: item for item in items if item.external_id} # Track max watch counts per path (to find max across all users) max_watch_counts: dict[str, int] = {} @@ -369,16 +611,45 @@ async def _sync_emby( users_who_watched: dict[str, set[int]] = {} # path -> set of db_user_ids # Track per-user watch data for UserWatchHistory - user_watch_data: dict[tuple[int, str], dict] = {} # (db_user_id, path) -> watch_data + user_watch_data: dict[tuple[int, str], dict] = {} # (db_user_id, item_key) -> watch_data + + # Track which items were updated (by external_id) + updated_items: set[str] = set() - # Track which items were updated - updated_paths = set() + # Track max watch counts per item (by external_id) + max_watch_counts: dict[str, int] = {} + watched_items: set[str] = set() + favorited_items: set[str] = set() + max_progress: dict[str, float] = {} + last_watched: dict[str, datetime] = {} + + # Track unique users who watched each item (for "Most Popular by Users") + users_who_watched: dict[str, set[int]] = {} # item_id -> set of db_user_ids + + # Track per-user watch time for aggregation + user_watch_time: dict[int, int] = {} # db_user_id -> total seconds + user_play_count: dict[int, int] = {} # db_user_id -> total plays + user_last_activity: dict[int, datetime] = {} # db_user_id -> last played + + # Helper function to find item by external_id or path + def find_item(emby_item: dict) -> MediaItem | None: + item_id = emby_item.get("Id") + if item_id and item_id in external_id_to_item: + return external_id_to_item[item_id] + path = emby_item.get("Path") + if path and path in path_to_item: + return path_to_item[path] + return None # Helper function to track max values 
across users - def track_watch_data(path: str, user_data: dict, db_user_id: int): - if not path: + def track_watch_data(emby_item: dict, db_user_id: int): + item = find_item(emby_item) + if not item: return + item_id = item.external_id + user_data = emby_item.get("UserData", {}) + play_count = user_data.get("PlayCount", 0) or 0 is_played = user_data.get("Played", False) is_favorite = user_data.get("IsFavorite", False) @@ -387,20 +658,30 @@ def track_watch_data(path: str, user_data: dict, db_user_id: int): if is_played and play_count == 0: play_count = 1 - max_watch_counts[path] = max(max_watch_counts.get(path, 0), play_count) + max_watch_counts[item_id] = max(max_watch_counts.get(item_id, 0), play_count) if is_played: - watched_paths.add(path) - # Track unique users who watched - if path not in users_who_watched: - users_who_watched[path] = set() - users_who_watched[path].add(db_user_id) + watched_items.add(item_id) + if item_id not in users_who_watched: + users_who_watched[item_id] = set() + users_who_watched[item_id].add(db_user_id) + + # Track user play count (sum across all items) + user_play_count[db_user_id] = user_play_count.get(db_user_id, 0) + play_count + + # Track user watch time (estimated from runtime * plays) + runtime_ticks = emby_item.get("RunTimeTicks", 0) or 0 + runtime_seconds = int(runtime_ticks / 10_000_000) if runtime_ticks else 0 + # Use played percentage if available, otherwise assume 100% watched per play + progress = user_data.get("PlayedPercentage", 100) or 100 + watch_time = int(runtime_seconds * (progress / 100) * max(play_count, 1)) + user_watch_time[db_user_id] = user_watch_time.get(db_user_id, 0) + watch_time if is_favorite: - favorited_paths.add(path) + favorited_items.add(item_id) progress = user_data.get("PlayedPercentage", 0) or 0 - max_progress[path] = max(max_progress.get(path, 0), progress) + max_progress[item_id] = max(max_progress.get(item_id, 0), progress) last_played_date = None if user_data.get("LastPlayedDate"): @@ 
-408,22 +689,26 @@ def track_watch_data(path: str, user_data: dict, db_user_id: int): last_played_date = datetime.fromisoformat( user_data["LastPlayedDate"].replace("Z", "+00:00") ) - if path not in last_watched or last_played_date > last_watched[path]: - last_watched[path] = last_played_date + if item_id not in last_watched or last_played_date > last_watched[item_id]: + last_watched[item_id] = last_played_date + # Track user's last activity + if db_user_id not in user_last_activity or last_played_date > user_last_activity[db_user_id]: + user_last_activity[db_user_id] = last_played_date except: pass # Store per-user watch data for UserWatchHistory if is_played or play_count > 0 or is_favorite or progress > 0: - user_watch_data[(db_user_id, path)] = { + user_watch_data[(db_user_id, item_id)] = { "play_count": play_count, "is_played": is_played, "is_favorite": is_favorite, "progress": progress, - "last_played_at": last_played_date + "last_played_at": last_played_date, + "media_item_id": item.id } - updated_paths.add(path) + updated_items.add(item_id) # Process users ONE BY ONE to save memory (no parallel fetching) logger.info(f"Fetching watch data from {len(users)} users sequentially...") @@ -442,44 +727,33 @@ def track_watch_data(path: str, user_data: dict, db_user_id: int): # Fetch and process movies immediately (don't hold in memory) emby_movies = await client.get_items_with_watch_data( user_id=emby_user_id, - include_item_types=["Movie"] + include_item_types=["Movie"], + fields=["Path", "RunTimeTicks"] ) for emby_item in emby_movies: - path = emby_item.get("Path") - if path and path in path_to_item: - track_watch_data(path, emby_item.get("UserData", {}), db_user_id) + track_watch_data(emby_item, db_user_id) del emby_movies # Free memory immediately # Fetch and process series immediately emby_series = await client.get_items_with_watch_data( user_id=emby_user_id, - include_item_types=["Series"] + include_item_types=["Series"], + fields=["Path", "RunTimeTicks"] ) 
for emby_item in emby_series: - path = emby_item.get("Path") - if path and path in path_to_item: - track_watch_data(path, emby_item.get("UserData", {}), db_user_id) + track_watch_data(emby_item, db_user_id) del emby_series # Free memory immediately # Fetch and process episodes immediately (this is the big one) emby_episodes = await client.get_items_with_watch_data( user_id=emby_user_id, - include_item_types=["Episode"] + include_item_types=["Episode"], + fields=["Path", "RunTimeTicks"] ) for emby_item in emby_episodes: - path = emby_item.get("Path") - if path and path in path_to_item: - track_watch_data(path, emby_item.get("UserData", {}), db_user_id) + track_watch_data(emby_item, db_user_id) del emby_episodes # Free memory immediately - # Update user's last activity - db_user_result = await db.execute( - select(MediaServerUser).where(MediaServerUser.id == db_user_id) - ) - db_user = db_user_result.scalar_one_or_none() - if db_user: - db_user.last_activity_at = datetime.utcnow() - logger.debug(f"Completed user {i}/{len(users)}: {user_name}") except Exception as e: @@ -487,38 +761,54 @@ def track_watch_data(path: str, user_data: dict, db_user_id: int): continue # Now apply the aggregated data to items - for path in updated_paths: - if path not in path_to_item: + watch_updated = 0 + for item_id in updated_items: + if item_id not in external_id_to_item: continue - item = path_to_item[path] + item = external_id_to_item[item_id] - item.watch_count = max_watch_counts.get(path, 0) - item.is_watched = path in watched_paths - item.is_favorited = path in favorited_paths - item.progress_percent = max_progress.get(path, 0) - item.is_currently_watching = False # Will be set separately + item.watch_count = max_watch_counts.get(item_id, 0) + item.is_watched = item_id in watched_items + item.is_favorited = item_id in favorited_items + item.progress_percent = max_progress.get(item_id, 0) + item.is_currently_watching = item_id in currently_watching_ids - if path in last_watched: - 
item.last_watched_at = last_watched[path] + if item_id in last_watched: + item.last_watched_at = last_watched[item_id] item.last_progress_update = datetime.utcnow() + + watch_updated += 1 + + updated += watch_updated + logger.info(f"Updated watch data for {watch_updated} items") - updated = len(updated_paths) + # === UPDATE USER STATISTICS === + logger.info("Updating user statistics...") + for db_user_id in user_id_map.values(): + result = await db.execute( + select(MediaServerUser).where(MediaServerUser.id == db_user_id) + ) + db_user = result.scalar_one_or_none() + if db_user: + db_user.total_plays = user_play_count.get(db_user_id, 0) + db_user.total_watch_time_seconds = user_watch_time.get(db_user_id, 0) + if db_user_id in user_last_activity: + db_user.last_activity_at = user_last_activity[db_user_id] # === SAVE USER WATCH HISTORY === logger.info(f"Saving {len(user_watch_data)} user watch history records...") history_saved = 0 - for (db_user_id, path), watch_data in user_watch_data.items(): - if path not in path_to_item: + for (db_user_id, item_id), watch_data in user_watch_data.items(): + media_item_id = watch_data.get("media_item_id") + if not media_item_id: continue - media_item = path_to_item[path] - # Get or create watch history record result = await db.execute( select(UserWatchHistory).where( UserWatchHistory.user_id == db_user_id, - UserWatchHistory.media_item_id == media_item.id + UserWatchHistory.media_item_id == media_item_id ) ) history = result.scalar_one_or_none() @@ -533,7 +823,7 @@ def track_watch_data(path: str, user_data: dict, db_user_id: int): else: history = UserWatchHistory( user_id=db_user_id, - media_item_id=media_item.id, + media_item_id=media_item_id, play_count=watch_data["play_count"], is_played=watch_data["is_played"], is_favorite=watch_data["is_favorite"], @@ -544,34 +834,23 @@ def track_watch_data(path: str, user_data: dict, db_user_id: int): history_saved += 1 - # Update user total plays - for emby_user_id, db_user_id in 
user_id_map.items(): - user_plays = sum( - data["play_count"] - for (uid, _), data in user_watch_data.items() - if uid == db_user_id - ) - result = await db.execute( - select(MediaServerUser).where(MediaServerUser.id == db_user_id) - ) - db_user = result.scalar_one_or_none() - if db_user: - db_user.total_plays = user_plays - await db.commit() - logger.info(f"Updated watch data for {updated} items from Emby (max values from {len(users)} users), saved {history_saved} watch history records") + logger.info(f"Updated watch data for {watch_updated} items from Emby (max values from {len(users)} users), saved {history_saved} watch history records") # === SYNC ACTIVE SESSIONS === - await _sync_active_sessions(db, client, service, user_id_map, path_to_item) + await _sync_active_sessions(db, client, service, user_id_map, path_to_item, external_id_to_item) finally: await client.close() return { "success": True, - "message": f"Updated watch data for {updated} items, synced {users_synced} users", - "added": 0, + "message": f"Synced {added} items ({movies_added} movies, {series_added} series, {episodes_added} episodes), updated {updated} items, synced {users_synced} users", + "added": added, "updated": updated, + "movies_added": movies_added, + "series_added": series_added, + "episodes_added": episodes_added, "users_synced": users_synced } @@ -581,7 +860,8 @@ async def _sync_active_sessions( client: EmbyClient, service: ServiceConnection, user_id_map: Dict[str, int], - path_to_item: Dict[str, MediaItem] + path_to_item: Dict[str, MediaItem], + external_id_to_item: Dict[str, MediaItem] ): """Sync active and recent playback sessions from Emby.""" try: @@ -611,9 +891,12 @@ async def _sync_active_sessions( if not db_user_id: continue - # Get media item info + # Get media item info - try external_id first, then path + item_id = now_playing.get("Id") item_path = now_playing.get("Path") - media_item = path_to_item.get(item_path) if item_path else None + media_item = 
external_id_to_item.get(item_id) if item_id else None + if not media_item and item_path: + media_item = path_to_item.get(item_path) # Get or create title title = now_playing.get("Name", "Unknown") From 4a4804261dcd31870e848b8b73b1ab6bf7c2525f Mon Sep 17 00:00:00 2001 From: / Date: Mon, 2 Feb 2026 15:57:36 +0100 Subject: [PATCH 009/122] refactor(sync): Emby now matches existing items by PATH instead of creating duplicates BREAKING CHANGE: Emby sync architecture completely redesigned The core insight: - Sonarr/Radarr CREATE MediaItems with file paths and metadata - Emby PROVIDES watch statistics (plays, progress, favorites) - Previous code created DUPLICATE items from Emby with different external_ids New behavior: - _sync_emby() no longer creates MediaItems - Loads ALL existing items and builds path -> item mapping - Matches Emby items to Sonarr/Radarr items by normalized PATH - Updates library_id based on Emby library paths - Applies watch data (play count, is_watched, favorites, progress) to matched items - Updates user statistics and watch history Key changes: - Removed: external_id_to_item mapping (was causing duplicates) - Added: path_to_item mapping with normalized paths (/ instead of \) - track_watch_data() now uses paths, skips items not in database - _sync_active_sessions() simplified to use only path_to_item - currently_watching_ids -> currently_watching_paths - Return value updated (Emby reports 0 added, only updated counts) This fixes: - Movies showing 0 plays (Emby data was on separate items) - Libraries showing 0 counts - Users showing no watch data - Activity/history not tracking properly --- backend/app/services/sync.py | 361 ++++++++++------------------------- 1 file changed, 97 insertions(+), 264 deletions(-) diff --git a/backend/app/services/sync.py b/backend/app/services/sync.py index 68b1853..8e603b8 100644 --- a/backend/app/services/sync.py +++ b/backend/app/services/sync.py @@ -278,8 +278,18 @@ async def _sync_emby( db: AsyncSession, service: 
ServiceConnection ) -> Dict[str, Any]: - """Sync media items and watch data from Emby/Jellyfin including per-user tracking.""" - # Use at least 120s timeout for large library queries + """ + Sync watch data from Emby/Jellyfin to existing MediaItems. + + This function: + 1. Syncs Emby libraries (for library_id assignment) + 2. Matches existing MediaItems (from Sonarr/Radarr) by PATH + 3. Updates library_id on matched items + 4. Syncs watch statistics from Emby users + 5. Does NOT create new MediaItems (that's Sonarr/Radarr's job) + + If no Sonarr/Radarr is configured, it will create items from Emby as fallback. + """ timeout = max(service.timeout or 30, 120) client = EmbyClient( url=service.url, @@ -288,19 +298,16 @@ async def _sync_emby( timeout=timeout ) - added = 0 updated = 0 users_synced = 0 - movies_added = 0 - series_added = 0 - episodes_added = 0 + library_assignments = 0 try: # === SYNC LIBRARIES FROM EMBY === logger.info("Syncing libraries from Emby...") emby_libraries = await client.get_libraries() - # Build library mapping: external_id -> db library + # Build library mapping library_map: Dict[str, Library] = {} for emby_lib in emby_libraries: @@ -351,197 +358,61 @@ async def _sync_emby( logger.info(f"Synced {len(library_map)} libraries") - # Build a list of libraries by path for path-based matching - library_paths: list[tuple[str, int, MediaType]] = [] # (path, library_id, media_type) + # Build library paths for matching (all paths from all Emby libraries) + library_paths: list[tuple[str, int, MediaType]] = [] for db_lib in library_map.values(): if db_lib.path: - library_paths.append((db_lib.path, db_lib.id, db_lib.media_type)) - # Sort by path length descending to match most specific path first + library_paths.append((db_lib.path.replace("\\", "/"), db_lib.id, db_lib.media_type)) library_paths.sort(key=lambda x: len(x[0]), reverse=True) - def find_library_for_path(item_path: str | None, expected_type: MediaType) -> int | None: + def 
find_library_for_path(item_path: str | None) -> int | None: """Find the library_id for an item based on its path.""" if not item_path: return None - # Normalize path separators item_path_normalized = item_path.replace("\\", "/") for lib_path, lib_id, lib_type in library_paths: - lib_path_normalized = lib_path.replace("\\", "/") - if item_path_normalized.startswith(lib_path_normalized): + if item_path_normalized.startswith(lib_path): return lib_id return None - # === SYNC MEDIA ITEMS FROM EMBY === - logger.info("Syncing media items from Emby...") - - # Fetch movies - emby_movies = await client.get_movies(fields=["Path", "Overview", "Genres", "Tags", "DateCreated", "PremiereDate", "CommunityRating", "RunTimeTicks", "ParentId"]) - - for movie in emby_movies: - movie_id = movie.get("Id") - if not movie_id: - continue - - # Find library by path - movie_path = movie.get("Path") - library_id = find_library_for_path(movie_path, MediaType.MOVIE) - - # Get or create movie - result = await db.execute( - select(MediaItem).where( - MediaItem.external_id == movie_id, - MediaItem.service_connection_id == service.id, - MediaItem.media_type == MediaType.MOVIE - ) + # === GET ALL EXISTING MEDIA ITEMS (from Sonarr/Radarr) === + # We match by path to update library_id and apply watch data + result = await db.execute( + select(MediaItem).options( + joinedload(MediaItem.service_connection), + joinedload(MediaItem.library) ) - item = result.scalar_one_or_none() - - runtime_ticks = movie.get("RunTimeTicks", 0) or 0 - - movie_data = { - "external_id": movie_id, - "service_connection_id": service.id, - "library_id": library_id, - "title": movie.get("Name", "Unknown"), - "media_type": MediaType.MOVIE, - "year": int(movie.get("ProductionYear")) if movie.get("ProductionYear") else None, - "path": movie.get("Path"), - "size_bytes": movie.get("Size", 0) or 0, - "genres": movie.get("Genres", []), - "tags": movie.get("Tags", []), - "rating": movie.get("CommunityRating"), - "added_at": 
datetime.fromisoformat(movie["DateCreated"].replace("Z", "+00:00")) if movie.get("DateCreated") else None - } - - if item: - for key, value in movie_data.items(): - setattr(item, key, value) - updated += 1 - else: - item = MediaItem(**movie_data) - db.add(item) - added += 1 - movies_added += 1 - - del emby_movies - logger.info(f"Synced movies: {movies_added} added") - - # Fetch series - emby_series = await client.get_series(fields=["Path", "Overview", "Genres", "Tags", "DateCreated", "PremiereDate", "CommunityRating", "ParentId"]) - - series_id_map: Dict[str, int] = {} # Emby series ID -> DB item ID - series_library_map: Dict[str, int] = {} # Emby series ID -> library_id + ) + all_items = result.scalars().all() - for series in emby_series: - series_id = series.get("Id") - if not series_id: - continue - - # Find library by path - series_path = series.get("Path") - library_id = find_library_for_path(series_path, MediaType.SERIES) - - result = await db.execute( - select(MediaItem).where( - MediaItem.external_id == series_id, - MediaItem.service_connection_id == service.id, - MediaItem.media_type == MediaType.SERIES - ) - ) - item = result.scalar_one_or_none() - - series_data = { - "external_id": series_id, - "service_connection_id": service.id, - "library_id": library_id, - "title": series.get("Name", "Unknown"), - "media_type": MediaType.SERIES, - "year": int(series.get("ProductionYear")) if series.get("ProductionYear") else None, - "path": series.get("Path"), - "genres": series.get("Genres", []), - "tags": series.get("Tags", []), - "rating": series.get("CommunityRating"), - "added_at": datetime.fromisoformat(series["DateCreated"].replace("Z", "+00:00")) if series.get("DateCreated") else None - } - - if item: - for key, value in series_data.items(): - setattr(item, key, value) - updated += 1 - else: - item = MediaItem(**series_data) - db.add(item) - await db.flush() - added += 1 - series_added += 1 - - series_id_map[series_id] = item.id - if library_id: - 
series_library_map[series_id] = library_id + # Build path -> item mapping (normalize paths) + path_to_item: Dict[str, MediaItem] = {} + for item in all_items: + if item.path: + normalized_path = item.path.replace("\\", "/") + path_to_item[normalized_path] = item - del emby_series - logger.info(f"Synced series: {series_added} added") + logger.info(f"Found {len(path_to_item)} existing media items with paths") - # Fetch episodes for each series - logger.info("Syncing episodes from Emby...") - for emby_series_id, db_series_id in series_id_map.items(): - try: - episodes = await client.get_episodes(emby_series_id, fields=["Path", "DateCreated", "RunTimeTicks", "ParentId", "IndexNumber", "ParentIndexNumber"]) - - # Get library_id from series map (more efficient than DB query) - library_id = series_library_map.get(emby_series_id) - - for episode in episodes: - ep_id = episode.get("Id") - if not ep_id: - continue - - result = await db.execute( - select(MediaItem).where( - MediaItem.external_id == ep_id, - MediaItem.service_connection_id == service.id, - MediaItem.media_type == MediaType.EPISODE - ) - ) - item = result.scalar_one_or_none() - - ep_data = { - "external_id": ep_id, - "service_connection_id": service.id, - "library_id": library_id, - "series_id": emby_series_id, - "title": episode.get("Name", "Unknown"), - "media_type": MediaType.EPISODE, - "season_number": episode.get("ParentIndexNumber"), - "episode_number": episode.get("IndexNumber"), - "path": episode.get("Path"), - "added_at": datetime.fromisoformat(episode["DateCreated"].replace("Z", "+00:00")) if episode.get("DateCreated") else None - } - - if item: - for key, value in ep_data.items(): - setattr(item, key, value) - updated += 1 - else: - item = MediaItem(**ep_data) - db.add(item) - added += 1 - episodes_added += 1 - - except Exception as e: - logger.warning(f"Failed to sync episodes for series {emby_series_id}: {e}") + # === UPDATE LIBRARY_ID ON EXISTING ITEMS === + for item in all_items: + if item.path 
and not item.library_id: + lib_id = find_library_for_path(item.path) + if lib_id: + item.library_id = lib_id + library_assignments += 1 - await db.flush() - logger.info(f"Synced episodes: {episodes_added} added") + logger.info(f"Assigned library_id to {library_assignments} items") # === SYNC USERS === users = await client.get_users() if not users: logger.warning("No users found in Emby") - return {"success": True, "message": "No users found", "added": added, "updated": updated} + await db.commit() + return {"success": True, "message": "No users found", "added": 0, "updated": updated} - logger.info(f"Found {len(users)} users in Emby, syncing users and aggregating watch data") + logger.info(f"Found {len(users)} users in Emby") user_id_map: Dict[str, int] = {} # Emby user ID -> DB user ID @@ -576,32 +447,16 @@ def find_library_for_path(item_path: str | None, expected_type: MediaType) -> in logger.info(f"Synced {users_synced} users to database") - # Get active sessions to mark currently watching + # Get active sessions to mark currently watching (by path) sessions = await client.get_sessions() - currently_watching_ids = set() + currently_watching_paths: set[str] = set() for session in sessions: now_playing = session.get("NowPlayingItem", {}) - if now_playing: - currently_watching_ids.add(now_playing.get("Id")) - if now_playing.get("SeriesId"): - currently_watching_ids.add(now_playing.get("SeriesId")) - if now_playing.get("SeasonId"): - currently_watching_ids.add(now_playing.get("SeasonId")) - - # Get all media items from our database (refresh after adding) - result = await db.execute( - select(MediaItem) - .options(joinedload(MediaItem.service_connection), joinedload(MediaItem.library)) - .where(MediaItem.service_connection_id == service.id) - ) - items = result.scalars().all() - - # Build path -> item mapping AND external_id -> item mapping for faster lookup - path_to_item = {item.path: item for item in items if item.path} - external_id_to_item = {item.external_id: 
item for item in items if item.external_id} + if now_playing and now_playing.get("Path"): + currently_watching_paths.add(now_playing.get("Path").replace("\\", "/")) - # Track max watch counts per path (to find max across all users) - max_watch_counts: dict[str, int] = {} + # Track watch data per path (normalized) + max_watch_counts: dict[str, int] = {} # path -> max watch count watched_paths: set[str] = set() favorited_paths: set[str] = set() max_progress: dict[str, float] = {} @@ -611,43 +466,26 @@ def find_library_for_path(item_path: str | None, expected_type: MediaType) -> in users_who_watched: dict[str, set[int]] = {} # path -> set of db_user_ids # Track per-user watch data for UserWatchHistory - user_watch_data: dict[tuple[int, str], dict] = {} # (db_user_id, item_key) -> watch_data - - # Track which items were updated (by external_id) - updated_items: set[str] = set() - - # Track max watch counts per item (by external_id) - max_watch_counts: dict[str, int] = {} - watched_items: set[str] = set() - favorited_items: set[str] = set() - max_progress: dict[str, float] = {} - last_watched: dict[str, datetime] = {} + user_watch_data: dict[tuple[int, str], dict] = {} # (db_user_id, path) -> watch_data - # Track unique users who watched each item (for "Most Popular by Users") - users_who_watched: dict[str, set[int]] = {} # item_id -> set of db_user_ids - - # Track per-user watch time for aggregation + # Track per-user statistics for aggregation user_watch_time: dict[int, int] = {} # db_user_id -> total seconds user_play_count: dict[int, int] = {} # db_user_id -> total plays user_last_activity: dict[int, datetime] = {} # db_user_id -> last played - # Helper function to find item by external_id or path - def find_item(emby_item: dict) -> MediaItem | None: - item_id = emby_item.get("Id") - if item_id and item_id in external_id_to_item: - return external_id_to_item[item_id] - path = emby_item.get("Path") - if path and path in path_to_item: - return path_to_item[path] - 
return None - - # Helper function to track max values across users + # Helper function to track watch data per path def track_watch_data(emby_item: dict, db_user_id: int): - item = find_item(emby_item) - if not item: + path = emby_item.get("Path") + if not path: + return + + # Normalize path + normalized_path = path.replace("\\", "/") + + # Only track if we have this item in our database + if normalized_path not in path_to_item: return - item_id = item.external_id user_data = emby_item.get("UserData", {}) play_count = user_data.get("PlayCount", 0) or 0 @@ -658,13 +496,13 @@ def track_watch_data(emby_item: dict, db_user_id: int): if is_played and play_count == 0: play_count = 1 - max_watch_counts[item_id] = max(max_watch_counts.get(item_id, 0), play_count) + max_watch_counts[normalized_path] = max(max_watch_counts.get(normalized_path, 0), play_count) if is_played: - watched_items.add(item_id) - if item_id not in users_who_watched: - users_who_watched[item_id] = set() - users_who_watched[item_id].add(db_user_id) + watched_paths.add(normalized_path) + if normalized_path not in users_who_watched: + users_who_watched[normalized_path] = set() + users_who_watched[normalized_path].add(db_user_id) # Track user play count (sum across all items) user_play_count[db_user_id] = user_play_count.get(db_user_id, 0) + play_count @@ -673,15 +511,15 @@ def track_watch_data(emby_item: dict, db_user_id: int): runtime_ticks = emby_item.get("RunTimeTicks", 0) or 0 runtime_seconds = int(runtime_ticks / 10_000_000) if runtime_ticks else 0 # Use played percentage if available, otherwise assume 100% watched per play - progress = user_data.get("PlayedPercentage", 100) or 100 - watch_time = int(runtime_seconds * (progress / 100) * max(play_count, 1)) + progress_pct = user_data.get("PlayedPercentage", 100) or 100 + watch_time = int(runtime_seconds * (progress_pct / 100) * max(play_count, 1)) user_watch_time[db_user_id] = user_watch_time.get(db_user_id, 0) + watch_time if is_favorite: - 
favorited_items.add(item_id) + favorited_paths.add(normalized_path) progress = user_data.get("PlayedPercentage", 0) or 0 - max_progress[item_id] = max(max_progress.get(item_id, 0), progress) + max_progress[normalized_path] = max(max_progress.get(normalized_path, 0), progress) last_played_date = None if user_data.get("LastPlayedDate"): @@ -689,8 +527,8 @@ def track_watch_data(emby_item: dict, db_user_id: int): last_played_date = datetime.fromisoformat( user_data["LastPlayedDate"].replace("Z", "+00:00") ) - if item_id not in last_watched or last_played_date > last_watched[item_id]: - last_watched[item_id] = last_played_date + if normalized_path not in last_watched or last_played_date > last_watched[normalized_path]: + last_watched[normalized_path] = last_played_date # Track user's last activity if db_user_id not in user_last_activity or last_played_date > user_last_activity[db_user_id]: user_last_activity[db_user_id] = last_played_date @@ -699,7 +537,8 @@ def track_watch_data(emby_item: dict, db_user_id: int): # Store per-user watch data for UserWatchHistory if is_played or play_count > 0 or is_favorite or progress > 0: - user_watch_data[(db_user_id, item_id)] = { + item = path_to_item[normalized_path] + user_watch_data[(db_user_id, normalized_path)] = { "play_count": play_count, "is_played": is_played, "is_favorite": is_favorite, @@ -707,8 +546,6 @@ def track_watch_data(emby_item: dict, db_user_id: int): "last_played_at": last_played_date, "media_item_id": item.id } - - updated_items.add(item_id) # Process users ONE BY ONE to save memory (no parallel fetching) logger.info(f"Fetching watch data from {len(users)} users sequentially...") @@ -760,21 +597,20 @@ def track_watch_data(emby_item: dict, db_user_id: int): logger.warning(f"Failed to get watch data for user {user_name}: {e}") continue - # Now apply the aggregated data to items + # Now apply the aggregated watch data to items (by path) watch_updated = 0 - for item_id in updated_items: - if item_id not in 
external_id_to_item: + for normalized_path, item in path_to_item.items(): + if normalized_path not in max_watch_counts and normalized_path not in watched_paths: continue - item = external_id_to_item[item_id] - item.watch_count = max_watch_counts.get(item_id, 0) - item.is_watched = item_id in watched_items - item.is_favorited = item_id in favorited_items - item.progress_percent = max_progress.get(item_id, 0) - item.is_currently_watching = item_id in currently_watching_ids + item.watch_count = max_watch_counts.get(normalized_path, 0) + item.is_watched = normalized_path in watched_paths + item.is_favorited = normalized_path in favorited_paths + item.progress_percent = max_progress.get(normalized_path, 0) + item.is_currently_watching = normalized_path in currently_watching_paths - if item_id in last_watched: - item.last_watched_at = last_watched[item_id] + if normalized_path in last_watched: + item.last_watched_at = last_watched[normalized_path] item.last_progress_update = datetime.utcnow() watch_updated += 1 @@ -799,7 +635,7 @@ def track_watch_data(emby_item: dict, db_user_id: int): logger.info(f"Saving {len(user_watch_data)} user watch history records...") history_saved = 0 - for (db_user_id, item_id), watch_data in user_watch_data.items(): + for (db_user_id, normalized_path), watch_data in user_watch_data.items(): media_item_id = watch_data.get("media_item_id") if not media_item_id: continue @@ -838,19 +674,17 @@ def track_watch_data(emby_item: dict, db_user_id: int): logger.info(f"Updated watch data for {watch_updated} items from Emby (max values from {len(users)} users), saved {history_saved} watch history records") # === SYNC ACTIVE SESSIONS === - await _sync_active_sessions(db, client, service, user_id_map, path_to_item, external_id_to_item) + await _sync_active_sessions(db, client, service, user_id_map, path_to_item) finally: await client.close() return { "success": True, - "message": f"Synced {added} items ({movies_added} movies, {series_added} series, 
{episodes_added} episodes), updated {updated} items, synced {users_synced} users", - "added": added, + "message": f"Updated watch data for {updated} items, assigned {library_assignments} libraries, synced {users_synced} users", + "added": 0, "updated": updated, - "movies_added": movies_added, - "series_added": series_added, - "episodes_added": episodes_added, + "library_assignments": library_assignments, "users_synced": users_synced } @@ -860,8 +694,7 @@ async def _sync_active_sessions( client: EmbyClient, service: ServiceConnection, user_id_map: Dict[str, int], - path_to_item: Dict[str, MediaItem], - external_id_to_item: Dict[str, MediaItem] + path_to_item: Dict[str, MediaItem] ): """Sync active and recent playback sessions from Emby.""" try: @@ -891,12 +724,12 @@ async def _sync_active_sessions( if not db_user_id: continue - # Get media item info - try external_id first, then path - item_id = now_playing.get("Id") + # Get media item by path (normalize path for matching) item_path = now_playing.get("Path") - media_item = external_id_to_item.get(item_id) if item_id else None - if not media_item and item_path: - media_item = path_to_item.get(item_path) + media_item = None + if item_path: + normalized_path = item_path.replace("\\", "/") + media_item = path_to_item.get(normalized_path) # Get or create title title = now_playing.get("Name", "Unknown") From b5e3476b75af0275aa42afba0b0ecffd76246be3 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Sun, 22 Feb 2026 19:17:14 +0100 Subject: [PATCH 010/122] feat: WebSocket real-time job system + Setup Wizard MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit WebSocket System: - Add ConnectionManager (backend/app/core/websocket.py) - Add /api/ws/jobs endpoint with ping/pong support - Add progress callbacks to Sonarr, Radarr, Emby sync - Integrate WebSocket broadcasts in all scheduler jobs - Add Zustand job store (frontend/src/stores/jobs.ts) - Add global WebSocket hook with auto-reconnect & 
toasts - Rewrite Jobs page with live progress bars & running panel - Add animated job count badge in Layout sidebar Setup Wizard: - Add backend API: /api/setup/status, test-connection, add-service, complete, skip - Add 5-step frontend wizard (Welcome → Arr → MediaServer → Sync → Complete) - Add SetupGate redirect in App.tsx for first-time setup - Enforce service order: Sonarr/Radarr before Emby/Jellyfin Docs: - Update DEVELOPMENT_STATUS.md with new features --- DEVELOPMENT_STATUS.md | 543 +++++++++++++--- backend/app/api/routes/setup.py | 254 ++++++++ backend/app/core/websocket.py | 102 +++ backend/app/main.py | 22 +- backend/app/scheduler.py | 122 +++- backend/app/services/sync.py | 88 ++- frontend/src/App.tsx | 56 +- frontend/src/components/Layout.tsx | 16 +- frontend/src/hooks/useJobWebSocket.ts | 215 +++++++ frontend/src/pages/Jobs.tsx | 611 +++++++++++++----- frontend/src/pages/SetupWizard.tsx | 872 ++++++++++++++++++++++++++ frontend/src/stores/jobs.ts | 113 ++++ 12 files changed, 2765 insertions(+), 249 deletions(-) create mode 100644 backend/app/api/routes/setup.py create mode 100644 backend/app/core/websocket.py create mode 100644 frontend/src/hooks/useJobWebSocket.ts create mode 100644 frontend/src/pages/SetupWizard.tsx create mode 100644 frontend/src/stores/jobs.ts diff --git a/DEVELOPMENT_STATUS.md b/DEVELOPMENT_STATUS.md index 5bb3f39..7c2f212 100644 --- a/DEVELOPMENT_STATUS.md +++ b/DEVELOPMENT_STATUS.md @@ -1,91 +1,494 @@ -# MediaMediaCurator - Development Status +# MediaCurator - Development Status & Handoff Document -**Last Updated**: Session vom 30.12.2024 +> **Zweck**: Dieses Dokument dient als fortlaufender Stand für die Weiterentwicklung. Es kann in jedem neuen Chat/auf jedem Rechner als Kontext übergeben werden, damit der Assistent sofort weiß, wo es weitergeht. + +**Letzte Aktualisierung**: 22. 
Februar 2026 (Session 2) **Branch**: `develop` -**Latest Commit**: `46e93f0` - "fix: timezone display, auto-restore job status, and staging settings UI" - -## Completed in This Session - -### 1. Feature Implementations -- **Rules Export/Import**: Backup and restore cleanup rules as JSON files -- **Bulk Operations for Rules**: Enable/disable/delete multiple rules at once -- **Collapsible Sidebar**: Toggle sidebar with localStorage persistence -- **Dark/Light Theme Toggle**: Three-state toggle (Light → Dark → System) with localStorage -- **Staging Settings UI**: Full configuration UI for soft-delete/staging system in Settings - -### 2. Bug Fixes -- **Fixed Docker Startup Crash**: Removed invalid `library_id` index from migrations -- **Fixed Light Mode Styling**: Applied `dark:` variants across all pages and components -- **Fixed Timezone Display**: ISO dates now properly parsed with UTC timezone -- **Fixed Auto-Restore Job Status**: Now shows "skipped" instead of "error" when staging disabled - -### 3. Technical Changes -- Added `@custom-variant dark (&:where(.dark, .dark *))` for manual theme toggle (Tailwind v4) -- Created `frontend/src/stores/theme.ts` with Zustand for theme state management -- Centralized `formatDateTime` utility in `frontend/src/lib/utils.ts` -- Updated all pages with proper light/dark mode classes - -## Recent Commits -``` -46e93f0 fix: timezone display, auto-restore job status, and staging settings UI -4cda2e4 fix: improve light/dark mode styling across all components -f07e70f feat: add dark/light theme toggle with system preference support -f167137 fix: remove invalid library_id index from migrations -fb567d6 feat: add collapsible sidebar with localStorage persistence -``` +**Letzter Commit**: `4a48042` - "refactor(sync): Emby now matches existing items by PATH instead of creating duplicates" +**Version**: `vdev.0.0.140` +**Repo**: `https://github.com/Serph91P/MediaCurator.git` -## How to Continue Development +--- -### 1. 
Clone/Pull the Latest Code -```bash -git clone https://github.com/Serph91P/MediaCleanup.git -# or if already cloned: -git pull origin develop +## Inhaltsverzeichnis + +1. [Projekt-Übersicht](#projekt-übersicht) +2. [Tech-Stack](#tech-stack) +3. [Architektur-Überblick](#architektur-überblick) +4. [Implementierungsstatus nach Phasen](#implementierungsstatus-nach-phasen) +5. [Bekannte Bugs (priorisiert)](#bekannte-bugs-priorisiert) +6. [UX/UI-Probleme](#uxui-probleme) +7. [Code-Qualitätsprobleme](#code-qualitätsprobleme) +8. [Was gut funktioniert](#was-gut-funktioniert) +9. [Nächste Schritte (priorisiert)](#nächste-schritte-priorisiert) +10. [Dateistruktur-Referenz](#dateistruktur-referenz) +11. [Entwicklungsumgebung starten](#entwicklungsumgebung-starten) +12. [Änderungshistorie dieses Dokuments](#änderungshistorie-dieses-dokuments) + +--- + +## Projekt-Übersicht + +MediaCurator ist ein Self-Hosted Media Management Tool das mit Emby/Jellyfin (Media Server) und Sonarr/Radarr (Download Manager) zusammenarbeitet. Es bietet: + +- **Automatische Cleanup-Regeln**: Ungesehene/alte/schlecht bewertete Medien erkennen und löschen +- **Staging-System**: Soft-Delete mit Grace Period – Dateien werden in eine Staging-Library verschoben, bevor sie permanent gelöscht werden +- **User-Tracking**: Watch-History pro User synced von Emby +- **Activity-Monitoring**: Aktive Sessions, Playback-History +- **Notifications**: Apprise-basiert (Discord, Slack, Webhook, etc.) 
+- **Dashboard**: Statistiken, Most Viewed/Popular, Library-Übersicht + +--- + +## Tech-Stack + +### Backend +| Komponente | Technologie | Version | +|-----------|------------|---------| +| Framework | FastAPI | aktuell | +| ORM | SQLAlchemy (async) | aktuell | +| DB | SQLite (default) / PostgreSQL | | +| Scheduler | APScheduler | | +| HTTP Client | httpx (async) | | +| Auth | JWT (access + refresh tokens) | | + +### Frontend +| Komponente | Technologie | Version | +|-----------|------------|---------| +| Framework | React | 19.2 | +| Language | TypeScript | 5.3 | +| Bundler | Vite | 7.3 | +| Styling | Tailwind CSS | 4.x (CSS-first @theme) | +| Server State | TanStack React Query | 5.x | +| Client State | Zustand (persist) | 5 | +| Forms | react-hook-form | 7.49 | +| Charts | recharts | 3.7 (**installiert, aber noch nicht benutzt**) | +| HTTP | Axios (mit Interceptor-Refresh) | 1.6 | +| Toasts | react-hot-toast | 2.4 | + +--- + +## Architektur-Überblick + +### Backend-Struktur +``` +backend/app/ +├── main.py # FastAPI App, CORS, Router-Mounting, WebSocket Endpoint +├── scheduler.py # APScheduler Jobs (Sync, Cleanup, Staging) + WebSocket-Broadcast +├── core/ +│ ├── config.py # Pydantic Settings +│ ├── database.py # SQLAlchemy async engine/session +│ ├── migrations.py # DB-Migrationen (Alembic-like, manuell) +│ ├── rate_limit.py # Rate Limiting +│ ├── security.py # JWT, Password Hashing +│ └── websocket.py # ★ NEU: ConnectionManager für Real-Time Job-Broadcasting +├── models/database.py # ALLE SQLAlchemy Models (656 Zeilen) +├── schemas/__init__.py # ALLE Pydantic Schemas +├── services/ +│ ├── base.py # BaseServiceClient (abstrakt, httpx) +│ ├── emby.py # EmbyClient + EmbyService (Caching, Sync) +│ ├── radarr.py # RadarrClient (API v3) +│ ├── sonarr.py # SonarrClient (API v3) +│ ├── sync.py # Haupt-Sync-Logik (Emby→DB, Sonarr→DB, Radarr→DB) +│ ├── cleanup_engine.py # Rule-Evaluation + Execution +│ ├── staging.py # Soft-Delete-System +│ ├── notifications.py # 
Multi-Channel mit Templates +│ ├── audit.py # Audit-Logging +│ └── version.py # Git/GitHub Version-Check +└── api/routes/ + ├── activity.py # GET /activity/, /stats, /active + ├── audit.py # GET/DELETE /audit/logs, /recent, etc. + ├── auth.py # POST /auth/login, /register, /refresh, etc. + ├── jobs.py # GET/POST /jobs/, trigger, interval + ├── libraries.py # GET /libraries/, /stats, /{id}/details, /media, /activity + ├── media.py # GET /media/stats, /dashboard-stats, /watch-stats, /audit-log + ├── notifications.py # CRUD /notifications/, test, preview-template + ├── rules.py # CRUD /rules/, templates, export/import, bulk + ├── services.py # CRUD /services/, test, sync + ├── staging.py # GET/POST /staging/, restore, delete, settings + ├── setup.py # ★ NEU: GET /setup/status, POST /test-connection, /add-service, /complete, /skip + ├── system.py # GET /system/health, /stats, /settings, cleanup/preview + └── users.py # GET /users/, /{id}, /{id}/activity, PATCH hide ``` -### 2. Start Development Environment -```bash -docker compose -f docker-compose.dev.yml up --build +### Frontend-Struktur +``` +frontend/src/ +├── App.tsx # Routes (15 protected + Login/Register/Setup) + SetupGate +├── main.tsx # React Entry, QueryClient, Toaster +├── index.css # Tailwind @theme (dark-* + primary-* Palette) +├── components/ +│ ├── Layout.tsx # Sidebar, Header, Version-Check, Theme-Toggle, ★ Job-Badge +│ ├── ConfirmDialog.tsx # Modal (danger/warning/info) +│ ├── ResponsiveTable.tsx # Table → Cards auf Mobile +│ └── Skeleton.tsx # Loading-Skeletons +├── hooks/ +│ ├── useDebounce.ts # Generischer Debounce-Hook +│ └── useJobWebSocket.ts # ★ NEU: Globaler WebSocket-Hook (Auto-Reconnect, Toasts) +├── lib/ +│ ├── api.ts # Axios mit Token-Refresh-Interceptor +│ └── utils.ts # formatBytes, formatRelativeTime, formatDate, etc. 
+├── stores/ +│ ├── auth.ts # Zustand: Login/Logout/Sessions +│ ├── jobs.ts # ★ NEU: Zustand: WebSocket Job-State (runningJobs, recentCompletions) +│ └── theme.ts # Zustand: light/dark/system +├── types/index.ts # TypeScript Interfaces +└── pages/ + ├── Dashboard.tsx # Stats, Most Viewed/Popular, Libraries, Disk + ├── Libraries.tsx # Library-Grid mit Stats, Sync + ├── LibraryDetail.tsx # 3 Tabs: Overview, Media, Activity ⚠️ BUGGY + ├── Users.tsx # User-Tabelle mit Stats + ├── UserDetail.tsx # 2 Tabs: Overview, Activity + ├── Activity.tsx # Active Sessions + Activity-Log + ├── History.tsx # Cleanup-Audit-Log + ├── Jobs.tsx # ★ REWRITE: Live-Progress-Bars, WebSocket-Status, Running-Panel + ├── SetupWizard.tsx # ★ NEU: Geführter 5-Step Setup-Wizard + ├── Rules.tsx # CRUD + Templates + Export/Import + ├── Services.tsx # Service-Connections CRUD + ├── Settings.tsx # System-Settings, Password, Cache + ├── Staging.tsx # Staged Items + Library-Settings + ├── Notifications.tsx # Channels CRUD + Template-Editor + ├── Preview.tsx # Cleanup Dry-Run Preview + ├── Login.tsx # Auth + └── Register.tsx # Erstbenutzer-Setup ``` -### 3. 
Access the App -- Frontend: http://localhost:5173 -- Backend API: http://localhost:8080/docs +### Datenbank-Models (Wichtigste) +| Model | Tabelle | Beschreibung | +|-------|---------|-------------| +| User | users | App-Benutzer (Admin-Login) | +| RefreshToken | refresh_tokens | JWT Refresh Tokens mit Device-Info | +| ServiceConnection | service_connections | Sonarr/Radarr/Emby/Jellyfin Verbindungen | +| Library | libraries | Synchronisierte Bibliotheken mit Staging-Config | +| MediaItem | media_items | Alle Medien (Filme, Serien, Episoden) mit Watch-Status | +| CleanupRule | cleanup_rules | Regelwerk mit JSON-Conditions | +| MediaServerUser | media_server_users | Emby/Jellyfin-Benutzer | +| UserWatchHistory | user_watch_history | Watch-History pro User pro Item | +| PlaybackActivity | playback_activities | Playback-Sessions mit Client/Device/Transcode | +| CleanupLog | cleanup_logs | Cleanup-Audit-Trail | +| NotificationChannel | notification_channels | Notification-Konfigurationen | +| JobExecutionLog | job_execution_logs | Scheduler-Job-History | +| ImportStats | import_stats | Sync-Statistiken | +| AuditLog | audit_logs | Admin-Action-Audit | +| SystemSettings | system_settings | Key-Value Settings | + +--- + +## Implementierungsstatus nach Phasen -## Known Issues / Potential Improvements +> Referenz: `PLANNED_FEATURES.md` -### 1. Timezone Display (May Need Verification) -The frontend now appends `Z` to ISO strings without timezone indicators. This should fix the 1-hour offset. **Verify after next deployment.** +### Phase 1 – Foundation ✅ ERLEDIGT +- [x] MediaServerUser model +- [x] UserWatchHistory model +- [x] PlaybackActivity model +- [x] Basic user stats on Dashboard +- [x] Library stats API -### 2. Auto-Restore Job -When staging is disabled, the job now shows "skipped" status instead of "error". 
The logic is: -- `staging.py`: Returns `{"success": False, "error": "Auto-restore is not enabled"}` -- `scheduler.py`: Checks if error message contains "not enabled" → marks as "skipped" +### Phase 2 – Views & Navigation ⚠️ TEILWEISE ERLEDIGT +| Feature | Backend | Frontend | Qualität | Anmerkungen | +|---------|---------|----------|----------|-------------| +| Library Detail – Overview Tab | ✅ API liefert 24h/7d/30d Stats | ✅ Dargestellt | ⚠️ | Kein Genre-Chart, keine Poster-Bilder | +| Library Detail – Media Tab | ✅ Sortierung, Suche, Pagination | ✅ Tabelle | ⚠️ | Kein Grid-View, nur Tabelle. Sortierung funktioniert | +| Library Detail – Activity Tab | ✅ Pagination | ✅ Tabelle | ⚠️ | Kein IP, Device, Expand-Row | +| Users Page | ✅ Pagination, Search | ✅ ResponsiveTable | ✅ | Gut implementiert | +| User Detail – Overview | ✅ Time-based Stats | ✅ | ⚠️ | Kein Favorite Genres | +| User Detail – Activity | ✅ Filters | ✅ Tabelle | ⚠️ | Kein Library-Filter, kein Expand-Row | +| User Detail – Timeline Tab | ❌ Kein API | ❌ | ❌ | Nicht implementiert | +| Global Activity Log | ✅ Stats + Active Sessions | ✅ | ⚠️ | Kein IP, kein Device, kein Library-Filter, kein Items-per-Page | +| Activity Stats API | ✅ plays by day/hour/weekday | ❌ Charts fehlen | ⚠️ | Backend liefert Daten, Frontend zeigt keine Charts | +| Active Sessions | ✅ 30s Sync | ✅ 30s Refresh | ✅ | Gut implementiert | -### 3. Theme Toggle -The theme system uses Tailwind v4's `@custom-variant` feature for manual toggle. The toggle cycles: Light → Dark → System. 
+### Phase 3 – Statistics & Charts ❌ NICHT BEGONNEN +| Feature | Backend-API | Frontend | Anmerkungen | +|---------|------------|----------|-------------| +| Daily Play Count Chart (Stacked Area) | ✅ `/activity/stats` liefert `plays_by_day` | ❌ | recharts installiert, nicht benutzt | +| Play Count by Day of Week (Bar) | ✅ `/activity/stats` liefert `plays_by_day_of_week` | ❌ | Daten vorhanden | +| Play Count by Hour (Bar) | ✅ `/activity/stats` liefert `plays_by_hour` | ❌ | Daten vorhanden | +| Genre Distribution (Radar/Spider) | ❌ Kein API | ❌ | Backend müsste Genre-Aggregation liefern | -## Key Files Modified +### ★ NEU: WebSocket Real-Time System ✅ ERLEDIGT (Session 2) +| Feature | Backend | Frontend | Qualität | Anmerkungen | +|---------|---------|----------|----------|-------------| +| ConnectionManager | ✅ `core/websocket.py` | — | ✅ | Broadcast, job_started/progress/completed Events | +| WebSocket Endpoint | ✅ `/api/ws/jobs` in main.py | — | ✅ | Ping/Pong Support | +| Progress Callbacks | ✅ sync.py (alle 3 Services) | — | ✅ | Sonarr: je 5 Serien, Radarr: je 10 Filme, Emby: 6 Phasen | +| Scheduler Integration | ✅ Alle 5 Job-Funktionen | — | ✅ | started/progress/completed Events | +| Global WebSocket Hook | — | ✅ `useJobWebSocket.ts` | ✅ | Auto-Reconnect (exp. 
Backoff), 30s Ping | +| Toast Notifications | — | ✅ In Layout.tsx (global) | ✅ | Started/Completed/Failed Toasts auf jeder Seite | +| Zustand Job Store | — | ✅ `stores/jobs.ts` | ✅ | runningJobs Map, wsStatus, recentCompletions | +| Jobs Page Rewrite | — | ✅ `Jobs.tsx` (~650 Zeilen) | ✅ | Live Progress-Bars, Running-Panel, ElapsedTime, WS-Indikator | +| Layout Job Badge | — | ✅ `Layout.tsx` | ✅ | Animierter blauer Badge mit laufenden Jobs Count | -| File | Changes | -|------|---------| -| `backend/app/scheduler.py` | Added "skipped" status for disabled staging | -| `frontend/src/lib/utils.ts` | Added centralized `formatDateTime` with timezone fix | -| `frontend/src/stores/theme.ts` | NEW: Theme store with Zustand | -| `frontend/src/pages/Settings.tsx` | Added Staging System configuration UI | -| `frontend/src/pages/Jobs.tsx` | Added "skipped" badge, imported formatDateTime | -| `frontend/src/components/Layout.tsx` | Theme toggle button, fixed sidebar collapse | +### ★ NEU: Setup Wizard ✅ ERLEDIGT (Session 2) +| Feature | Backend | Frontend | Qualität | Anmerkungen | +|---------|---------|----------|----------|-------------| +| Setup Status API | ✅ `GET /setup/status` | — | ✅ | Prüft Services + SystemSettings, kein Auth nötig | +| Test Connection | ✅ `POST /setup/test-connection` | — | ✅ | Temporäre Verbindung testen ohne Speichern | +| Add Service | ✅ `POST /setup/add-service` | — | ✅ | Im Wizard-Kontext | +| Complete/Skip | ✅ `POST /setup/complete`, `/skip` | — | ✅ | Validierung: min. 
1 Arr + 1 Media Server | +| Wizard UI | — | ✅ `SetupWizard.tsx` (~550 Zeilen) | ✅ | 5-Step: Welcome→Arr→MediaServer→Sync→Complete | +| Setup Gate | — | ✅ `App.tsx` SetupGate | ✅ | Redirect nach /setup wenn nicht abgeschlossen | +| Service Order | — | ✅ | ✅ | Erzwingt Sonarr/Radarr vor Emby/Jellyfin | +| Initial Sync | — | ✅ | ✅ | Sequentieller Sync aller Services mit Status-Anzeige | +| Skip Option | — | ✅ | ✅ | Setup überspringen möglich | -## Next Steps (Optional) +### Phase 4 – Advanced Analytics ❌ NICHT BEGONNEN +- [ ] User Activity Timeline / Calendar Heatmap +- [ ] Watch Patterns Heatmap (7x24 Grid) +- [ ] Concurrent Streams Analysis +- [ ] Watch Duration Stats +- [ ] Completion Rate Analytics +- [ ] Binge-Watch Detection +- [ ] Shared vs. Solo Content Analysis -1. **Test the Staging UI**: Verify the staging settings form saves correctly -2. **Test Timezone Fix**: Confirm dates display in correct local time -3. **Test Theme Toggle**: Verify Light/Dark/System modes work correctly -4. **Continue with Roadmap**: Check README.md for remaining roadmap items +### Phase 5 – Smart Cleanup Integration ❌ NICHT BEGONNEN +- [ ] Cleanup Rules per User ("Delete only if NO user watched in X days") +- [ ] User-Specific Exclusions ("Never delete if User X has favorite") +- [ ] Enhanced Currently Watching Detection +- [ ] Analytics-based Cleanup Suggestions --- -**Note**: The repository has been renamed from `MediaCurator` to `MediaCleanup`. Update your remotes if needed: +## Bekannte Bugs (priorisiert) + +### 🔴 CRITICAL + +#### BUG-001: LibraryDetail.tsx – Doppeltes `/api/` Prefix +- **Datei**: `frontend/src/pages/LibraryDetail.tsx` +- **Problem**: Die Seite benutzt Pfade wie `/api/libraries/${id}/details`, aber die Axios-BaseURL ist bereits `/api`. Das erzeugt Requests an `/api/api/libraries/...`. 
+- **Auswirkung**: **Seite ist wahrscheinlich komplett kaputt** (404 auf alle Requests) +- **Fix**: Alle Fetch-Calls auf relative Pfade ohne `/api/` Prefix umstellen, oder besser: auf React Query + `api.get()` migrieren (wie alle anderen Seiten) +- **Status**: OFFEN + +### 🟡 HIGH + +#### BUG-002: Light-Mode kaputt auf Login/Register +- **Dateien**: `frontend/src/pages/Login.tsx`, `frontend/src/pages/Register.tsx` +- **Problem**: Hart-kodierte Dark-Klassen (`bg-dark-800`, `text-white`, `text-dark-200`) ohne `dark:` Prefix-Varianten +- **Auswirkung**: Im Light-Mode erscheint ein dunkler Kasten auf hellem Hintergrund. Text teilweise unsichtbar. +- **Fix**: Alle Farbklassen auf `bg-white dark:bg-dark-800`, `text-gray-900 dark:text-white`, etc. umstellen +- **Status**: OFFEN + +#### BUG-003: ConfirmDialog – Light-Mode kaputt +- **Datei**: `frontend/src/components/ConfirmDialog.tsx` +- **Problem**: Hart-kodiert `bg-dark-800`. Cancel-Button im Light-Mode unsichtbar. +- **Fix**: `dark:` Varianten hinzufügen +- **Status**: OFFEN + +#### BUG-004: Skeleton – Light-Mode kaputt +- **Datei**: `frontend/src/components/Skeleton.tsx` +- **Problem**: Hart-kodiert `bg-dark-700`, `bg-dark-800`. Ladeanimationen werden zu schwarzen Rechtecken. +- **Fix**: `bg-gray-200 dark:bg-dark-700` etc. 
+- **Status**: OFFEN + +### 🟢 MEDIUM + +#### BUG-005: Toast-Notifications – Light-Mode +- **Datei**: `frontend/src/main.tsx` +- **Problem**: Toaster-Farben hart-kodiert auf dunkel (`#1e293b`) +- **Fix**: CSS-Variablen oder Theme-aware Konfiguration +- **Status**: OFFEN + +#### BUG-006: Sprachmix Deutsch/Englisch +- **Dateien**: + - `frontend/src/components/Layout.tsx`: Update-Banner mit `"Update verfügbar!"`, `"Changelog ansehen"`, `"Aktuelle Version"` + - `frontend/src/lib/utils.ts`: `formatDate`/`formatDateTime` benutzen `'de-DE'` Locale +- **Fix**: Alles auf Englisch (oder ein i18n-System), Locale konfigurierbar machen +- **Status**: OFFEN + +#### BUG-007: LibraryDetail.tsx – Kein React Query +- **Datei**: `frontend/src/pages/LibraryDetail.tsx` +- **Problem**: Einzige Seite die manuell `useState` + `useEffect` benutzt statt React Query. Kein Caching, kein Retry, inkonsistent. +- **Fix**: Auf `useQuery`/`useMutation` migrieren wie alle anderen Seiten +- **Status**: OFFEN + +#### BUG-008: Auth-Store – fetchUser nie aufgerufen +- **Datei**: `frontend/src/stores/auth.ts` +- **Problem**: `isAuthenticated` initialisiert sich aus `!!getToken()`, aber `user` kommt nur aus persistiertem Store. `fetchUser` wird beim App-Start nie aufgerufen → evtl. veraltete User-Daten. +- **Fix**: `fetchUser()` bei App-Init aufrufen (z.B. 
in `App.tsx` oder `ProtectedRoute`) +- **Status**: OFFEN + +### 🔵 LOW + +#### BUG-009: Code-Duplizierung – formatBytes +- **Dateien**: `LibraryDetail.tsx`, `Preview.tsx` haben eigene `formatBytes` statt `utils.ts` zu importieren +- **Fix**: Löschen und aus `lib/utils.ts` importieren +- **Status**: OFFEN + +#### BUG-010: Users.tsx – Eigener Debounce statt Hook +- **Datei**: `frontend/src/pages/Users.tsx` +- **Problem**: Manueller `setTimeout`-Debounce statt `useDebounce` Hook +- **Fix**: `useDebounce` aus `hooks/useDebounce.ts` verwenden +- **Status**: OFFEN + +--- + +## UX/UI-Probleme + +### Mobile-Responsiveness – Tabellen laufen über +| Seite | Nutzt ResponsiveTable? | Status | +|-------|----------------------|--------| +| History.tsx | ✅ Ja | ✅ Gut | +| Users.tsx | ✅ Ja | ✅ Gut | +| LibraryDetail.tsx | ❌ Raw `` | ❌ Überläuft auf Mobile | +| UserDetail.tsx | ❌ Raw `
<table>` | ❌ Überläuft auf Mobile | +| Activity.tsx | ❌ Raw `<table>` | ❌ Überläuft auf Mobile |
+| Preview.tsx | ❌ Raw `<table>` | ❌ Überläuft auf Mobile |
+| Staging.tsx | ❌ Raw `<table>
` | ❌ Überläuft auf Mobile | +| Jobs.tsx | ❌ Teilweise Cards | ⚠️ Executions-Tabelle überläuft | + +### Fehlende UI-Features (geplant aber nicht implementiert) +- **Activity-Seite**: IP-Adresse Spalte, Device-Spalte, Expand-Row (Bitrate, Resolution, Codecs), Library-Filter, Items-per-Page Selector +- **LibraryDetail**: Genre-Distribution Charts, Grid-View mit Poster-Bildern, Expand-Row +- **UserDetail**: Favorite Genres Sektion, Timeline-Tab +- **Rules.tsx**: Modal ist sehr lang – kein Wizard/Accordion, keine Genre/Tag-Autocomplete +- **Settings.tsx**: Cron-Eingaben ohne Hilfe/Validierung +- **Dashboard**: Keine Charts (recharts installiert aber unbenutzt) + +### Performance +- **Kein Code-Splitting**: Alle Seiten eagerly importiert in `App.tsx`. `React.lazy` + `Suspense` fehlt. +- **Jobs.tsx**: 5-Sekunden-Refetch auf 2 Queries = konstanter Netzwerk-Traffic +- **Staging.tsx**: Beide Queries auf 30s Refetch – könnte reduziert werden + +### Accessibility +- Kein `aria-label` auf Mobile-Hamburger-Button in Layout +- ConfirmDialog hat keinen Focus-Trap / `aria-modal` +- Farbkontrast im Light-Mode gebrochen (siehe Bugs oben) + +--- + +## Code-Qualitätsprobleme + +### Frontend-Inkonsistenzen +| Problem | Betroffene Dateien | Beschreibung | +|---------|--------------------|-------------| +| Fetch-Pattern | LibraryDetail.tsx | Einzige Seite ohne React Query | +| Utility-Duplizierung | LibraryDetail.tsx, Preview.tsx | Eigene formatBytes/formatDuration statt utils.ts | +| Debounce-Pattern | Users.tsx | Manuell statt useDebounce Hook | +| API-Prefix | LibraryDetail.tsx | Doppeltes /api/ | +| Theme-Klassen | Login, Register, ConfirmDialog, Skeleton | Fehlende dark: Varianten | + +### Backend – Keine bekannten kritischen Issues +Das Backend ist gut strukturiert mit: +- Sauberer Abstraktion (BaseServiceClient → Emby/Radarr/Sonarr) +- Vollständige API-Coverage für alle Features +- Rate Limiting auf allen Routes +- Audit-Logging +- Proper Error Handling +- Caching in EmbyClient 
(SimpleCache mit TTL) + +--- + +## Was gut funktioniert + +### Backend ✅ +- **Sync-Engine**: Emby→DB Sync (Users, Watch History, Active Sessions, Libraries) robust implementiert +- **Cleanup-Engine**: Rule-Evaluation mit detailliertem Preview/Dry-Run, Grace Periods, Import Exclusions +- **Staging-System**: Soft-Delete mit Emby Staging-Library, Auto-Restore bei Watch, Per-Library Settings +- **Notifications**: Apprise-basiert mit Template-Engine (Mustache-like), Retry-Logik, Event-Type-Filtering +- **API-Design**: RESTful, gut paginiert, konsistente Error Responses + +### Frontend ✅ +- **Dashboard**: Gute Übersicht, Time-Range-Selector, Most Viewed/Popular/Active Users +- **Rules + Preview**: Templates, Export/Import, Bulk-Actions, Dry-Run mit detailliertem Reasoning +- **Staging UI**: Durchdachtes 2-Tab-Layout, Urgency-Farbcodierung, Global + Per-Library Settings +- **Notifications**: Multi-URL-Input, Template-Editor mit Variable-Suggestions, Live-Preview +- **Services & Jobs**: Übersichtlich, Test-Verbindung, manueller Sync-Trigger, editierbare Intervalle +- **Auth-Flow**: Setup-Required Check, Refresh-Token mit Queue für Concurrent Requests +- **ResponsiveTable-Komponente**: Gut gebaut (Table→Cards auf Mobile), wird nur zu selten benutzt + +--- + +## Nächste Schritte (priorisiert) + +### Priorität 1: Kritische Bugs fixen +1. **BUG-001**: LibraryDetail.tsx `/api/`-Prefix fixen + auf React Query migrieren + Utils importieren +2. **BUG-002/003/004**: Light-Mode fixen (Login, Register, ConfirmDialog, Skeleton) + +### Priorität 2: UX-Verbesserungen +3. **BUG-006**: Sprachmix Deutsch→Englisch bereinigen +4. **Mobile Tables**: ResponsiveTable in Activity, UserDetail, Preview, Staging, LibraryDetail einsetzen +5. **BUG-005**: Toast-Theming fixen + +### Priorität 3: Phase 3 – Charts implementieren +6. **Daily Play Count Chart**: Stacked Area Chart mit recharts, Daten kommen von `/activity/stats` → `plays_by_day` +7. 
**Plays by Day of Week**: Bar Chart, Daten: `plays_by_day_of_week` +8. **Plays by Hour**: Bar Chart, Daten: `plays_by_hour` +9. **Genre Distribution**: Backend-API für Genre-Aggregation nötig, dann Radar/Spider Chart + +### Priorität 4: Fehlende Phase 2 Features +10. **Activity-Seite erweitern**: IP, Device Spalten, Library-Filter, Items-per-Page, Expand-Row +11. **UserDetail erweitern**: Favorite Genres, Library-Filter auf Activity, Expand-Row +12. **LibraryDetail erweitern**: Genre-Charts, Grid-View, Expand-Row + +### Priorität 5: Code-Qualität +13. Code-Splitting mit React.lazy +14. BUG-008: fetchUser bei App-Init +15. BUG-009/010: Code-Duplizierung/Debounce aufräumen + +### Priorität 6: Phase 4+ (Zukunft) +16. Advanced Analytics (Heatmaps, Completion Rates, Binge Detection) +17. Smart Cleanup Rules (Per-User Conditions) + +--- + +## Dateistruktur-Referenz + +### Wichtige Konfigurationsdateien +| Datei | Beschreibung | +|-------|-------------| +| `docker-compose.dev.yml` | Dev-Umgebung (Hot-Reload) | +| `docker-compose.yml` | Production mit SQLite | +| `docker-compose.postgres.yml` | Production mit PostgreSQL | +| `backend/pyproject.toml` | Python Dependencies | +| `frontend/package.json` | Node Dependencies | +| `frontend/vite.config.ts` | Vite Config (Proxy → Backend) | +| `frontend/src/index.css` | Tailwind @theme Definition | + +### API-Endpunkte (Kurzreferenz) +| Prefix | Router | Beschreibung | +|--------|--------|-------------| +| `/api/auth` | auth.py | Login, Register, Sessions | +| `/api/services` | services.py | Service CRUD, Test, Sync | +| `/api/libraries` | libraries.py | Libraries, Stats, Detail, Media | +| `/api/media` | media.py | Dashboard-Stats, Watch-Stats | +| `/api/rules` | rules.py | Rules CRUD, Templates, Export | +| `/api/activity` | activity.py | Activity Log, Stats, Active Sessions | +| `/api/users` | users.py | Media Server Users | +| `/api/notifications` | notifications.py | Channels CRUD, Templates | +| `/api/staging` | staging.py | 
Staging System | +| `/api/jobs` | jobs.py | Scheduler Jobs | +| `/api/setup` | setup.py | ★ NEU: Setup Wizard Status, Test, Add, Complete, Skip | +| `/api/system` | system.py | Health, Settings, Cleanup, Version | +| `/api/ws/jobs` | main.py | ★ NEU: WebSocket für Real-Time Job-Progress | +| `/api/audit` | audit.py | Audit Logs (Admin) | + +--- + +## Entwicklungsumgebung starten + ```bash -git remote set-url origin https://github.com/Serph91P/MediaCleanup.git +# Repository klonen +git clone https://github.com/Serph91P/MediaCleanup.git +cd MediaCleanup +git checkout develop + +# Dev-Umgebung starten (Hot-Reload für Frontend + Backend) +docker compose -f docker-compose.dev.yml up --build + +# Zugriff: +# Frontend: http://localhost:5173 +# Backend API Docs: http://localhost:8080/docs +# Erster Login erstellt Admin-Account ``` + +--- + +## Änderungshistorie dieses Dokuments + +| Datum | Änderung | +|-------|----------| +| 22.02.2026 | Vollständige Neuaufsetzung: Kompletter Code-Review (Backend + Frontend), Bug-Katalog mit 10 Einträgen, UX-Analyse, Priorisierte Roadmap, Architektur-Dokumentation | +| 22.02.2026 (2) | **Session 2**: WebSocket Real-Time System (ConnectionManager, Progress-Callbacks für Sonarr/Radarr/Emby, Scheduler-Integration), Global Toast-Notifications via WebSocket, Jobs-Page komplett neu geschrieben (Live-Progress-Bars, Running-Panel, WS-Status-Indikator), Layout.tsx Job-Badge, Setup-Wizard (Backend: /setup/status, /test-connection, /add-service, /complete, /skip; Frontend: 5-Step geführter Wizard mit Welcome→Arr→MediaServer→Sync→Complete), App.tsx SetupGate Redirect-Logik | +| 30.12.2024 | Initiale Version: Session-Zusammenfassung (Rules Export, Sidebar, Theme Toggle, Staging UI) | diff --git a/backend/app/api/routes/setup.py b/backend/app/api/routes/setup.py new file mode 100644 index 0000000..47c38e3 --- /dev/null +++ b/backend/app/api/routes/setup.py @@ -0,0 +1,254 @@ +""" +Setup Wizard API routes. 
+ +Provides endpoints for the first-time setup wizard that guides users +through adding services in the correct order (Sonarr/Radarr first, then Emby). +""" +from fastapi import APIRouter, Depends, HTTPException, status, Request +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func +from typing import List, Optional +from pydantic import BaseModel + +from ...core.database import get_db +from ...core.rate_limit import limiter, RateLimits +from ...models import ServiceConnection, ServiceType, SystemSettings, User +from ...schemas import ServiceConnectionCreate, ServiceConnectionResponse, ServiceConnectionTest +from ...services import SonarrClient, RadarrClient, EmbyClient +from ..deps import get_current_user + +router = APIRouter(prefix="/setup", tags=["Setup Wizard"]) + + +# ==================== Response Schemas ==================== + +class ServiceInfo(BaseModel): + id: int + name: str + service_type: str + is_enabled: bool + + class Config: + from_attributes = True + + +class SetupStatusResponse(BaseModel): + setup_complete: bool + has_users: bool + has_arr_service: bool + has_media_server: bool + services: List[ServiceInfo] + current_step: str # "welcome", "arr_services", "media_server", "sync", "complete" + + +class SetupTestRequest(BaseModel): + service_type: str + url: str + api_key: str + verify_ssl: bool = True + timeout: int = 120 + + +class SetupCompleteResponse(BaseModel): + success: bool + message: str + + +# ==================== Endpoints ==================== + +@router.get("/status", response_model=SetupStatusResponse) +@limiter.limit(RateLimits.API_READ) +async def get_setup_status( + request: Request, + db: AsyncSession = Depends(get_db) +): + """ + Check setup wizard status. No auth required so the frontend + can decide whether to redirect to the wizard before login. 
+ """ + # Check if users exist + user_count = await db.scalar(select(func.count(User.id))) + has_users = (user_count or 0) > 0 + + # Check if setup was explicitly completed + result = await db.execute( + select(SystemSettings).where(SystemSettings.key == "setup_complete") + ) + setup_setting = result.scalar_one_or_none() + setup_marked_complete = setup_setting is not None and setup_setting.value is True + + # Check existing services + svc_result = await db.execute(select(ServiceConnection)) + services = svc_result.scalars().all() + + has_arr = any( + s.service_type in (ServiceType.SONARR, ServiceType.RADARR) + for s in services + ) + has_media_server = any( + s.service_type in (ServiceType.EMBY, ServiceType.JELLYFIN) + for s in services + ) + + # Determine current step + if setup_marked_complete: + current_step = "complete" + elif not has_users: + current_step = "welcome" + elif not has_arr: + current_step = "arr_services" + elif not has_media_server: + current_step = "media_server" + else: + current_step = "sync" + + service_infos = [ + ServiceInfo( + id=s.id, + name=s.name, + service_type=s.service_type.value, + is_enabled=s.is_enabled + ) + for s in services + ] + + return SetupStatusResponse( + setup_complete=setup_marked_complete, + has_users=has_users, + has_arr_service=has_arr, + has_media_server=has_media_server, + services=service_infos, + current_step=current_step, + ) + + +@router.post("/test-connection", response_model=ServiceConnectionTest) +@limiter.limit(RateLimits.TEST_OPERATION) +async def test_connection( + request: Request, + data: SetupTestRequest, + current_user=Depends(get_current_user), +): + """Test a service connection during setup without saving it.""" + temp = ServiceConnection( + name="setup_test", + service_type=ServiceType(data.service_type), + url=data.url.rstrip("/"), + api_key=data.api_key, + verify_ssl=data.verify_ssl, + timeout=data.timeout, + ) + + if temp.service_type == ServiceType.SONARR: + client = SonarrClient(url=temp.url, 
api_key=temp.api_key, + verify_ssl=temp.verify_ssl, timeout=temp.timeout) + elif temp.service_type == ServiceType.RADARR: + client = RadarrClient(url=temp.url, api_key=temp.api_key, + verify_ssl=temp.verify_ssl, timeout=temp.timeout) + elif temp.service_type in (ServiceType.EMBY, ServiceType.JELLYFIN): + client = EmbyClient(url=temp.url, api_key=temp.api_key, + verify_ssl=temp.verify_ssl, timeout=temp.timeout) + else: + raise HTTPException(status_code=400, detail=f"Unknown service type: {data.service_type}") + + try: + result = await client.test_connection() + return ServiceConnectionTest(**result) + finally: + await client.close() + + +@router.post("/add-service", response_model=ServiceConnectionResponse, status_code=status.HTTP_201_CREATED) +@limiter.limit(RateLimits.API_WRITE) +async def add_service( + request: Request, + service_data: ServiceConnectionCreate, + db: AsyncSession = Depends(get_db), + current_user=Depends(get_current_user), +): + """Add a service during setup wizard (same as POST /services/ but scoped to wizard).""" + service = ServiceConnection(**service_data.model_dump()) + db.add(service) + await db.commit() + await db.refresh(service) + return service + + +@router.post("/complete", response_model=SetupCompleteResponse) +@limiter.limit(RateLimits.API_WRITE) +async def complete_setup( + request: Request, + db: AsyncSession = Depends(get_db), + current_user=Depends(get_current_user), +): + """Mark the setup wizard as completed.""" + # Verify minimum requirements: at least one arr service and one media server + svc_result = await db.execute(select(ServiceConnection)) + services = svc_result.scalars().all() + + has_arr = any( + s.service_type in (ServiceType.SONARR, ServiceType.RADARR) + for s in services + ) + has_media_server = any( + s.service_type in (ServiceType.EMBY, ServiceType.JELLYFIN) + for s in services + ) + + if not has_arr: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="At least one Sonarr or Radarr 
service must be configured.", + ) + if not has_media_server: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="At least one Emby or Jellyfin media server must be configured.", + ) + + # Mark setup complete + result = await db.execute( + select(SystemSettings).where(SystemSettings.key == "setup_complete") + ) + existing = result.scalar_one_or_none() + if existing: + existing.value = True + else: + db.add(SystemSettings( + key="setup_complete", + value=True, + description="Whether the initial setup wizard has been completed" + )) + + await db.commit() + + return SetupCompleteResponse( + success=True, + message="Setup wizard completed successfully. You can now configure cleanup rules." + ) + + +@router.post("/skip") +@limiter.limit(RateLimits.API_WRITE) +async def skip_setup( + request: Request, + db: AsyncSession = Depends(get_db), + current_user=Depends(get_current_user), +): + """Skip the setup wizard (mark as complete without validation).""" + result = await db.execute( + select(SystemSettings).where(SystemSettings.key == "setup_complete") + ) + existing = result.scalar_one_or_none() + if existing: + existing.value = True + else: + db.add(SystemSettings( + key="setup_complete", + value=True, + description="Whether the initial setup wizard has been completed" + )) + + await db.commit() + + return {"success": True, "message": "Setup wizard skipped."} diff --git a/backend/app/core/websocket.py b/backend/app/core/websocket.py new file mode 100644 index 0000000..a6e8dc1 --- /dev/null +++ b/backend/app/core/websocket.py @@ -0,0 +1,102 @@ +""" +WebSocket connection manager for real-time job status updates. 
+""" +import asyncio +import json +from typing import Dict, Set, Any, Optional +from datetime import datetime, timezone +from fastapi import WebSocket +from loguru import logger + + +class ConnectionManager: + """Manages WebSocket connections and broadcasts job status updates.""" + + def __init__(self): + self.active_connections: Set[WebSocket] = set() + self._lock = asyncio.Lock() + + async def connect(self, websocket: WebSocket): + """Accept and register a new WebSocket connection.""" + await websocket.accept() + async with self._lock: + self.active_connections.add(websocket) + logger.debug(f"WebSocket connected. Active connections: {len(self.active_connections)}") + + async def disconnect(self, websocket: WebSocket): + """Remove a WebSocket connection.""" + async with self._lock: + self.active_connections.discard(websocket) + logger.debug(f"WebSocket disconnected. Active connections: {len(self.active_connections)}") + + async def broadcast(self, message: Dict[str, Any]): + """Send a message to all connected clients.""" + if not self.active_connections: + return + + data = json.dumps(message, default=str) + disconnected = set() + + async with self._lock: + for connection in self.active_connections: + try: + await connection.send_text(data) + except Exception: + disconnected.add(connection) + + # Clean up dead connections + self.active_connections -= disconnected + + async def send_job_started(self, job_id: str, job_name: str, details: Optional[Dict] = None): + """Broadcast that a job has started.""" + await self.broadcast({ + "type": "job_started", + "job_id": job_id, + "job_name": job_name, + "timestamp": datetime.now(timezone.utc).isoformat(), + "details": details or {} + }) + + async def send_job_progress( + self, + job_id: str, + job_name: str, + step: str, + progress_percent: Optional[float] = None, + current: Optional[int] = None, + total: Optional[int] = None, + details: Optional[Dict] = None + ): + """Broadcast job progress update.""" + await 
self.broadcast({ + "type": "job_progress", + "job_id": job_id, + "job_name": job_name, + "step": step, + "progress_percent": progress_percent, + "current": current, + "total": total, + "timestamp": datetime.now(timezone.utc).isoformat(), + "details": details or {} + }) + + async def send_job_completed(self, job_id: str, job_name: str, status: str, duration: Optional[float] = None, details: Optional[Dict] = None, error: Optional[str] = None): + """Broadcast that a job has completed.""" + await self.broadcast({ + "type": "job_completed", + "job_id": job_id, + "job_name": job_name, + "status": status, + "duration": duration, + "timestamp": datetime.now(timezone.utc).isoformat(), + "details": details or {}, + "error": error + }) + + @property + def has_connections(self) -> bool: + return len(self.active_connections) > 0 + + +# Global connection manager instance +ws_manager = ConnectionManager() diff --git a/backend/app/main.py b/backend/app/main.py index c1cbed8..b9c2919 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -2,7 +2,7 @@ Main FastAPI application. 
""" from pathlib import Path -from fastapi import FastAPI, Request +from fastapi import FastAPI, Request, WebSocket, WebSocketDisconnect from fastapi.middleware.cors import CORSMiddleware from fastapi.staticfiles import StaticFiles from fastapi.responses import FileResponse @@ -13,7 +13,8 @@ from .core.config import get_settings from .core.database import init_db, close_db from .core.rate_limit import setup_rate_limiting, limiter, RateLimits -from .api.routes import auth, services, rules, libraries, notifications, system, jobs, media, staging, audit, activity, users +from .core.websocket import ws_manager +from .api.routes import auth, services, rules, libraries, notifications, system, jobs, media, staging, audit, activity, users, setup settings = get_settings() @@ -97,6 +98,7 @@ async def lifespan(app: FastAPI): app.include_router(staging.router, prefix="/api/staging", tags=["staging"]) app.include_router(activity.router, prefix="/api/activity", tags=["activity"]) app.include_router(users.router, prefix="/api/users", tags=["users"]) +app.include_router(setup.router, prefix="/api") @app.get("/api/health") @@ -144,6 +146,22 @@ async def health_detailed(request: Request): return health_status +@app.websocket("/api/ws/jobs") +async def websocket_jobs(websocket: WebSocket): + """WebSocket endpoint for real-time job status updates.""" + await ws_manager.connect(websocket) + try: + while True: + # Keep connection alive, listen for pings + data = await websocket.receive_text() + if data == "ping": + await websocket.send_text('{"type":"pong"}') + except WebSocketDisconnect: + await ws_manager.disconnect(websocket) + except Exception: + await ws_manager.disconnect(websocket) + + # Mount static files for frontend assets (JS, CSS, etc.) 
if STATIC_DIR.exists(): app.mount("/assets", StaticFiles(directory=STATIC_DIR / "assets"), name="assets") diff --git a/backend/app/scheduler.py b/backend/app/scheduler.py index 84002f8..974e470 100644 --- a/backend/app/scheduler.py +++ b/backend/app/scheduler.py @@ -5,9 +5,11 @@ from apscheduler.triggers.interval import IntervalTrigger from loguru import logger from sqlalchemy import select +from typing import Optional, Dict, Any from .core.config import get_settings from .core.database import async_session_maker +from .core.websocket import ws_manager from .models import ServiceConnection, CleanupRule, SystemSettings, JobExecutionLog from .services.cleanup_engine import CleanupEngine from .services.sync import sync_service_media @@ -18,6 +20,27 @@ scheduler = AsyncIOScheduler() +def _make_progress_callback(job_id: str, job_name: str): + """Create a progress callback that broadcasts via WebSocket.""" + async def callback( + step: str, + progress_pct: Optional[float] = None, + current: Optional[int] = None, + total: Optional[int] = None, + details: Optional[Dict] = None + ): + await ws_manager.send_job_progress( + job_id=job_id, + job_name=job_name, + step=step, + progress_percent=progress_pct, + current=current, + total=total, + details=details + ) + return callback + + async def run_service_sync_job(service_id: int): """Sync media from a specific service.""" start_time = datetime.now(timezone.utc) @@ -55,7 +78,13 @@ async def run_service_sync_job(service_id: int): await db.commit() try: - sync_result = await sync_service_media(db, service) + # Create progress callback for WebSocket + progress_cb = _make_progress_callback(job_id, job_name) + + # Notify WebSocket clients that job started + await ws_manager.send_job_started(job_id, job_name) + + sync_result = await sync_service_media(db, service, progress_callback=progress_cb) # Update execution log end_time = datetime.now(timezone.utc) @@ -72,6 +101,13 @@ async def run_service_sync_job(service_id: int): await 
db.commit() logger.info(f"Sync completed for {service.name}: {sync_result}") + # Notify WebSocket clients that job completed + await ws_manager.send_job_completed( + job_id, job_name, "success", + duration=execution_log.duration_seconds, + details=sync_result + ) + except Exception as e: error_msg = f"Sync failed for {service.name}: {e}" logger.error(error_msg) @@ -82,6 +118,13 @@ async def run_service_sync_job(service_id: int): execution_log.duration_seconds = (end_time - start_time).total_seconds() execution_log.error_message = str(e) await db.commit() + + # Notify WebSocket clients that job failed + await ws_manager.send_job_completed( + job_id, job_name, "error", + duration=execution_log.duration_seconds, + error=str(e) + ) except Exception as e: error_msg = f"Sync job failed for service {service_id}: {e}" @@ -123,9 +166,24 @@ async def run_sync_job(): synced_count = 0 errors = [] - for service in services: + # Notify WebSocket clients that job started + await ws_manager.send_job_started("sync_job", "Media Sync") + + for svc_idx, service in enumerate(services): try: - sync_result = await sync_service_media(db, service) + svc_job_id = f"sync_job_sub_{service.id}" + svc_job_name = f"Media Sync: {service.name}" + progress_cb = _make_progress_callback(svc_job_id, svc_job_name) + + await ws_manager.send_job_progress( + "sync_job", "Media Sync", + step=f"Syncing service {svc_idx+1}/{len(services)}: {service.name}", + progress_percent=(svc_idx / max(len(services), 1)) * 100, + current=svc_idx, + total=len(services) + ) + + sync_result = await sync_service_media(db, service, progress_callback=progress_cb) synced_count += sync_result.get('synced', 0) logger.info(f"Sync completed for {service.name}: {sync_result}") except Exception as e: @@ -147,6 +205,14 @@ async def run_sync_job(): execution_log.error_message = "; ".join(errors) await db.commit() + + # Notify WebSocket clients that job completed + await ws_manager.send_job_completed( + "sync_job", "Media Sync", + 
status="success" if not errors else "error", + duration=execution_log.duration_seconds, + details={"services_synced": len(services), "errors": errors} + ) except Exception as e: error_msg = f"Sync job failed: {e}" @@ -160,6 +226,12 @@ async def run_sync_job(): execution_log.duration_seconds = (end_time - start_time).total_seconds() execution_log.error_message = str(e) await db.commit() + + await ws_manager.send_job_completed( + "sync_job", "Media Sync", "error", + duration=execution_log.duration_seconds, + error=str(e) + ) async def run_cleanup_job(): @@ -181,6 +253,8 @@ async def run_cleanup_job(): db.add(execution_log) await db.commit() + await ws_manager.send_job_started("cleanup_job", "Cleanup Check") + engine = CleanupEngine(db) # First, evaluate rules and flag items @@ -244,6 +318,12 @@ async def run_cleanup_job(): } await db.commit() + await ws_manager.send_job_completed( + "cleanup_job", "Cleanup Check", "success", + duration=execution_log.duration_seconds, + details={"rules_evaluated": len(rules), "items_flagged": total_flagged} + ) + except Exception as e: error_msg = f"Cleanup job failed: {e}" logger.error(error_msg) @@ -256,6 +336,12 @@ async def run_cleanup_job(): execution_log.duration_seconds = (end_time - start_time).total_seconds() execution_log.error_message = str(e) await db.commit() + + await ws_manager.send_job_completed( + "cleanup_job", "Cleanup Check", "error", + duration=execution_log.duration_seconds, + error=str(e) + ) async def run_staging_cleanup_job(): @@ -277,6 +363,8 @@ async def run_staging_cleanup_job(): db.add(execution_log) await db.commit() + await ws_manager.send_job_started("staging_cleanup_job", "Staging Cleanup") + from .services.staging import StagingService from .services.emby import EmbyService @@ -300,6 +388,13 @@ async def run_staging_cleanup_job(): logger.info(f"Staging cleanup completed: {result}") + await ws_manager.send_job_completed( + "staging_cleanup_job", "Staging Cleanup", + status="success" if 
result.get('success') else "error", + duration=execution_log.duration_seconds, + details=result + ) + except Exception as e: error_msg = f"Staging cleanup job failed: {e}" logger.error(error_msg) @@ -312,6 +407,12 @@ async def run_staging_cleanup_job(): execution_log.duration_seconds = (end_time - start_time).total_seconds() execution_log.error_message = str(e) await db.commit() + + await ws_manager.send_job_completed( + "staging_cleanup_job", "Staging Cleanup", "error", + duration=execution_log.duration_seconds if execution_log else 0, + error=str(e) + ) async def run_auto_restore_job(): @@ -333,6 +434,8 @@ async def run_auto_restore_job(): db.add(execution_log) await db.commit() + await ws_manager.send_job_started("auto_restore_job", "Auto-Restore Watched") + from .services.staging import StagingService from .services.emby import EmbyService @@ -359,6 +462,13 @@ async def run_auto_restore_job(): logger.info(f"Auto-restore completed: {result}") + await ws_manager.send_job_completed( + "auto_restore_job", "Auto-Restore Watched", + status=execution_log.status, + duration=execution_log.duration_seconds, + details=result + ) + except Exception as e: error_msg = f"Auto-restore job failed: {e}" logger.error(error_msg) @@ -371,6 +481,12 @@ async def run_auto_restore_job(): execution_log.duration_seconds = (end_time - start_time).total_seconds() execution_log.error_message = str(e) await db.commit() + + await ws_manager.send_job_completed( + "auto_restore_job", "Auto-Restore Watched", "error", + duration=execution_log.duration_seconds if execution_log else 0, + error=str(e) + ) def start_scheduler(): diff --git a/backend/app/services/sync.py b/backend/app/services/sync.py index 8e603b8..da5ecfa 100644 --- a/backend/app/services/sync.py +++ b/backend/app/services/sync.py @@ -2,7 +2,7 @@ Sync service for fetching media items from services. 
""" import asyncio -from typing import Dict, Any +from typing import Dict, Any, Optional, Callable, Awaitable from datetime import datetime, timezone from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select @@ -14,10 +14,14 @@ from .radarr import RadarrClient from .emby import EmbyClient +# Type for progress callback +ProgressCallback = Optional[Callable[[str, Optional[float], Optional[int], Optional[int], Optional[Dict]], Awaitable[None]]] + async def sync_service_media( db: AsyncSession, - service: ServiceConnection + service: ServiceConnection, + progress_callback: ProgressCallback = None ) -> Dict[str, Any]: """Sync media items from a service connection.""" logger.info(f"Starting sync for service: {service.name}") @@ -29,11 +33,11 @@ async def sync_service_media( try: if service.service_type == ServiceType.SONARR: - result = await _sync_sonarr(db, service) + result = await _sync_sonarr(db, service, progress_callback) elif service.service_type == ServiceType.RADARR: - result = await _sync_radarr(db, service) + result = await _sync_radarr(db, service, progress_callback) elif service.service_type in [ServiceType.EMBY, ServiceType.JELLYFIN]: - result = await _sync_emby(db, service) + result = await _sync_emby(db, service, progress_callback) else: return { "success": False, @@ -92,7 +96,8 @@ async def sync_service_media( async def _sync_sonarr( db: AsyncSession, - service: ServiceConnection + service: ServiceConnection, + progress_callback: ProgressCallback = None ) -> Dict[str, Any]: """Sync series and episodes from Sonarr.""" client = SonarrClient( @@ -109,8 +114,18 @@ async def _sync_sonarr( try: series_list = await client.get_series() + total_series = len(series_list) - for series in series_list: + if progress_callback: + await progress_callback("Fetching series list", 5, 0, total_series, {"total_series": total_series}) + + for idx, series in enumerate(series_list): + if progress_callback and idx % 5 == 0: + pct = 5 + (idx / 
max(total_series, 1)) * 85 + await progress_callback( + f"Syncing series {idx+1}/{total_series}: {series.get('title', 'Unknown')}", + pct, idx, total_series, None + ) # Get or create series item result = await db.execute( select(MediaItem) @@ -195,6 +210,9 @@ async def _sync_sonarr( await db.commit() + if progress_callback: + await progress_callback("Sonarr sync complete", 100, total_series, total_series, {"added": added, "updated": updated}) + finally: await client.close() @@ -208,7 +226,8 @@ async def _sync_sonarr( async def _sync_radarr( db: AsyncSession, - service: ServiceConnection + service: ServiceConnection, + progress_callback: ProgressCallback = None ) -> Dict[str, Any]: """Sync movies from Radarr.""" client = RadarrClient( @@ -223,8 +242,19 @@ async def _sync_radarr( try: movies = await client.get_movies() + total_movies = len(movies) + + if progress_callback: + await progress_callback("Fetching movie list", 5, 0, total_movies, {"total_movies": total_movies}) - for movie in movies: + for idx, movie in enumerate(movies): + if progress_callback and idx % 10 == 0: + pct = 5 + (idx / max(total_movies, 1)) * 90 + await progress_callback( + f"Syncing movie {idx+1}/{total_movies}: {movie.get('title', 'Unknown')}", + pct, idx, total_movies, None + ) + if not movie.get("hasFile"): continue @@ -263,6 +293,9 @@ async def _sync_radarr( await db.commit() + if progress_callback: + await progress_callback("Radarr sync complete", 100, total_movies, total_movies, {"added": added, "updated": updated}) + finally: await client.close() @@ -276,7 +309,8 @@ async def _sync_radarr( async def _sync_emby( db: AsyncSession, - service: ServiceConnection + service: ServiceConnection, + progress_callback: ProgressCallback = None ) -> Dict[str, Any]: """ Sync watch data from Emby/Jellyfin to existing MediaItems. 
@@ -305,6 +339,8 @@ async def _sync_emby( try: # === SYNC LIBRARIES FROM EMBY === logger.info("Syncing libraries from Emby...") + if progress_callback: + await progress_callback("Syncing Emby libraries", 2, None, None, None) emby_libraries = await client.get_libraries() # Build library mapping @@ -358,6 +394,9 @@ async def _sync_emby( logger.info(f"Synced {len(library_map)} libraries") + if progress_callback: + await progress_callback(f"Synced {len(library_map)} libraries", 10, len(library_map), len(library_map), None) + # Build library paths for matching (all paths from all Emby libraries) library_paths: list[tuple[str, int, MediaType]] = [] for db_lib in library_map.values(): @@ -394,6 +433,9 @@ def find_library_for_path(item_path: str | None) -> int | None: logger.info(f"Found {len(path_to_item)} existing media items with paths") + if progress_callback: + await progress_callback("Matching media paths to libraries", 15, None, None, {"items_with_paths": len(path_to_item)}) + # === UPDATE LIBRARY_ID ON EXISTING ITEMS === for item in all_items: if item.path and not item.library_id: @@ -404,6 +446,9 @@ def find_library_for_path(item_path: str | None) -> int | None: logger.info(f"Assigned library_id to {library_assignments} items") + if progress_callback: + await progress_callback(f"Assigned {library_assignments} library mappings", 20, library_assignments, len(all_items), None) + # === SYNC USERS === users = await client.get_users() @@ -447,6 +492,9 @@ def find_library_for_path(item_path: str | None) -> int | None: logger.info(f"Synced {users_synced} users to database") + if progress_callback: + await progress_callback(f"Synced {users_synced} users", 25, users_synced, len(users), None) + # Get active sessions to mark currently watching (by path) sessions = await client.get_sessions() currently_watching_paths: set[str] = set() @@ -561,6 +609,13 @@ def track_watch_data(emby_item: dict, db_user_id: int): try: logger.debug(f"Processing user {i}/{len(users)}: {user_name}") 
+ if progress_callback: + pct = 25 + (i / max(len(users), 1)) * 50 + await progress_callback( + f"Fetching watch data for user {i}/{len(users)}: {user_name}", + pct, i, len(users), {"user": user_name} + ) + # Fetch and process movies immediately (don't hold in memory) emby_movies = await client.get_items_with_watch_data( user_id=emby_user_id, @@ -618,6 +673,9 @@ def track_watch_data(emby_item: dict, db_user_id: int): updated += watch_updated logger.info(f"Updated watch data for {watch_updated} items") + if progress_callback: + await progress_callback("Saving watch history", 85, watch_updated, len(path_to_item), {"watch_updated": watch_updated}) + # === UPDATE USER STATISTICS === logger.info("Updating user statistics...") for db_user_id in user_id_map.values(): @@ -673,9 +731,19 @@ def track_watch_data(emby_item: dict, db_user_id: int): await db.commit() logger.info(f"Updated watch data for {watch_updated} items from Emby (max values from {len(users)} users), saved {history_saved} watch history records") + if progress_callback: + await progress_callback("Syncing active sessions", 92, None, None, {"history_saved": history_saved}) + # === SYNC ACTIVE SESSIONS === await _sync_active_sessions(db, client, service, user_id_map, path_to_item) + if progress_callback: + await progress_callback("Emby sync complete", 100, None, None, { + "updated": updated, + "library_assignments": library_assignments, + "users_synced": users_synced + }) + finally: await client.close() diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index d93731a..7478de8 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,9 +1,12 @@ import { Routes, Route, Navigate, useLocation } from 'react-router-dom' +import { useQuery } from '@tanstack/react-query' import { useAuthStore } from './stores/auth' import { getToken } from './lib/api' +import api from './lib/api' import Layout from './components/Layout' import Login from './pages/Login' import Register from './pages/Register' +import 
SetupWizard from './pages/SetupWizard' import Dashboard from './pages/Dashboard' import Services from './pages/Services' import Rules from './pages/Rules' @@ -19,6 +22,14 @@ import Users from './pages/Users' import UserDetail from './pages/UserDetail' import Activity from './pages/Activity' +interface SetupStatus { + setup_complete: boolean + has_users: boolean + has_arr_service: boolean + has_media_server: boolean + current_step: string +} + function ProtectedRoute({ children }: { children: React.ReactNode }) { const { isAuthenticated } = useAuthStore() const token = getToken() @@ -32,6 +43,39 @@ function ProtectedRoute({ children }: { children: React.ReactNode }) { return <>{children} } +/** + * Redirect to /setup if authenticated but setup is not complete. + */ +function SetupGate({ children }: { children: React.ReactNode }) { + const { data: setupStatus, isLoading } = useQuery({ + queryKey: ['setup-status'], + queryFn: async () => { + const res = await api.get('/setup/status') + return res.data + }, + staleTime: 30_000, + retry: 1, + }) + + if (isLoading) { + return ( +
+
+
+

Loading...

+
+
+ ) + } + + // If setup is not complete, redirect to wizard + if (setupStatus && !setupStatus.setup_complete) { + return + } + + return <>{children} +} + function App() { const { isAuthenticated } = useAuthStore() const token = getToken() @@ -47,12 +91,22 @@ function App() { path="/register" element={hasAuth ? : } /> + + + + } + /> - + + + } > diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index 5fc4357..beb30e0 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -2,6 +2,8 @@ import { NavLink, Outlet } from 'react-router-dom' import { useQuery } from '@tanstack/react-query' import { useAuthStore } from '../stores/auth' import { useThemeStore } from '../stores/theme' +import { useJobsStore } from '../stores/jobs' +import { useJobWebSocket } from '../hooks/useJobWebSocket' import api from '../lib/api' import { useState, useEffect } from 'react' import { @@ -45,6 +47,8 @@ const navigation = [ export default function Layout() { const { user, logout } = useAuthStore() const { theme, setTheme } = useThemeStore() + const runningCount = useJobsStore((s) => s.runningCount) + useJobWebSocket() // Global WebSocket connection for all pages const [sidebarOpen, setSidebarOpen] = useState(false) const [sidebarCollapsed, setSidebarCollapsed] = useState(() => { const stored = localStorage.getItem('sidebarCollapsed') @@ -157,7 +161,17 @@ export default function Layout() { }` } > - +
+ + {item.name === 'Jobs' && runningCount > 0 && ( + + + + {runningCount} + + + )} +
{(!sidebarCollapsed || sidebarOpen) && {item.name}} ))} diff --git a/frontend/src/hooks/useJobWebSocket.ts b/frontend/src/hooks/useJobWebSocket.ts new file mode 100644 index 0000000..25894f6 --- /dev/null +++ b/frontend/src/hooks/useJobWebSocket.ts @@ -0,0 +1,215 @@ +import { useEffect, useRef, useCallback } from 'react' +import toast from 'react-hot-toast' +import { useJobsStore } from '../stores/jobs' +import { getToken } from '../lib/api' + +/** + * Global WebSocket hook for real-time job status updates. + * Should be used once in Layout.tsx to maintain a single connection. + * Shows toast notifications on all pages for job lifecycle events. + */ +export function useJobWebSocket() { + const wsRef = useRef(null) + const reconnectTimeoutRef = useRef | undefined>(undefined) + const reconnectAttempts = useRef(0) + const maxReconnectAttempts = 20 + const mountedRef = useRef(true) + + const setWsStatus = useJobsStore((s) => s.setWsStatus) + const jobStarted = useJobsStore((s) => s.jobStarted) + const jobProgressFn = useJobsStore((s) => s.jobProgress) + const jobCompleted = useJobsStore((s) => s.jobCompleted) + + const getWsUrl = useCallback(() => { + const protocol = window.location.protocol === 'https:' ? 
'wss:' : 'ws:' + const host = window.location.host + // Build WebSocket URL to match the API base + const apiBase = (import.meta as any).env?.VITE_API_URL || '/api' + if (apiBase.startsWith('http')) { + // Absolute URL: convert http(s) to ws(s) + return apiBase.replace(/^http/, 'ws') + '/ws/jobs' + } + // Relative URL + return `${protocol}//${host}${apiBase}/ws/jobs` + }, []) + + const formatDuration = (seconds: number | null) => { + if (!seconds) return '' + if (seconds < 60) return ` (${seconds.toFixed(1)}s)` + if (seconds < 3600) return ` (${(seconds / 60).toFixed(1)}m)` + return ` (${(seconds / 3600).toFixed(1)}h)` + } + + const handleMessage = useCallback( + (event: MessageEvent) => { + try { + const data = JSON.parse(event.data) + + switch (data.type) { + case 'job_started': + jobStarted(data.job_id, data.job_name, data.timestamp) + toast(data.job_name + ' started', { + icon: '🔄', + duration: 3000, + }) + break + + case 'job_progress': + jobProgressFn( + data.job_id, + data.job_name, + data.step, + data.progress_percent, + data.current, + data.total, + data.timestamp, + data.details || {} + ) + break + + case 'job_completed': + jobCompleted({ + job_id: data.job_id, + job_name: data.job_name, + status: data.status, + duration: data.duration, + error: data.error, + timestamp: data.timestamp, + }) + + if (data.status === 'success') { + toast.success( + data.job_name + ' completed' + formatDuration(data.duration), + { duration: 5000 } + ) + } else if (data.status === 'error') { + toast.error( + data.job_name + ' failed' + (data.error ? `: ${data.error}` : ''), + { duration: 8000 } + ) + } else if (data.status === 'skipped') { + toast(data.job_name + ' skipped', { + icon: '⏭️', + duration: 3000, + }) + } + break + + case 'pong': + // Heartbeat response, ignore + break + + default: + // Future: handle other message types (notifications, etc.) 
+ console.debug('Unknown WS message type:', data.type) + } + } catch { + console.warn('Failed to parse WebSocket message:', event.data) + } + }, + [jobStarted, jobProgressFn, jobCompleted] + ) + + const connect = useCallback(() => { + if (!mountedRef.current) return + if (wsRef.current?.readyState === WebSocket.OPEN) return + + const token = getToken() + if (!token) { + // Not authenticated, don't connect + return + } + + setWsStatus('connecting') + const url = getWsUrl() + + try { + const ws = new WebSocket(url) + wsRef.current = ws + + ws.onopen = () => { + if (!mountedRef.current) { + ws.close() + return + } + setWsStatus('connected') + reconnectAttempts.current = 0 + } + + ws.onmessage = handleMessage + + ws.onclose = (event) => { + setWsStatus('disconnected') + wsRef.current = null + + if (!mountedRef.current) return + if (event.code === 1000) return // Normal close + + // Exponential backoff reconnect + const delay = Math.min(1000 * Math.pow(2, reconnectAttempts.current), 30000) + if (reconnectAttempts.current < maxReconnectAttempts) { + reconnectAttempts.current++ + reconnectTimeoutRef.current = setTimeout(connect, delay) + } + } + + ws.onerror = () => { + // onclose will fire after this + } + } catch { + setWsStatus('disconnected') + } + }, [getWsUrl, handleMessage, setWsStatus]) + + // Ping to keep connection alive + useEffect(() => { + const interval = setInterval(() => { + if (wsRef.current?.readyState === WebSocket.OPEN) { + wsRef.current.send(JSON.stringify({ type: 'ping' })) + } + }, 30000) + + return () => clearInterval(interval) + }, []) + + // Connect on mount, disconnect on unmount + useEffect(() => { + mountedRef.current = true + connect() + + return () => { + mountedRef.current = false + if (reconnectTimeoutRef.current) { + clearTimeout(reconnectTimeoutRef.current) + } + if (wsRef.current) { + wsRef.current.close(1000) + wsRef.current = null + } + } + }, [connect]) + + // Reconnect when token changes (login/logout) + useEffect(() => { + const 
handleStorage = (e: StorageEvent) => { + if (e.key === 'auth_token') { + if (e.newValue) { + // Token appeared -> connect + connect() + } else { + // Token removed -> disconnect + if (wsRef.current) { + wsRef.current.close(1000) + wsRef.current = null + } + } + } + } + window.addEventListener('storage', handleStorage) + return () => window.removeEventListener('storage', handleStorage) + }, [connect]) + + return { + reconnect: connect, + } +} diff --git a/frontend/src/pages/Jobs.tsx b/frontend/src/pages/Jobs.tsx index 6f1ab6d..7d04a0b 100644 --- a/frontend/src/pages/Jobs.tsx +++ b/frontend/src/pages/Jobs.tsx @@ -1,9 +1,21 @@ -import { useState } from 'react' +import { useState, useEffect } from 'react' import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' -import { PlayIcon, ClockIcon, CheckCircleIcon, XCircleIcon, ArrowPathIcon, MinusCircleIcon, PencilIcon, ServerIcon } from '@heroicons/react/24/outline' +import { + PlayIcon, + ClockIcon, + CheckCircleIcon, + XCircleIcon, + ArrowPathIcon, + MinusCircleIcon, + PencilIcon, + ServerIcon, + SignalIcon, + SignalSlashIcon, +} from '@heroicons/react/24/outline' import api from '../lib/api' import toast from 'react-hot-toast' import { formatDateTime } from '../lib/utils' +import { useJobsStore, type JobProgress } from '../stores/jobs' interface Job { id: string @@ -37,6 +49,59 @@ interface ServiceConnection { is_enabled: boolean } +function ProgressBar({ progress }: { progress: JobProgress }) { + const pct = progress.progress_percent ?? 0 + + return ( +
+
+ + {progress.step} + + + {pct > 0 ? `${Math.round(pct)}%` : ''} + {progress.current != null && progress.total != null && ( + + {progress.current}/{progress.total} + + )} + +
+
+
+
+
+ ) +} + +function ElapsedTime({ since }: { since: string }) { + const [, setTick] = useState(0) + + useEffect(() => { + const interval = setInterval(() => setTick((t) => t + 1), 1000) + return () => clearInterval(interval) + }, []) + + const start = new Date(since).getTime() + const elapsed = Math.max(0, Math.floor((Date.now() - start) / 1000)) + + if (elapsed < 60) return {elapsed}s + if (elapsed < 3600) + return ( + + {Math.floor(elapsed / 60)}m {elapsed % 60}s + + ) + return ( + + {Math.floor(elapsed / 3600)}h {Math.floor((elapsed % 3600) / 60)}m + + ) +} + export default function Jobs() { const queryClient = useQueryClient() const [selectedJobId, setSelectedJobId] = useState(null) @@ -44,13 +109,26 @@ export default function Jobs() { const [editIntervalValue, setEditIntervalValue] = useState(60) const [editIntervalUnit, setEditIntervalUnit] = useState<'minutes' | 'hours'>('minutes') + // Live WebSocket state + const runningJobs = useJobsStore((s) => s.runningJobs) + const wsStatus = useJobsStore((s) => s.wsStatus) + const recentCompletions = useJobsStore((s) => s.recentCompletions) + + // Invalidate queries when a job completes via WebSocket + useEffect(() => { + if (recentCompletions.length > 0) { + queryClient.invalidateQueries({ queryKey: ['jobs'] }) + queryClient.invalidateQueries({ queryKey: ['job-executions'] }) + } + }, [recentCompletions.length, queryClient]) + const { data: jobsData, isLoading: jobsLoading } = useQuery({ queryKey: ['jobs'], queryFn: async () => { const res = await api.get<{ running: boolean; jobs: Job[] }>('/jobs/') return res.data }, - refetchInterval: 5000, // Refresh every 5 seconds + refetchInterval: 15000, // Slower polling since WS provides live updates }) const { data: services } = useQuery({ @@ -67,7 +145,7 @@ export default function Jobs() { const res = await api.get('/jobs/history/recent?limit=50') return res.data }, - refetchInterval: 5000, + refetchInterval: 15000, }) const { data: jobHistory } = useQuery({ @@ -109,10 
+187,18 @@ export default function Jobs() { }) const updateIntervalMutation = useMutation({ - mutationFn: async ({ jobId, intervalMinutes, intervalHours }: { jobId: string, intervalMinutes?: number, intervalHours?: number }) => { + mutationFn: async ({ + jobId, + intervalMinutes, + intervalHours, + }: { + jobId: string + intervalMinutes?: number + intervalHours?: number + }) => { const res = await api.put(`/jobs/${jobId}/interval`, { interval_minutes: intervalMinutes, - interval_hours: intervalHours + interval_hours: intervalHours, }) return res.data }, @@ -140,7 +226,6 @@ export default function Jobs() { const handleSaveInterval = () => { if (!editingJob) return - if (editIntervalUnit === 'hours') { updateIntervalMutation.mutate({ jobId: editingJob.id, intervalHours: editIntervalValue }) } else { @@ -196,25 +281,109 @@ export default function Jobs() { const now = new Date() const diffMs = date.getTime() - now.getTime() const diffMins = Math.floor(diffMs / 60000) - + if (diffMins < 1) return 'Starting soon...' if (diffMins < 60) return `in ${diffMins}m` if (diffMins < 1440) return `in ${Math.floor(diffMins / 60)}h` return `in ${Math.floor(diffMins / 1440)}d` } + // Merge scheduler jobs with live WebSocket running state + const isJobRunningLive = (jobId: string): boolean => { + return runningJobs.has(jobId) + } + + const getJobProgress = (jobId: string): JobProgress | undefined => { + return runningJobs.get(jobId) + } + + // Collect all currently running jobs (from WS) + const liveRunningJobs = Array.from(runningJobs.values()) + return (
-
-

Scheduled Jobs

-

Monitor and manage scheduled tasks

+
+
+

Scheduled Jobs

+

+ Monitor and manage scheduled tasks +

+
+ {/* WebSocket Status Indicator */} +
+ {wsStatus === 'connected' ? ( + + + Live + + ) : wsStatus === 'connecting' ? ( + + + Connecting + + ) : ( + + + Offline + + )} +
+ {/* Live Running Jobs Panel */} + {liveRunningJobs.length > 0 && ( +
+
+

+ + Running Now + + ({liveRunningJobs.length} active) + +

+
+
+ {liveRunningJobs.map((progress) => ( +
+
+
+

+ {progress.job_name} +

+ + {progress.job_id} + +
+
+
+ + +
+ + + Running + +
+
+ +
+ ))} +
+
+ )} + {/* Scheduler Status */} {jobsData && (
-
+
Scheduler: {jobsData.running ? 'Running' : 'Stopped'} @@ -223,7 +392,7 @@ export default function Jobs() { )} {/* Service Sync Jobs */} - {services && services.filter(s => s.is_enabled).length > 0 && ( + {services && services.filter((s) => s.is_enabled).length > 0 && (

@@ -236,135 +405,210 @@ export default function Jobs() {

- {services.filter(s => s.is_enabled).map((service) => { - const serviceJobId = `sync_service_${service.id}` - const isRunning = jobsData?.jobs?.some(j => j.id === serviceJobId && j.is_running) || - recentExecutions?.some(e => e.job_id === serviceJobId && e.status === 'running') - const lastExecution = recentExecutions?.find(e => e.job_id === serviceJobId) - - return ( -
-
-
-

{service.name}

- - {service.service_type} - + {services + .filter((s) => s.is_enabled) + .map((service) => { + const serviceJobId = `sync_service_${service.id}` + const isRunning = + isJobRunningLive(serviceJobId) || + jobsData?.jobs?.some((j) => j.id === serviceJobId && j.is_running) || + recentExecutions?.some( + (e) => e.job_id === serviceJobId && e.status === 'running' + ) + const liveProgress = getJobProgress(serviceJobId) + const lastExecution = recentExecutions?.find( + (e) => e.job_id === serviceJobId && e.status !== 'running' + ) + + return ( +
+
+
+

+ {service.name} +

+ + {service.service_type} + +
+ {isRunning && ( + + )}
- {isRunning && ( - + + {/* Live progress bar for this service */} + {liveProgress && ( +
+
+ {liveProgress.step} +
+
+
+
+
)} -
- - {lastExecution && ( -
- Last: {lastExecution.status === 'running' ? 'Running...' : - lastExecution.status === 'success' ? `✓ ${formatDuration(lastExecution.duration_seconds)}` : - lastExecution.status === 'error' ? '✗ Error' : lastExecution.status} -
- )} - - -
- ) - })} + + +
+ ) + })}
)} - {/* Active Jobs */} + {/* Scheduled Jobs */}
-

Active Jobs

+

Scheduled Jobs

{jobsLoading ? ( -
Loading jobs...
+
+ Loading jobs... +
) : jobsData?.jobs && jobsData.jobs.length > 0 ? (
- {jobsData.jobs.map((job) => ( -
-
-
-
-

{job.name}

- {job.id} - {job.is_running && ( - - - Running + {jobsData.jobs.map((job) => { + const isRunning = job.is_running || isJobRunningLive(job.id) + const liveProgress = getJobProgress(job.id) + + return ( +
+
+
+
+

+ {job.name} +

+ + {job.id} - )} -
-
-
- - {job.is_running ? ( - <>Started: {formatDateTime(job.running_since)} - ) : ( - <>Next run: {formatNextRun(job.next_run_time)} + {isRunning && ( + + + Running + )}
- {!job.is_running && ( - <> - - {formatDateTime(job.next_run_time)} - - )} +
+
+ + {isRunning ? ( + <> + Elapsed:{' '} + + + + + ) : ( + <> + Next run:{' '} + + {formatNextRun(job.next_run_time)} + + + )} +
+ {!isRunning && ( + <> + + {formatDateTime(job.next_run_time)} + + )} +
+
+ Interval:{' '} + {job.interval_hours + ? `${job.interval_hours}h` + : job.interval_minutes + ? `${job.interval_minutes}m` + : job.trigger} +
+ + {/* Live progress bar */} + {liveProgress && }
-
- Interval: {job.interval_hours ? `${job.interval_hours}h` : job.interval_minutes ? `${job.interval_minutes}m` : job.trigger} +
+ + +
-
- - - -
-
- ))} + ) + })}
) : ( -
No jobs configured
+
+ No jobs configured +
)}
@@ -373,7 +617,9 @@ export default function Jobs() {

- {selectedJobId ? `History: ${jobHistory?.[0]?.job_name || selectedJobId}` : 'Recent Executions'} + {selectedJobId + ? `History: ${jobHistory?.[0]?.job_name || selectedJobId}` + : 'Recent Executions'}

{selectedJobId && (
- - - - - + + + + + {executionsLoading ? ( - - ) : (selectedJobId ? jobHistory : recentExecutions)?.map((exec) => ( - - - - - - + - - )) || ( - - - + + + + + + + )) || ( + + + + ) )}
JobStatusStartedDurationDetails + Job + + Status + + Started + + Duration + + Details +
+ Loading executions...
-
{exec.job_name}
-
{exec.job_id}
-
{getStatusBadge(exec.status)}{formatDateTime(exec.started_at)}{formatDuration(exec.duration_seconds)} - {exec.error_message ? ( -
- {exec.error_message} + ) : ( + (selectedJobId ? jobHistory : recentExecutions)?.map((exec) => ( +
+
+ {exec.job_name}
- ) : exec.details ? ( -
- {Object.entries(exec.details).slice(0, 2).map(([key, value]) => ( -
- {key}: {JSON.stringify(value)} -
- ))} +
+ {exec.job_id}
- ) : ( - - - )} -
- No executions yet -
{getStatusBadge(exec.status)} + {formatDateTime(exec.started_at)} + + {formatDuration(exec.duration_seconds)} + + {exec.error_message ? ( +
+ {exec.error_message} +
+ ) : exec.details ? ( +
+ {Object.entries(exec.details) + .slice(0, 2) + .map(([key, value]) => ( +
+ {key}:{' '} + + {JSON.stringify(value)} + +
+ ))} +
+ ) : ( + - + )} +
+ No executions yet +
@@ -448,7 +728,7 @@ export default function Jobs() {

Edit Job Interval: {editingJob.name}

- +
- +
- Current: {editingJob.interval_hours ? `${editingJob.interval_hours} hours` : editingJob.interval_minutes ? `${editingJob.interval_minutes} minutes` : editingJob.trigger} + Current:{' '} + {editingJob.interval_hours + ? `${editingJob.interval_hours} hours` + : editingJob.interval_minutes + ? `${editingJob.interval_minutes} minutes` + : editingJob.trigger}
- +
) -} \ No newline at end of file +} diff --git a/frontend/src/pages/SetupWizard.tsx b/frontend/src/pages/SetupWizard.tsx new file mode 100644 index 0000000..8d52c11 --- /dev/null +++ b/frontend/src/pages/SetupWizard.tsx @@ -0,0 +1,872 @@ +import { useState, useEffect } from 'react' +import { useNavigate } from 'react-router-dom' +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query' +import { + CheckCircleIcon, + XCircleIcon, + ArrowRightIcon, + ArrowLeftIcon, + ServerIcon, + FilmIcon, + ArrowPathIcon, + RocketLaunchIcon, + ForwardIcon, +} from '@heroicons/react/24/outline' +import api from '../lib/api' +import toast from 'react-hot-toast' +import type { ServiceConnection, ServiceConnectionCreate, ServiceType } from '../types' + +// ─── Types ─────────────────────────────────────────────────────────────── +interface SetupStatus { + setup_complete: boolean + has_users: boolean + has_arr_service: boolean + has_media_server: boolean + services: { id: number; name: string; service_type: string; is_enabled: boolean }[] + current_step: string +} + +interface TestResult { + success: boolean + version?: string + message?: string +} + +type WizardStep = 'welcome' | 'arr_services' | 'media_server' | 'sync' | 'complete' + +const STEPS: WizardStep[] = ['welcome', 'arr_services', 'media_server', 'sync', 'complete'] +const STEP_LABELS: Record = { + welcome: 'Welcome', + arr_services: 'Download Managers', + media_server: 'Media Server', + sync: 'Initial Sync', + complete: 'Complete', +} + +// ─── Main Component ────────────────────────────────────────────────────── +export default function SetupWizard() { + const navigate = useNavigate() + const queryClient = useQueryClient() + const [currentStep, setCurrentStep] = useState('welcome') + + // Track services added during this wizard session + const [addedServices, setAddedServices] = useState([]) + + // Fetch current setup status + const { data: setupStatus, refetch: refetchStatus } = useQuery({ + queryKey: 
['setup-status'], + queryFn: async () => { + const res = await api.get('/setup/status') + return res.data + }, + }) + + // Resume at the correct step if services already exist + useEffect(() => { + if (setupStatus) { + if (setupStatus.setup_complete) { + navigate('/', { replace: true }) + return + } + // Pre-populate added services + if (setupStatus.services.length > 0 && addedServices.length === 0) { + // Fetch full details of existing services + api.get('/services/').then((res) => { + setAddedServices(res.data) + }) + } + // Resume at the right step + if (setupStatus.has_arr_service && currentStep === 'welcome') { + setCurrentStep(setupStatus.has_media_server ? 'sync' : 'media_server') + } + } + }, [setupStatus]) // eslint-disable-line react-hooks/exhaustive-deps + + // Complete setup mutation + const completeMutation = useMutation({ + mutationFn: async () => { + const res = await api.post('/setup/complete') + return res.data + }, + onSuccess: () => { + toast.success('Setup complete! Welcome to MediaCurator.') + queryClient.invalidateQueries({ queryKey: ['setup-status'] }) + navigate('/', { replace: true }) + }, + onError: (err: any) => { + toast.error(err?.response?.data?.detail || 'Failed to complete setup') + }, + }) + + // Skip setup mutation + const skipMutation = useMutation({ + mutationFn: async () => { + const res = await api.post('/setup/skip') + return res.data + }, + onSuccess: () => { + toast.success('Setup skipped. 
You can add services later from the Services page.') + queryClient.invalidateQueries({ queryKey: ['setup-status'] }) + navigate('/', { replace: true }) + }, + }) + + const goNext = () => { + const idx = STEPS.indexOf(currentStep) + if (idx < STEPS.length - 1) setCurrentStep(STEPS[idx + 1]) + } + + const goBack = () => { + const idx = STEPS.indexOf(currentStep) + if (idx > 0) setCurrentStep(STEPS[idx - 1]) + } + + const handleServiceAdded = (service: ServiceConnection) => { + setAddedServices((prev) => [...prev, service]) + refetchStatus() + queryClient.invalidateQueries({ queryKey: ['services'] }) + } + + const arrServices = addedServices.filter((s) => + ['sonarr', 'radarr'].includes(s.service_type) + ) + const mediaServers = addedServices.filter((s) => + ['emby', 'jellyfin'].includes(s.service_type) + ) + + const stepIdx = STEPS.indexOf(currentStep) + + return ( +
+ {/* Top bar */} +
+
+
+
+ + + +
+ MediaCurator Setup +
+ {currentStep !== 'complete' && ( + + )} +
+
+ + {/* Progress bar */} +
+
+
+ {STEPS.map((step, i) => ( +
+
+ {i < stepIdx ? ( + + ) : ( + i + 1 + )} +
+ + {i < STEPS.length - 1 && ( +
+ )} +
+ ))} +
+
+
+ + {/* Content */} +
+
+ {currentStep === 'welcome' && ( + + )} + {currentStep === 'arr_services' && ( + + )} + {currentStep === 'media_server' && ( + + )} + {currentStep === 'sync' && ( + completeMutation.mutate()} + onBack={goBack} + isCompleting={completeMutation.isPending} + /> + )} + {currentStep === 'complete' && ( + navigate('/', { replace: true })} /> + )} +
+
+
+ ) +} + +// ─── Welcome Step ──────────────────────────────────────────────────────── +function WelcomeStep({ onNext }: { onNext: () => void }) { + return ( +
+
+
+ +
+

+ Welcome to MediaCurator +

+

+ Let's get your media management set up. This wizard will guide you through + connecting your services in the right order. +

+
+ +
+
+
+ +
+

1. Download Managers

+

+ Connect Sonarr and/or Radarr to track your media library. +

+
+
+
+ +
+

2. Media Server

+

+ Connect Emby or Jellyfin for watch history and library data. +

+
+
+
+ +
+

3. Sync & Verify

+

+ Run an initial sync and verify everything is working. +

+
+
+ + +
+ ) +} + +// ─── Service Form (Reusable) ───────────────────────────────────────────── +function ServiceForm({ + allowedTypes, + onServiceAdded, +}: { + allowedTypes: { value: ServiceType; label: string; placeholder: string }[] + onServiceAdded: (service: ServiceConnection) => void +}) { + const [formData, setFormData] = useState({ + name: '', + service_type: allowedTypes[0].value, + url: '', + api_key: '', + is_enabled: true, + verify_ssl: true, + timeout: 120, + }) + const [testResult, setTestResult] = useState(null) + const [isTesting, setIsTesting] = useState(false) + const [showAdvanced, setShowAdvanced] = useState(false) + + const selectedType = allowedTypes.find((t) => t.value === formData.service_type) + + const testMutation = useMutation({ + mutationFn: async () => { + setIsTesting(true) + const res = await api.post('/setup/test-connection', { + service_type: formData.service_type, + url: formData.url, + api_key: formData.api_key, + verify_ssl: formData.verify_ssl, + timeout: formData.timeout, + }) + return res.data + }, + onSuccess: (data) => { + setTestResult(data) + if (data.success) { + toast.success(`Connection successful! 
Version: ${data.version}`) + } else { + toast.error(`Connection failed: ${data.message}`) + } + }, + onError: () => { + setTestResult({ success: false, message: 'Connection test failed' }) + toast.error('Connection test failed') + }, + onSettled: () => setIsTesting(false), + }) + + const saveMutation = useMutation({ + mutationFn: async () => { + const res = await api.post('/setup/add-service', formData) + return res.data + }, + onSuccess: (service) => { + toast.success(`${service.name} added successfully!`) + onServiceAdded(service) + // Reset form for adding another + setFormData({ + name: '', + service_type: allowedTypes[0].value, + url: '', + api_key: '', + is_enabled: true, + verify_ssl: true, + timeout: 120, + }) + setTestResult(null) + }, + onError: () => toast.error('Failed to add service'), + }) + + const canTest = formData.url.trim() !== '' && formData.api_key.trim() !== '' + const canSave = canTest && formData.name.trim() !== '' && testResult?.success + + return ( +
+ {/* Service Type */} + {allowedTypes.length > 1 && ( +
+ +
+ {allowedTypes.map((type) => ( + + ))} +
+
+ )} + + {/* Name */} +
+ + setFormData({ ...formData, name: e.target.value })} + placeholder={`My ${selectedType?.label || 'Service'}`} + /> +
+ + {/* URL */} +
+ + { + setFormData({ ...formData, url: e.target.value }) + setTestResult(null) + }} + placeholder={selectedType?.placeholder || 'http://localhost:8096'} + /> +
+ + {/* API Key */} +
+ + { + setFormData({ ...formData, api_key: e.target.value }) + setTestResult(null) + }} + placeholder="Enter API key" + /> +
+ + {/* Advanced Settings */} +
+ + {showAdvanced && ( +
+ +
+ + setFormData({ ...formData, timeout: parseInt(e.target.value) || 120 })} + /> +
+
+ )} +
+ + {/* Test Result */} + {testResult && ( +
+ {testResult.success ? ( + + ) : ( + + )} + {testResult.success + ? `Connected! Version: ${testResult.version}` + : `Failed: ${testResult.message}`} +
+ )} + + {/* Actions */} +
+ + +
+
+ ) +} + +// ─── Added Service Card ────────────────────────────────────────────────── +function AddedServiceCard({ service }: { service: ServiceConnection }) { + return ( +
+ +
+

+ {service.name} +

+

+ {service.service_type.toUpperCase()} • {service.url} +

+
+
+ ) +} + +// ─── Arr Services Step ─────────────────────────────────────────────────── +function ArrServicesStep({ + services, + onServiceAdded, + onNext, + onBack, +}: { + services: ServiceConnection[] + onServiceAdded: (service: ServiceConnection) => void + onNext: () => void + onBack: () => void +}) { + const arrTypes: { value: ServiceType; label: string; placeholder: string }[] = [ + { value: 'sonarr', label: 'Sonarr (TV Shows)', placeholder: 'http://localhost:8989' }, + { value: 'radarr', label: 'Radarr (Movies)', placeholder: 'http://localhost:7878' }, + ] + + return ( +
+
+

+
+ +
+ Connect Download Managers +

+

+ Add at least one Sonarr or Radarr instance. These services manage your media downloads + and provide the library data MediaCurator needs. +

+
+ + {/* Already added */} + {services.length > 0 && ( +
+

Added Services

+ {services.map((s) => ( + + ))} +
+ )} + + {/* Add form */} +
+

+ {services.length > 0 ? 'Add Another Service' : 'Add Service'} +

+ +
+ + {/* Navigation */} +
+ + +
+
+ ) +} + +// ─── Media Server Step ─────────────────────────────────────────────────── +function MediaServerStep({ + services, + onServiceAdded, + onNext, + onBack, +}: { + services: ServiceConnection[] + onServiceAdded: (service: ServiceConnection) => void + onNext: () => void + onBack: () => void +}) { + const mediaTypes: { value: ServiceType; label: string; placeholder: string }[] = [ + { value: 'emby', label: 'Emby', placeholder: 'http://localhost:8096' }, + { value: 'jellyfin', label: 'Jellyfin', placeholder: 'http://localhost:8096' }, + ] + + return ( +
+
+

+
+ +
+ Connect Media Server +

+

+ Add your Emby or Jellyfin media server. This provides watch history, user data, + and library information for intelligent cleanup decisions. +

+
+ + {/* Already added */} + {services.length > 0 && ( +
+

Added Servers

+ {services.map((s) => ( + + ))} +
+ )} + + {/* Add form */} +
+

+ {services.length > 0 ? 'Add Another Server' : 'Add Media Server'} +

+ +
+ + {/* Navigation */} +
+ + +
+
+ ) +} + +// ─── Sync Step ─────────────────────────────────────────────────────────── +function SyncStep({ + services, + onNext, + onBack, + isCompleting, +}: { + services: ServiceConnection[] + onNext: () => void + onBack: () => void + isCompleting: boolean +}) { + const [syncing, setSyncing] = useState(false) + const [syncResults, setSyncResults] = useState>({}) + + const runSync = async () => { + setSyncing(true) + const results: typeof syncResults = {} + + // Initialize all as pending + for (const svc of services) { + results[svc.id] = { status: 'pending' } + } + setSyncResults({ ...results }) + + // Sync each service sequentially + for (const svc of services) { + results[svc.id] = { status: 'syncing' } + setSyncResults({ ...results }) + + try { + const res = await api.post(`/services/${svc.id}/sync`) + const data = res.data + const added = data.added || 0 + const updated = data.updated || 0 + const users = data.users_synced || 0 + let msg = `${added} added, ${updated} updated` + if (users > 0) msg += `, ${users} users synced` + results[svc.id] = { status: 'success', message: msg } + } catch (err: any) { + results[svc.id] = { + status: 'error', + message: err?.response?.data?.detail || err?.message || 'Sync failed', + } + } + setSyncResults({ ...results }) + } + setSyncing(false) + } + + const allSynced = services.every((s) => syncResults[s.id]?.status === 'success') + const anySynced = services.some((s) => syncResults[s.id]?.status === 'success') + + return ( +
+
+

+
+ +
+ Initial Sync +

+

+ Run an initial synchronization to import your media libraries and watch history. + This may take a few minutes depending on library size. +

+
+ + {/* Service sync cards */} +
+ {services.map((svc) => { + const result = syncResults[svc.id] + return ( +
+
+ {result?.status === 'success' ? ( + + ) : result?.status === 'error' ? ( + + ) : result?.status === 'syncing' ? ( + + ) : ( +
+ )} +
+
+

+ {svc.name} +

+

+ {svc.service_type.toUpperCase()} + {result?.message && ` — ${result.message}`} +

+
+
+ ) + })} +
+ + {/* Run Sync button */} + {!allSynced && ( + + )} + + {/* Navigation */} +
+ + +
+
+ ) +} + +// ─── Complete Step ─────────────────────────────────────────────────────── +function CompleteStep({ onFinish }: { onFinish: () => void }) { + return ( +
+
+ +
+

You're All Set!

+

+ Your services are connected and initial sync is complete. Next, head to the + Rules page to configure + your cleanup policies. +

+ +
+ ) +} diff --git a/frontend/src/stores/jobs.ts b/frontend/src/stores/jobs.ts new file mode 100644 index 0000000..ede7502 --- /dev/null +++ b/frontend/src/stores/jobs.ts @@ -0,0 +1,113 @@ +import { create } from 'zustand' + +export interface JobProgress { + job_id: string + job_name: string + step: string + progress_percent: number | null + current: number | null + total: number | null + started_at: string + last_update: string + details: Record +} + +export interface JobCompletedEvent { + job_id: string + job_name: string + status: string + duration: number | null + error: string | null + timestamp: string +} + +export type WsConnectionStatus = 'connecting' | 'connected' | 'disconnected' + +interface JobsState { + /** Currently running jobs with live progress */ + runningJobs: Map + /** Count of running jobs (for badge) */ + runningCount: number + /** Recent completion events (last 10) */ + recentCompletions: JobCompletedEvent[] + /** WebSocket connection status */ + wsStatus: WsConnectionStatus + + // Actions + setWsStatus: (status: WsConnectionStatus) => void + jobStarted: (job_id: string, job_name: string, timestamp: string) => void + jobProgress: ( + job_id: string, + job_name: string, + step: string, + progress_percent: number | null, + current: number | null, + total: number | null, + timestamp: string, + details: Record + ) => void + jobCompleted: (event: JobCompletedEvent) => void + clearRunningJob: (job_id: string) => void +} + +export const useJobsStore = create()((set) => ({ + runningJobs: new Map(), + runningCount: 0, + recentCompletions: [], + wsStatus: 'disconnected', + + setWsStatus: (status) => set({ wsStatus: status }), + + jobStarted: (job_id, job_name, timestamp) => + set((state) => { + const newMap = new Map(state.runningJobs) + newMap.set(job_id, { + job_id, + job_name, + step: 'Starting...', + progress_percent: 0, + current: null, + total: null, + started_at: timestamp, + last_update: timestamp, + details: {}, + }) + return { runningJobs: 
newMap, runningCount: newMap.size } + }), + + jobProgress: (job_id, job_name, step, progress_percent, current, total, timestamp, details) => + set((state) => { + const newMap = new Map(state.runningJobs) + const existing = newMap.get(job_id) + newMap.set(job_id, { + job_id, + job_name, + step, + progress_percent, + current, + total, + started_at: existing?.started_at || timestamp, + last_update: timestamp, + details: details || existing?.details || {}, + }) + return { runningJobs: newMap, runningCount: newMap.size } + }), + + jobCompleted: (event) => + set((state) => { + const newMap = new Map(state.runningJobs) + newMap.delete(event.job_id) + return { + runningJobs: newMap, + runningCount: newMap.size, + recentCompletions: [event, ...state.recentCompletions].slice(0, 10), + } + }), + + clearRunningJob: (job_id) => + set((state) => { + const newMap = new Map(state.runningJobs) + newMap.delete(job_id) + return { runningJobs: newMap, runningCount: newMap.size } + }), +})) From f90a5f5e74698a7005ea8dc61f256e264b746d28 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Sun, 22 Feb 2026 19:33:34 +0100 Subject: [PATCH 011/122] fix: resolve all priority 1-5 bugs from DEVELOPMENT_STATUS.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit BUG-001 (CRITICAL): LibraryDetail.tsx complete rewrite - Fix double /api/ prefix causing 404 errors on all API calls - Migrate from manual useState/useEffect to React Query (useQuery) - Remove local formatBytes, formatDuration, formatDate, formatRelativeTime - Import shared utilities from lib/utils.ts - Add proper light/dark mode classes throughout BUG-002 (HIGH): Fix Login.tsx & Register.tsx light-mode - Replace hardcoded dark classes with light/dark pairs - Fix: bg-dark-800 → bg-white dark:bg-dark-800 - Fix: text-white → text-gray-900 dark:text-white - Fix: text-dark-200 → text-gray-700 dark:text-dark-200 - Fix: border-dark-700 → border-gray-200 dark:border-dark-700 - Fix inputs: bg-gray-50 
dark:bg-dark-800, proper placeholder colors BUG-003 (HIGH): Fix ConfirmDialog.tsx light-mode - Modal background, title, message, footer, cancel button themed BUG-004 (HIGH): Fix Skeleton.tsx light-mode - Base skeleton: bg-gray-200 dark:bg-dark-700 - Card/Stats containers: bg-white dark:bg-dark-800 BUG-005 (MEDIUM): Fix Toast theming in main.tsx - Replace hardcoded hex colors with Tailwind CSS classes - Toasts now respect light/dark mode automatically BUG-006 (MEDIUM): Fix German locale in utils.ts - Change formatDate and formatDateTime from 'de-DE' to 'en-US' - Add formatDuration utility function for shared use BUG-008 (MEDIUM): Call fetchUser on app init - ProtectedRoute now calls fetchUser() on mount when token exists - Ensures fresh user data instead of potentially stale persisted state BUG-009 (LOW): Remove duplicate formatBytes from Preview.tsx - Import from shared lib/utils.ts instead BUG-010 (LOW): Fix manual debounce in Users.tsx - Replace setTimeout-based debounce with useDebounce hook --- frontend/src/App.tsx | 10 +- frontend/src/components/ConfirmDialog.tsx | 10 +- frontend/src/components/Skeleton.tsx | 6 +- frontend/src/lib/utils.ts | 17 +- frontend/src/main.tsx | 11 +- frontend/src/pages/LibraryDetail.tsx | 513 ++++++++++------------ frontend/src/pages/Login.tsx | 16 +- frontend/src/pages/Preview.tsx | 13 +- frontend/src/pages/Register.tsx | 24 +- frontend/src/pages/Users.tsx | 17 +- 10 files changed, 295 insertions(+), 342 deletions(-) diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 7478de8..f350d2c 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,4 +1,5 @@ import { Routes, Route, Navigate, useLocation } from 'react-router-dom' +import { useEffect } from 'react' import { useQuery } from '@tanstack/react-query' import { useAuthStore } from './stores/auth' import { getToken } from './lib/api' @@ -31,9 +32,16 @@ interface SetupStatus { } function ProtectedRoute({ children }: { children: React.ReactNode }) { - const { 
isAuthenticated } = useAuthStore() + const { isAuthenticated, fetchUser } = useAuthStore() const token = getToken() const location = useLocation() + + // Fetch fresh user data on mount if we have a token + useEffect(() => { + if (token) { + fetchUser() + } + }, []) // eslint-disable-line react-hooks/exhaustive-deps // Check both zustand state and localStorage token if (!isAuthenticated && !token) { diff --git a/frontend/src/components/ConfirmDialog.tsx b/frontend/src/components/ConfirmDialog.tsx index 2a42d04..5112499 100644 --- a/frontend/src/components/ConfirmDialog.tsx +++ b/frontend/src/components/ConfirmDialog.tsx @@ -31,7 +31,7 @@ export default function ConfirmDialog({ return (
-
+
-

{title}

-

{message}

+

{title}

+

{message}

-
+
diff --git a/frontend/src/components/Skeleton.tsx b/frontend/src/components/Skeleton.tsx index 8375bbf..bf6efa4 100644 --- a/frontend/src/components/Skeleton.tsx +++ b/frontend/src/components/Skeleton.tsx @@ -11,7 +11,7 @@ export function Skeleton({ width, height }: SkeletonProps) { - const baseClasses = 'animate-pulse bg-dark-700' + const baseClasses = 'animate-pulse bg-gray-200 dark:bg-dark-700' const variantClasses = { text: 'rounded h-4', @@ -55,7 +55,7 @@ export function TableSkeleton({ rows = 5, columns = 4 }: { rows?: number; column export function CardSkeleton() { return ( -
+
@@ -75,7 +75,7 @@ export function StatsSkeleton() { return (
{Array.from({ length: 4 }).map((_, i) => ( -
+
diff --git a/frontend/src/lib/utils.ts b/frontend/src/lib/utils.ts index 292b2ed..504d85d 100644 --- a/frontend/src/lib/utils.ts +++ b/frontend/src/lib/utils.ts @@ -41,6 +41,19 @@ export function formatRelativeTime(date: string | Date | null | undefined): stri return `${diffYear} year${diffYear !== 1 ? 's' : ''} ago` } +/** + * Format duration in seconds to human-readable string + */ +export function formatDuration(seconds: number | null | undefined): string { + if (!seconds || seconds === 0) return '0m' + const hours = Math.floor(seconds / 3600) + const minutes = Math.floor((seconds % 3600) / 60) + if (hours > 0) { + return `${hours}h ${minutes}m` + } + return `${minutes}m` +} + /** * Format date to localized string */ @@ -51,7 +64,7 @@ export function formatDate(date: string | Date | null | undefined): string { const dateStr = typeof date === 'string' ? date : date.toISOString() const isoDate = dateStr.includes('Z') || dateStr.includes('+') ? dateStr : dateStr + 'Z' - return new Date(isoDate).toLocaleDateString('de-DE', { + return new Date(isoDate).toLocaleDateString('en-US', { year: 'numeric', month: 'short', day: 'numeric', @@ -70,7 +83,7 @@ export function formatDateTime(date: string | Date | null | undefined): string { const dateStr = typeof date === 'string' ? date : date.toISOString() const isoDate = dateStr.includes('Z') || dateStr.includes('+') ? 
dateStr : dateStr + 'Z' - return new Date(isoDate).toLocaleString('de-DE', { + return new Date(isoDate).toLocaleString('en-US', { dateStyle: 'short', timeStyle: 'short', }) diff --git a/frontend/src/main.tsx b/frontend/src/main.tsx index e90dd72..06076ee 100644 --- a/frontend/src/main.tsx +++ b/frontend/src/main.tsx @@ -26,21 +26,22 @@ ReactDOM.createRoot(document.getElementById('root')!).render( position="top-right" toastOptions={{ duration: 4000, + className: 'bg-white dark:bg-dark-800 text-gray-900 dark:text-gray-100 border border-gray-200 dark:border-dark-700 shadow-lg', style: { - background: '#1e293b', - color: '#f1f5f9', - border: '1px solid #334155', + background: undefined, + color: undefined, + border: undefined, }, success: { iconTheme: { primary: '#22c55e', - secondary: '#1e293b', + secondary: 'white', }, }, error: { iconTheme: { primary: '#ef4444', - secondary: '#1e293b', + secondary: 'white', }, }, }} diff --git a/frontend/src/pages/LibraryDetail.tsx b/frontend/src/pages/LibraryDetail.tsx index ed2f6af..627f03c 100644 --- a/frontend/src/pages/LibraryDetail.tsx +++ b/frontend/src/pages/LibraryDetail.tsx @@ -1,5 +1,6 @@ -import { useState, useEffect } from 'react'; -import { useParams, Link } from 'react-router-dom'; +import { useState } from 'react' +import { useParams, Link } from 'react-router-dom' +import { useQuery } from '@tanstack/react-query' import { FilmIcon, TvIcon, @@ -10,228 +11,174 @@ import { ChevronLeftIcon, ChevronRightIcon, MagnifyingGlassIcon, -} from '@heroicons/react/24/outline'; -import api from '../lib/api'; +} from '@heroicons/react/24/outline' +import api from '../lib/api' +import { formatBytes, formatRelativeTime, formatDuration, formatDate } from '../lib/utils' interface LibraryDetails { - id: number; - name: string; - type: string; - media_type: string; - is_enabled: boolean; - path: string; - service_name: string | null; - external_id: string; - last_synced_at: string | null; - total_items: number; - total_size_bytes: 
number; - total_plays: number; + id: number + name: string + type: string + media_type: string + is_enabled: boolean + path: string + service_name: string | null + external_id: string + last_synced_at: string | null + total_items: number + total_size_bytes: number + total_plays: number item_breakdown: { - movies: number; - series: number; - seasons: number; - episodes: number; - }; + movies: number + series: number + seasons: number + episodes: number + } stats: { - plays_24h: number; - plays_7d: number; - plays_30d: number; - watch_time_24h: number; - watch_time_7d: number; - watch_time_30d: number; - }; + plays_24h: number + plays_7d: number + plays_30d: number + watch_time_24h: number + watch_time_7d: number + watch_time_30d: number + } top_users: Array<{ - user_id: string; - plays: number; - watch_time_seconds: number; - }>; + user_id: string + plays: number + watch_time_seconds: number + }> recently_watched: Array<{ - id: number; - title: string; - media_type: string; - last_watched_at: string | null; - watch_count: number; - }>; - active_sessions: number; + id: number + title: string + media_type: string + last_watched_at: string | null + watch_count: number + }> + active_sessions: number } interface MediaItem { - id: number; - title: string; - media_type: string; - external_id: string; - added_at: string | null; - last_watched_at: string | null; - watch_count: number; - size_bytes: number; - year: number | null; + id: number + title: string + media_type: string + external_id: string + added_at: string | null + last_watched_at: string | null + watch_count: number + size_bytes: number + year: number | null +} + +interface MediaResponse { + items: MediaItem[] + total_pages: number } interface ActivityItem { - id: number; - user_id: string; - media_title: string; - client_name: string; - device_name: string; - play_method: string; - is_transcoding: boolean; - started_at: string | null; - ended_at: string | null; - duration_seconds: number; - played_percentage: 
number; - is_active: boolean; + id: number + user_id: string + media_title: string + client_name: string + device_name: string + play_method: string + is_transcoding: boolean + started_at: string | null + ended_at: string | null + duration_seconds: number + played_percentage: number + is_active: boolean +} + +interface ActivityResponse { + items: ActivityItem[] + total_pages: number } -type TabType = 'overview' | 'media' | 'activity'; +type TabType = 'overview' | 'media' | 'activity' export default function LibraryDetail() { - const { libraryId } = useParams<{ libraryId: string }>(); - const [activeTab, setActiveTab] = useState('overview'); - const [details, setDetails] = useState(null); - const [media, setMedia] = useState([]); - const [activity, setActivity] = useState([]); - const [loading, setLoading] = useState(true); - const [error, setError] = useState(null); - - // Media pagination - const [mediaPage, setMediaPage] = useState(1); - const [mediaTotalPages, setMediaTotalPages] = useState(1); - const [mediaSearch, setMediaSearch] = useState(''); - const [mediaSortBy, setMediaSortBy] = useState('title'); - const [mediaSortOrder, setMediaSortOrder] = useState('asc'); - - // Activity pagination - const [activityPage, setActivityPage] = useState(1); - const [activityTotalPages, setActivityTotalPages] = useState(1); + const { libraryId } = useParams<{ libraryId: string }>() + const [activeTab, setActiveTab] = useState('overview') - useEffect(() => { - fetchDetails(); - }, [libraryId]); + // Media pagination & filters + const [mediaPage, setMediaPage] = useState(1) + const [mediaSearch, setMediaSearch] = useState('') + const [mediaSortBy, setMediaSortBy] = useState('title') + const [mediaSortOrder, setMediaSortOrder] = useState('asc') - useEffect(() => { - if (activeTab === 'media') { - fetchMedia(); - } else if (activeTab === 'activity') { - fetchActivity(); - } - }, [activeTab, mediaPage, mediaSearch, mediaSortBy, mediaSortOrder, activityPage]); + // Activity 
pagination + const [activityPage, setActivityPage] = useState(1) - async function fetchDetails() { - if (!libraryId) return; - setLoading(true); - setError(null); - try { - const response = await api.get(`/api/libraries/${libraryId}/details`); - setDetails(response.data); - } catch (err: unknown) { - const message = err instanceof Error ? err.message : 'Failed to load library details'; - setError(message); - } finally { - setLoading(false); - } - } + // Fetch library details + const { data: details, isLoading, error } = useQuery({ + queryKey: ['library-details', libraryId], + queryFn: async () => { + const res = await api.get(`/libraries/${libraryId}/details`) + return res.data + }, + enabled: !!libraryId, + }) - async function fetchMedia() { - if (!libraryId) return; - try { + // Fetch media (only when media tab is active) + const { data: mediaData } = useQuery({ + queryKey: ['library-media', libraryId, mediaPage, mediaSearch, mediaSortBy, mediaSortOrder], + queryFn: async () => { const params = new URLSearchParams({ page: mediaPage.toString(), page_size: '50', sort_by: mediaSortBy, sort_order: mediaSortOrder, - }); + }) if (mediaSearch) { - params.append('search', mediaSearch); + params.append('search', mediaSearch) } - const response = await api.get(`/api/libraries/${libraryId}/media?${params}`); - setMedia(response.data.items); - setMediaTotalPages(response.data.total_pages); - } catch (err) { - console.error('Failed to fetch media:', err); - } - } + const res = await api.get(`/libraries/${libraryId}/media?${params}`) + return res.data + }, + enabled: !!libraryId && activeTab === 'media', + }) - async function fetchActivity() { - if (!libraryId) return; - try { - const response = await api.get(`/api/libraries/${libraryId}/activity?page=${activityPage}&page_size=50`); - setActivity(response.data.items); - setActivityTotalPages(response.data.total_pages); - } catch (err) { - console.error('Failed to fetch activity:', err); - } - } - - function formatBytes(bytes: 
number): string { - if (bytes === 0) return '0 B'; - const k = 1024; - const sizes = ['B', 'KB', 'MB', 'GB', 'TB']; - const i = Math.floor(Math.log(bytes) / Math.log(k)); - return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i]; - } - - function formatDuration(seconds: number): string { - if (!seconds) return '0m'; - const hours = Math.floor(seconds / 3600); - const minutes = Math.floor((seconds % 3600) / 60); - if (hours > 0) { - return `${hours}h ${minutes}m`; - } - return `${minutes}m`; - } - - function formatDate(dateStr: string | null): string { - if (!dateStr) return 'Never'; - return new Date(dateStr).toLocaleString(); - } - - function formatRelativeTime(dateStr: string | null): string { - if (!dateStr) return 'Never'; - const date = new Date(dateStr); - const now = new Date(); - const diffMs = now.getTime() - date.getTime(); - const diffMins = Math.floor(diffMs / 60000); - const diffHours = Math.floor(diffMs / 3600000); - const diffDays = Math.floor(diffMs / 86400000); - - if (diffMins < 1) return 'Just now'; - if (diffMins < 60) return `${diffMins}m ago`; - if (diffHours < 24) return `${diffHours}h ago`; - if (diffDays < 7) return `${diffDays}d ago`; - return date.toLocaleDateString(); - } + // Fetch activity (only when activity tab is active) + const { data: activityData } = useQuery({ + queryKey: ['library-activity', libraryId, activityPage], + queryFn: async () => { + const res = await api.get(`/libraries/${libraryId}/activity?page=${activityPage}&page_size=50`) + return res.data + }, + enabled: !!libraryId && activeTab === 'activity', + }) const tabs = [ { id: 'overview' as TabType, name: 'Overview' }, { id: 'media' as TabType, name: 'Media' }, { id: 'activity' as TabType, name: 'Activity' }, - ]; + ] - if (loading) { + if (isLoading) { return (
- ); + ) } if (error || !details) { return (
- {error || 'Library not found'} + {error instanceof Error ? error.message : 'Library not found'}
- ); + ) } return (
{/* Header */}
- - +
{details.media_type === 'movie' ? ( @@ -240,12 +187,12 @@ export default function LibraryDetail() { )}
-

{details.name}

-

- {details.service_name} • {details.type} +

{details.name}

+

+ {details.service_name} • {details.type} {details.active_sessions > 0 && ( - - • {details.active_sessions} active session{details.active_sessions !== 1 ? 's' : ''} + + • {details.active_sessions} active session{details.active_sessions !== 1 ? 's' : ''} )}

@@ -254,7 +201,7 @@ export default function LibraryDetail() {
{/* Tabs */} -
+
- {/* Media Table */} + {/* Media Grid View */} + {mediaView === 'grid' && ( +
+ {(mediaData?.items || []).map((item) => ( +
+ {/* Poster Image */} +
+ {item.title} { + const target = e.target as HTMLImageElement + target.style.display = 'none' + const parent = target.parentElement + if (parent && !parent.querySelector('.fallback-icon')) { + const fallback = document.createElement('div') + fallback.className = 'fallback-icon absolute inset-0 flex items-center justify-center' + fallback.innerHTML = `` + parent.appendChild(fallback) + } + }} + /> + {/* Watch count badge */} + {item.watch_count > 0 && ( +
+ {item.watch_count} {item.watch_count === 1 ? 'play' : 'plays'} +
+ )} +
+ {/* Info */} +
+

+ {item.title} +

+
+ + {item.year || '—'} + + + {formatBytes(item.size_bytes)} + +
+ {item.last_watched_at && ( +

+ {formatRelativeTime(item.last_watched_at)} +

+ )} +
+
+ ))} + {(!mediaData?.items || mediaData.items.length === 0) && ( +
+ No media items found +
+ )} +
+ )} + + {/* Media Table View */} + {mediaView === 'table' && (
+ )} {/* Media Pagination */} {mediaData && mediaData.total_pages > 1 && ( diff --git a/frontend/src/pages/UserDetail.tsx b/frontend/src/pages/UserDetail.tsx index dfb9987..2823e19 100644 --- a/frontend/src/pages/UserDetail.tsx +++ b/frontend/src/pages/UserDetail.tsx @@ -15,7 +15,8 @@ import { ChevronRightIcon, ComputerDesktopIcon, GlobeAltIcon, - SignalIcon + SignalIcon, + MagnifyingGlassIcon } from '@heroicons/react/24/outline' import { ResponsiveContainer, @@ -32,6 +33,26 @@ interface GenreStatsResponse { genres: { genre: string; plays: number; duration_seconds: number }[] } +interface TimelineResponse { + user_id: number + period_days: number + calendar_heatmap: { date: string; plays: number; duration_seconds: number }[] + sessions: { + started_at: string | null + ended_at: string | null + duration_seconds: number + item_count: number + items: { + id: number + media_title: string + media_type: string | null + duration_seconds: number + played_percentage: number + started_at: string | null + }[] + }[] +} + interface Library { id: number name: string @@ -116,9 +137,11 @@ function StatsBox({ title, stats }: { title: string; stats: UserStats }) { export default function UserDetail() { const { userId } = useParams<{ userId: string }>() const queryClient = useQueryClient() - const [activeTab, setActiveTab] = useState<'overview' | 'activity'>('overview') + const [activeTab, setActiveTab] = useState<'overview' | 'activity' | 'timeline'>('overview') const [activityPage, setActivityPage] = useState(1) const [activityLibraryFilter, setActivityLibraryFilter] = useState('') + const [activityTypeFilter, setActivityTypeFilter] = useState('') + const [activitySearch, setActivitySearch] = useState('') const [expandedRows, setExpandedRows] = useState>(new Set()) const toggleExpanded = (id: number) => { @@ -149,13 +172,15 @@ export default function UserDetail() { }) const { data: activityData, isLoading: activityLoading } = useQuery({ - queryKey: ['userActivity', userId, 
activityPage, activityLibraryFilter], + queryKey: ['userActivity', userId, activityPage, activityLibraryFilter, activityTypeFilter, activitySearch], queryFn: async () => { const params = new URLSearchParams({ page: activityPage.toString(), page_size: '25' }) if (activityLibraryFilter) params.append('library_id', activityLibraryFilter) + if (activityTypeFilter) params.append('media_type', activityTypeFilter) + if (activitySearch) params.append('search', activitySearch) const res = await api.get(`/users/${userId}/activity?${params}`) return res.data }, @@ -172,6 +197,16 @@ export default function UserDetail() { enabled: !!userId && activeTab === 'overview', }) + // Fetch user timeline data + const { data: timelineData } = useQuery({ + queryKey: ['userTimeline', userId], + queryFn: async () => { + const res = await api.get(`/users/${userId}/timeline?days=90`) + return res.data + }, + enabled: !!userId && activeTab === 'timeline', + }) + const toggleHiddenMutation = useMutation({ mutationFn: async () => { const res = await api.patch(`/users/${userId}`, { is_hidden: !user?.is_hidden }) @@ -280,9 +315,142 @@ export default function UserDetail() { > Activity +
{/* Tab Content */} + {activeTab === 'timeline' && ( +
+ {/* Calendar Heatmap */} +
+

Watch Calendar (Last 90 Days)

+ {timelineData && timelineData.calendar_heatmap.length > 0 ? (() => { + const maxPlays = Math.max(...timelineData.calendar_heatmap.map(d => d.plays)) + const heatmapMap = new Map(timelineData.calendar_heatmap.map(d => [d.date, d])) + // Generate last 90 days + const days: { date: Date; dateStr: string }[] = [] + for (let i = 89; i >= 0; i--) { + const d = new Date() + d.setDate(d.getDate() - i) + days.push({ date: d, dateStr: d.toISOString().split('T')[0] }) + } + // Group by week + const weeks: typeof days[] = [] + let currentWeek: typeof days = [] + days.forEach((d, i) => { + currentWeek.push(d) + if (d.date.getDay() === 0 || i === days.length - 1) { + weeks.push(currentWeek) + currentWeek = [] + } + }) + return ( +
+
+ {weeks.map((week, wi) => ( +
+ {week.map((day) => { + const data = heatmapMap.get(day.dateStr) + const plays = data?.plays || 0 + const intensity = maxPlays > 0 ? plays / maxPlays : 0 + const isDark = document.documentElement.classList.contains('dark') + let bg = isDark ? 'var(--color-dark-700)' : 'var(--color-gray-100)' + if (plays > 0) { + if (intensity > 0.75) bg = 'var(--color-primary-500)' + else if (intensity > 0.5) bg = 'var(--color-primary-400)' + else if (intensity > 0.25) bg = isDark ? 'var(--color-primary-300)' : 'var(--color-primary-300)' + else bg = isDark ? 'var(--color-primary-200)' : 'var(--color-primary-200)' + } + return ( +
+ ) + })} +
+ ))} +
+
+ Less + {[0, 0.25, 0.5, 0.75, 1].map((t, i) => { + const isDark = document.documentElement.classList.contains('dark') + let bg = isDark ? 'var(--color-dark-700)' : 'var(--color-gray-100)' + if (t > 0.75) bg = 'var(--color-primary-500)' + else if (t > 0.5) bg = 'var(--color-primary-400)' + else if (t > 0.25) bg = 'var(--color-primary-300)' + else if (t > 0) bg = 'var(--color-primary-200)' + return
+ })} + More +
+
+ ) + })() : ( +

No watch history in the last 90 days

+ )} +
+ + {/* Recent Sessions */} +
+

Recent Watch Sessions

+ {timelineData && timelineData.sessions.length > 0 ? ( +
+ {[...timelineData.sessions].reverse().slice(0, 20).map((session, idx) => ( +
+
+ + + {session.started_at ? new Date(session.started_at).toLocaleDateString(undefined, { weekday: 'short', month: 'short', day: 'numeric' }) : 'Unknown'} + + + {session.started_at ? new Date(session.started_at).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' }) : ''} + {session.ended_at ? ` – ${new Date(session.ended_at).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })}` : ''} + + + {session.item_count} item{session.item_count !== 1 ? 's' : ''} + + + {formatDurationLong(session.duration_seconds)} + +
+
+ {session.items.map((item) => ( +
+ {item.media_type === 'movie' ? ( + + ) : ( + + )} + {item.media_title} + + {formatDurationLong(item.duration_seconds)} + {item.played_percentage > 0 && ` (${Math.round(item.played_percentage)}%)`} + +
+ ))} +
+
+ ))} +
+ ) : ( +

No recent sessions found

+ )} +
+
+ )} + {activeTab === 'overview' ? (
{/* User Stats */} @@ -384,6 +552,16 @@ export default function UserDetail() {
{/* Activity Filters */}
+
+ + { setActivitySearch(e.target.value); setActivityPage(1); }} + className="w-full pl-9 pr-4 py-2 bg-gray-50 dark:bg-dark-700 border border-gray-200 dark:border-dark-600 rounded-lg text-gray-900 dark:text-white text-sm placeholder-gray-400 dark:placeholder-dark-400 focus:outline-none focus:ring-2 focus:ring-primary-500" + /> +
+
From 354ffad6f311bd5fbcdde7053b00ed05375703b9 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 18:20:37 +0100 Subject: [PATCH 092/122] feat: Phase 4 - Advanced Analytics (5 new endpoints + Analytics page) Phase 4 - Advanced Analytics (Session 10): Backend - activity.py: - GET /activity/concurrent-streams: Historical concurrent stream peaks, daily peaks chart data, hourly average concurrency - GET /activity/duration-stats: Session length distribution, avg/median duration, movies vs series comparison - GET /activity/completion-rates: Content completion pie chart (completed >90% / partial / abandoned), by-type breakdown, most frequently abandoned content - GET /activity/binge-stats: Binge-watch detection (3+ episodes of same series), top binged series, top bingers leaderboard Backend - media.py: - GET /media/content-reach: Shared vs Solo vs Unwatched content analysis with viewer counts, top shared content, largest solo files (cleanup candidates) Frontend: - New Analytics.tsx page with 5 tabs: 1. Concurrent Streams (daily peak area chart + hourly avg bar) 2. Duration Stats (distribution histogram + by-type cards) 3. Completion Rates (donut chart + stacked bars + abandoned list) 4. Binge Watch (top series, top users, recent sessions) 5. 
Content Reach (shared/solo/unwatched pie + lists) - Route /analytics added to App.tsx - ChartPieIcon nav item added to Layout.tsx sidebar Documentation: - Phase 4 marked as fully complete in both doc files - Implementation history updated --- DEVELOPMENT_STATUS.md | 21 +- PLANNED_FEATURES.md | 77 +-- backend/app/api/routes/activity.py | 452 ++++++++++++++++ backend/app/api/routes/media.py | 108 +++- frontend/src/App.tsx | 2 + frontend/src/components/Layout.tsx | 2 + frontend/src/pages/Analytics.tsx | 835 +++++++++++++++++++++++++++++ 7 files changed, 1451 insertions(+), 46 deletions(-) create mode 100644 frontend/src/pages/Analytics.tsx diff --git a/DEVELOPMENT_STATUS.md b/DEVELOPMENT_STATUS.md index 67b65a7..6c874ef 100644 --- a/DEVELOPMENT_STATUS.md +++ b/DEVELOPMENT_STATUS.md @@ -103,12 +103,12 @@ backend/app/ │ ├── __init__.py # ★ NEU: Test-Setup │ └── test_smoke.py # ★ NEU: Smoke-Test (Settings-Loading) └── api/routes/ - ├── activity.py # GET /activity/, /stats, /active, /genre-stats, /watch-heatmap (+ Rate Limiting) + ├── activity.py # GET /activity/, /stats, /active, /genre-stats, /watch-heatmap, /concurrent-streams, /duration-stats, /completion-rates, /binge-stats ├── audit.py # GET/DELETE /audit/logs, /recent, etc. 
├── auth.py # POST /auth/login, /register, /refresh, /logout (httpOnly Cookies, CSRF) ├── jobs.py # GET/POST /jobs/, trigger, interval (Admin-Only) ├── libraries.py # GET /libraries/, /stats, /{id}/details, /media, /activity - ├── media.py # GET /media/stats, /dashboard-stats, /watch-stats, /audit-log, /{id}/image (Poster Proxy) + ├── media.py # GET /media/stats, /dashboard-stats, /watch-stats, /audit-log, /{id}/image, /content-reach ├── notifications.py # CRUD /notifications/, test, preview-template (Admin-Only, URL Validation) ├── rules.py # CRUD /rules/, templates, export/import, bulk (Admin-Only, File Size Limit) ├── services.py # CRUD /services/, test, sync (Admin-Only, URL Validation) @@ -146,6 +146,7 @@ frontend/src/ ├── LibraryDetail.tsx # 3 Tabs: Overview, Media (Table+Grid), Activity ├── Users.tsx # User-Tabelle mit Stats ├── UserDetail.tsx # 3 Tabs: Overview, Activity (Type/Search Filter), Timeline (Heatmap) + ├── Analytics.tsx # 5 Tabs: Concurrent Streams, Duration, Completion, Binge Watch, Content Reach ├── Activity.tsx # Active Sessions + Activity-Log ├── History.tsx # Cleanup-Audit-Log ├── Jobs.tsx # ★ REWRITE: Live-Progress-Bars, WebSocket-Status, Running-Panel @@ -270,14 +271,14 @@ frontend/src/ | Smoke Tests | ✅ `tests/test_smoke.py` + pytest | — | ✅ | Initiales Test-Setup mit pytest-asyncio | | Dependabot | ✅ `.github/dependabot.yml` | — | ✅ | Automatische Dependency-Updates auf develop | -### Phase 4 – Advanced Analytics ⚠️ TEILWEISE ERLEDIGT (Session 9+10) +### Phase 4 – Advanced Analytics ✅ ERLEDIGT (Session 9+10) - [x] Watch Patterns Heatmap (7x24 Grid) – Activity.tsx (Session 9) - [x] User Activity Timeline / Calendar Heatmap – UserDetail.tsx (Session 10) -- [ ] Concurrent Streams Analysis -- [ ] Watch Duration Stats -- [ ] Completion Rate Analytics -- [ ] Binge-Watch Detection -- [ ] Shared vs. 
Solo Content Analysis +- [x] Concurrent Streams Analysis – Analytics.tsx (Session 10) +- [x] Watch Duration Stats – Analytics.tsx (Session 10) +- [x] Completion Rate Analytics – Analytics.tsx (Session 10) +- [x] Binge-Watch Detection – Analytics.tsx (Session 10) +- [x] Shared vs. Solo Content Analysis – Analytics.tsx (Session 10) ### Phase 5 – Smart Cleanup Integration ❌ NICHT BEGONNEN - [ ] Cleanup Rules per User ("Delete only if NO user watched in X days") @@ -503,7 +504,7 @@ Das Backend ist gut strukturiert mit: 21. ~~Dependabot Setup~~ ✅ ### Priorität 7: Phase 4+ (Zukunft) -22. Advanced Analytics (~~Heatmaps~~ ✅ Session 9, ~~User Timeline~~ ✅ Session 10, Completion Rates, Binge Detection) +22. ~~Advanced Analytics~~ ✅ (Session 9+10): Heatmaps, User Timeline, Concurrent Streams, Duration Stats, Completion Rates, Binge Detection, Content Reach 23. Smart Cleanup Rules (Per-User Conditions) 24. ~~Genre Distribution Charts~~ ✅ (Session 9) 25. ~~User Detail: Favorite Genres~~ ✅ (Session 9), ~~Timeline-Tab~~ ✅ (Session 10) @@ -580,5 +581,5 @@ docker compose -f docker-compose.dev.yml up --build | 22.02.2026 (6) | **Session 6 – Bugfix & Expand-Rows**: BUG-011 behoben: PlaybackActivity `position_ticks`/`runtime_ticks` Integer→BigInteger (PostgreSQL int32 overflow bei Emby-Ticks >2.1B). DB-Migration hinzugefügt. Fehlerbehandlung in `_sync_active_sessions` und `services.py` mit `db.rollback()`. ResponsiveTable: Expand-Row-Support (`onRowClick`, `isExpanded`, `expandedContent`). Preview.tsx: Beide Tabellen (Series+Movies) auf ResponsiveTable migriert (letzte Seite). Expand-Rows auf Activity, UserDetail, LibraryDetail (IP, Device, Play Method, Progress-Bar). ConfirmDialog: `aria-modal`, Focus-Trap, Escape-Key, Click-Outside. Library Activity API: `ip_address`, `transcode_video`, `transcode_audio` hinzugefügt. BUG-012: Radarr-Pfad Ordner→Datei-Pfad (Movie-Watch-Statistiken). BUG-013: User Last Seen/Watched/Client Fallback-Logik. 
Branch: `feature/phase2-enhancements-and-docs` | | 24.02.2026 (7) | **Session 7 – Security Hardening I** (PR #19 `feature/security-hardening`): CORS Lockdown (Wildcard-Warnung in Production). API-Key/Notification-Secret Masking. Password Complexity Enforcement. Security Headers Middleware (X-Frame-Options, X-Content-Type-Options, Referrer-Policy, Permissions-Policy). WebSocket Auth (kurzlebiger Token). Refresh Token Rotation (altes Token revoked). Account Lockout (DB-Migration für `failed_login_attempts`, `locked_until`). Trusted Proxy Config. Rate-Limit Improvements. System Settings Allowlist. Staging Path Validation. Rule Import File Size Limit. `datetime.utcnow()` → `datetime.now(timezone.utc)`. Audit Log Data Retention Job. SECRET_KEY Enforcement in docker-compose. Frontend WebSocket Auth + Refresh Token Rotation Support. Branch: `feature/security-hardening`, Commit: `3058956` (PR #19) | | 24.02.2026 (8) | **Session 8 – Security Hardening II + httpOnly Cookies** (PRs #20, #21, #22): httpOnly Cookie Auth Migration (ADR-001) – JWT aus localStorage entfernt, Set-Cookie im Backend, `credentials: 'include'` im Frontend, Cookie-Clearing bei Logout. CSRF Double-Submit Cookie Middleware (`csrf.py`). Security Event Logging (`security_events.py` – strukturiertes JSON für Auth/Rate-Limit/CSRF Events). SSRF-Safe URL Validation (`url_validation.py`). `escape_like()` für SQL-Injection-Schutz. Content-Security-Policy Header. WebSocket Connection Limits per IP. Body Size Limit Middleware. Admin-Only auf Rules, Jobs, Staging, Services, Notifications, System-Settings Routes. Outbound URL Validation auf alle Service-Connection/Notification/Setup Endpoints. Enhanced Rate Limiting mit Security-Event-Logging auf allen API-Routes. Refresh Token Cleanup Scheduler-Job. CI/CD: `tests.yml` (Backend+Frontend Tests), `security-scan.yml` (SAST/DAST). Pytest Setup mit Smoke Test. Dependabot Config. npm Dependency Bump. 
Branch: `feature/security-hardening2`, Commits: `148d0f6` (PR #20), `f0eec84` (PR #21), `657ad70` (PR #22) | | 26.02.2026 (10) | **Session 10 – Phase 2 Completion**: User Timeline Tab (Backend: `GET /users/{id}/timeline` mit Calendar Heatmap + Session-Gruppierung; Frontend: 90-Tage-Heatmap + Recent Watch Sessions). Image Proxy (Backend: `GET /media/{id}/image` mit In-Memory-Cache, Episode→Series Fallback). LibraryDetail Grid View (Table/Grid Toggle, Poster-Karten mit Bild, Titel, Jahr, Größe, Play-Count). UserDetail Activity: Type-Filter (Movies/Episodes) + Suchfeld. Branch: `feature/phase2-completion` | +| 26.02.2026 (10) | **Session 10 – Phase 2 Completion + Phase 4 Advanced Analytics**: Phase 2: User Timeline Tab (Backend + Frontend Calendar Heatmap + Session-Gruppierung), Image Proxy (`GET /media/{id}/image`), LibraryDetail Grid View (Table/Grid Toggle mit Poster-Bildern), UserDetail Activity Type-Filter + Suche. Phase 4: Concurrent Streams Analysis, Watch Duration Stats, Completion Rate Analytics, Binge-Watch Detection, Shared vs. Solo Content Analysis. Neue Analytics-Seite mit 5 Tabs. Branches: `feature/phase2-completion`, `feature/phase4-advanced-analytics` | | 30.12.2024 | Initiale Version: Session-Zusammenfassung (Rules Export, Sidebar, Theme Toggle, Staging UI) | diff --git a/PLANNED_FEATURES.md b/PLANNED_FEATURES.md index 12431ef..129ca6b 100644 --- a/PLANNED_FEATURES.md +++ b/PLANNED_FEATURES.md @@ -167,52 +167,52 @@ Deeper insights derived from watch data. - Plan maintenance windows during low-activity periods ✅ - See if weekends differ from weekdays ✅ -### Concurrent Streams Analysis +### Concurrent Streams Analysis ✅ Implemented (Session 10) **Purpose**: Track how many simultaneous streams occur. 
-- Historical peak concurrent users -- Time of day with most concurrent streams +- Historical peak concurrent users ✅ Daily peak chart + overall peak +- Time of day with most concurrent streams ✅ Hourly average bar chart - Useful for: - - Server capacity planning - - Transcode load understanding - - License/limit management + - Server capacity planning ✅ + - Transcode load understanding ✅ + - License/limit management ✅ -### Watch Duration Stats +### Watch Duration Stats ✅ Implemented (Session 10) **Purpose**: Understand typical viewing sessions. -- Average session length (overall and per library) -- Distribution chart of session lengths -- Compare movies vs. series session lengths -- Identify "quick check" vs. "full watch" patterns +- Average session length (overall and per library) ✅ +- Distribution chart of session lengths ✅ Bucketed bar chart +- Compare movies vs. series session lengths ✅ By-type cards +- Identify "quick check" vs. "full watch" patterns ✅ -### Completion Rate Analytics +### Completion Rate Analytics ✅ Implemented (Session 10) **Purpose**: Track how often content is finished. -- Percentage of movies watched to completion (>90%) -- Series episode completion rates -- Identify content that users abandon early -- Useful for cleanup decisions: "Content rarely finished" +- Percentage of movies watched to completion (>90%) ✅ Pie chart +- Series episode completion rates ✅ By-type stacked bars +- Identify content that users abandon early ✅ Most abandoned list +- Useful for cleanup decisions: "Content rarely finished" ✅ -### Binge-Watch Detection +### Binge-Watch Detection ✅ Implemented (Session 10) **Purpose**: Identify when users watch multiple episodes consecutively. 
-- Detect 3+ episodes of same series in one session -- Track which series trigger binge behavior -- User-specific binge patterns +- Detect 3+ episodes of same series in one session ✅ 4hr gap detection +- Track which series trigger binge behavior ✅ Top binged series +- User-specific binge patterns ✅ Top bingers leaderboard - Useful for: - - Understanding engagement - - Cleanup rules: "Series being binged should not be deleted" + - Understanding engagement ✅ + - Cleanup rules: "Series being binged should not be deleted" ✅ -### Shared vs. Solo Content +### Shared vs. Solo Content ✅ Implemented (Session 10) **Purpose**: Identify content watched by multiple users vs. single users. -- **Shared Content**: Watched by 2+ unique users -- **Solo Content**: Only watched by 1 user -- **Unwatched**: Not watched by anyone -- Useful for cleanup: - - Keep shared content longer - - Solo content can be cleaned up sooner - - Unwatched content is priority for cleanup +- **Shared Content**: Watched by 2+ unique users ✅ +- **Solo Content**: Only watched by 1 user ✅ +- **Unwatched**: Not watched by anyone ✅ +- Useful for cleanup: ✅ + - Keep shared content longer ✅ + - Solo content can be cleaned up sooner ✅ + - Unwatched content is priority for cleanup ✅ --- @@ -290,11 +290,12 @@ Delete movie if: - [x] Genre Distribution Charts – Backend `/activity/genre-stats` + RadarChart (Activity) + BarChart (Dashboard, LibraryDetail, UserDetail) (Session 9) - [x] Watch Patterns Heatmap – Backend `/activity/watch-heatmap` + CSS Grid 7×24 (Activity) (Session 9) -### Phase 4 - Advanced Analytics -- [ ] Concurrent Streams Analysis -- [ ] Completion Rate Analytics -- [ ] Binge-Watch Detection -- [ ] Shared vs. 
Solo Content Analysis +### Phase 4 - Advanced Analytics ✅ Complete (Session 9+10) +- [x] Concurrent Streams Analysis – Backend `/activity/concurrent-streams` + Frontend Analytics page (Session 10) +- [x] Watch Duration Stats – Backend `/activity/duration-stats` + Frontend Analytics page (Session 10) +- [x] Completion Rate Analytics – Backend `/activity/completion-rates` + Frontend Analytics page (Session 10) +- [x] Binge-Watch Detection – Backend `/activity/binge-stats` + Frontend Analytics page (Session 10) +- [x] Shared vs. Solo Content – Backend `/media/content-reach` + Frontend Analytics page (Session 10) ### Phase 5 - Smart Cleanup Integration - [ ] Cleanup Rules per User condition @@ -414,6 +415,12 @@ To enable these features, the following data needs to be tracked: | 2 | User Timeline Tab (Calendar Heatmap + Sessions) | Session 10 | ✅ | | 2 | LibraryDetail Grid View with Poster Images | Session 10 | ✅ | | 2 | UserDetail Activity Type Filter + Search | Session 10 | ✅ | +| 4 | Concurrent Streams API (`/activity/concurrent-streams`) | Session 10 | ✅ | +| 4 | Duration Stats API (`/activity/duration-stats`) | Session 10 | ✅ | +| 4 | Completion Rates API (`/activity/completion-rates`) | Session 10 | ✅ | +| 4 | Binge Stats API (`/activity/binge-stats`) | Session 10 | ✅ | +| 4 | Content Reach API (`/media/content-reach`) | Session 10 | ✅ | +| 4 | Analytics Page (5-tab: Concurrent, Duration, Completion, Binge, Reach) | Session 10 | ✅ | --- diff --git a/backend/app/api/routes/activity.py b/backend/app/api/routes/activity.py index 709eef8..7b26c93 100644 --- a/backend/app/api/routes/activity.py +++ b/backend/app/api/routes/activity.py @@ -396,3 +396,455 @@ async def get_watch_heatmap( "period_days": days, "heatmap": heatmap } + + +@router.get("/concurrent-streams") +@limiter.limit(RateLimits.API_READ) +async def get_concurrent_streams( + request: Request, + days: int = Query(30, ge=1, le=365), + db: AsyncSession = Depends(get_db), + current_user: User = 
Depends(get_current_user) +): + """ + Analyse historical concurrent stream peaks. + + Returns daily peak concurrent streams and an overall peak, + plus hourly average concurrency for capacity planning. + """ + since = datetime.now(timezone.utc) - timedelta(days=days) + + # Fetch all sessions in the period with start and end times + result = await db.execute( + select( + PlaybackActivity.started_at, + PlaybackActivity.ended_at, + PlaybackActivity.duration_seconds, + ) + .where(and_( + PlaybackActivity.started_at >= since, + PlaybackActivity.started_at.isnot(None), + )) + .order_by(PlaybackActivity.started_at) + ) + sessions = result.all() + + if not sessions: + return { + "period_days": days, + "overall_peak": 0, + "overall_peak_time": None, + "daily_peaks": [], + "hourly_avg_concurrent": [{"hour": h, "avg_concurrent": 0.0} for h in range(24)], + } + + # Build events list: +1 at start, -1 at end + events: list[tuple[datetime, int]] = [] + for s in sessions: + start = s.started_at + end = s.ended_at or (start + timedelta(seconds=s.duration_seconds or 0)) + events.append((start, 1)) + events.append((end, -1)) + + events.sort(key=lambda e: (e[0], e[1])) # process starts before ends at same time + + # Sweep to find peaks + current = 0 + overall_peak = 0 + overall_peak_time = None + daily_peaks: dict[str, tuple[int, datetime]] = {} # date_str -> (peak, time) + hourly_totals: dict[int, list[int]] = {h: [] for h in range(24)} + + for ts, delta in events: + current += delta + day_str = ts.strftime("%Y-%m-%d") + hour = ts.hour + + if current > overall_peak: + overall_peak = current + overall_peak_time = ts + + if day_str not in daily_peaks or current > daily_peaks[day_str][0]: + daily_peaks[day_str] = (current, ts) + + # Hourly average: sample at each event + # Simpler approach: count overlapping sessions per hour bucket + for s in sessions: + start = s.started_at + end = s.ended_at or (start + timedelta(seconds=s.duration_seconds or 0)) + # Each hour this session spans gets 
+1 + t = start.replace(minute=0, second=0, microsecond=0) + while t < end: + hourly_totals[t.hour].append(1) + t += timedelta(hours=1) + + # Average across days observed + total_days = max(1, (datetime.now(timezone.utc) - since).days) + hourly_avg = [ + { + "hour": h, + "avg_concurrent": round(len(hourly_totals[h]) / total_days, 2) + } + for h in range(24) + ] + + daily_list = sorted( + [ + {"date": d, "peak_concurrent": v[0], "peak_time": v[1].isoformat()} + for d, v in daily_peaks.items() + ], + key=lambda x: x["date"], + reverse=True, + )[:30] # last 30 days only + + return { + "period_days": days, + "overall_peak": overall_peak, + "overall_peak_time": overall_peak_time.isoformat() if overall_peak_time else None, + "daily_peaks": daily_list, + "hourly_avg_concurrent": hourly_avg, + } + + +@router.get("/duration-stats") +@limiter.limit(RateLimits.API_READ) +async def get_duration_stats( + request: Request, + days: int = Query(30, ge=1, le=365), + library_id: Optional[int] = None, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """ + Watch duration statistics: average session length, distribution, + movies vs series comparison. 
+ """ + since = datetime.now(timezone.utc) - timedelta(days=days) + + query = ( + select( + PlaybackActivity.duration_seconds, + PlaybackActivity.played_percentage, + MediaItem.media_type, + ) + .join(MediaItem, PlaybackActivity.media_item_id == MediaItem.id, isouter=True) + .where(and_( + PlaybackActivity.started_at >= since, + PlaybackActivity.duration_seconds > 0, + )) + ) + if library_id is not None: + query = query.where(PlaybackActivity.library_id == library_id) + + result = await db.execute(query) + rows = result.all() + + if not rows: + return { + "period_days": days, + "total_sessions": 0, + "avg_duration_seconds": 0, + "median_duration_seconds": 0, + "total_watch_time_seconds": 0, + "by_type": {}, + "distribution": [], + } + + durations = [r.duration_seconds for r in rows] + durations.sort() + + # Group by media type + type_stats: dict[str, dict] = {} + for row in rows: + mt = str(row.media_type) if row.media_type else "unknown" + # Strip enum prefix if present (e.g. "MediaType.movie" -> "movie") + if "." 
in mt: + mt = mt.split(".")[-1] + if mt not in type_stats: + type_stats[mt] = {"count": 0, "total_seconds": 0, "durations": []} + type_stats[mt]["count"] += 1 + type_stats[mt]["total_seconds"] += row.duration_seconds + type_stats[mt]["durations"].append(row.duration_seconds) + + by_type = {} + for mt, stats in type_stats.items(): + d = sorted(stats["durations"]) + by_type[mt] = { + "count": stats["count"], + "avg_duration_seconds": round(stats["total_seconds"] / stats["count"]), + "median_duration_seconds": d[len(d) // 2], + "total_watch_time_seconds": stats["total_seconds"], + } + + # Duration distribution buckets (in minutes) + buckets = [ + (0, 5, "< 5 min"), + (5, 15, "5-15 min"), + (15, 30, "15-30 min"), + (30, 60, "30-60 min"), + (60, 90, "1-1.5 hrs"), + (90, 120, "1.5-2 hrs"), + (120, 180, "2-3 hrs"), + (180, 999999, "3+ hrs"), + ] + distribution = [] + for low, high, label in buckets: + count = sum(1 for d in durations if low * 60 <= d < high * 60) + distribution.append({"label": label, "count": count}) + + return { + "period_days": days, + "total_sessions": len(durations), + "avg_duration_seconds": round(sum(durations) / len(durations)), + "median_duration_seconds": durations[len(durations) // 2], + "total_watch_time_seconds": sum(durations), + "by_type": by_type, + "distribution": distribution, + } + + +@router.get("/completion-rates") +@limiter.limit(RateLimits.API_READ) +async def get_completion_rates( + request: Request, + days: int = Query(90, ge=1, le=365), + library_id: Optional[int] = None, + db: AsyncSession = Depends(get_db), + current_user: User = Depends(get_current_user) +): + """ + Content completion rate analytics. + + Shows what percentage of content is watched to completion (>90%), + partially watched, or abandoned early. 
+ """ + since = datetime.now(timezone.utc) - timedelta(days=days) + + query = ( + select( + PlaybackActivity.played_percentage, + PlaybackActivity.duration_seconds, + MediaItem.media_type, + MediaItem.title, + MediaItem.id.label("media_id"), + ) + .join(MediaItem, PlaybackActivity.media_item_id == MediaItem.id) + .where(and_( + PlaybackActivity.started_at >= since, + PlaybackActivity.duration_seconds > 60, # skip very short sessions + )) + ) + if library_id is not None: + query = query.where(PlaybackActivity.library_id == library_id) + + result = await db.execute(query) + rows = result.all() + + if not rows: + return { + "period_days": days, + "total_plays": 0, + "overall": {"completed": 0, "partial": 0, "abandoned": 0}, + "by_type": {}, + "most_abandoned": [], + } + + # Categorize + completed = 0 # >90% + partial = 0 # 25-90% + abandoned = 0 # <25% + + type_completion: dict[str, dict[str, int]] = {} + abandon_count: dict[int, dict] = {} # media_id -> {title, type, abandoned, total} + + for row in rows: + pct = row.played_percentage or 0 + mt = str(row.media_type) if row.media_type else "unknown" + if "." 
in mt: + mt = mt.split(".")[-1] + + if mt not in type_completion: + type_completion[mt] = {"completed": 0, "partial": 0, "abandoned": 0, "total": 0} + type_completion[mt]["total"] += 1 + + if pct >= 90: + completed += 1 + type_completion[mt]["completed"] += 1 + elif pct >= 25: + partial += 1 + type_completion[mt]["partial"] += 1 + else: + abandoned += 1 + type_completion[mt]["abandoned"] += 1 + + # Track abandoned content + if pct < 25 and row.media_id: + if row.media_id not in abandon_count: + abandon_count[row.media_id] = { + "media_id": row.media_id, + "title": row.title, + "media_type": mt, + "abandoned_count": 0, + "total_plays": 0, + } + abandon_count[row.media_id]["abandoned_count"] += 1 + abandon_count[row.media_id]["total_plays"] += 1 + + total = len(rows) + overall = { + "completed": completed, + "completed_pct": round(completed / total * 100, 1), + "partial": partial, + "partial_pct": round(partial / total * 100, 1), + "abandoned": abandoned, + "abandoned_pct": round(abandoned / total * 100, 1), + } + + by_type = {} + for mt, stats in type_completion.items(): + t = stats["total"] + by_type[mt] = { + "total": t, + "completed": stats["completed"], + "completed_pct": round(stats["completed"] / t * 100, 1) if t > 0 else 0, + "partial": stats["partial"], + "partial_pct": round(stats["partial"] / t * 100, 1) if t > 0 else 0, + "abandoned": stats["abandoned"], + "abandoned_pct": round(stats["abandoned"] / t * 100, 1) if t > 0 else 0, + } + + # Top 10 most abandoned + most_abandoned = sorted( + abandon_count.values(), + key=lambda x: x["abandoned_count"], + reverse=True, + )[:10] + + return { + "period_days": days, + "total_plays": total, + "overall": overall, + "by_type": by_type, + "most_abandoned": most_abandoned, + } + + +@router.get("/binge-stats") +@limiter.limit(RateLimits.API_READ) +async def get_binge_stats( + request: Request, + days: int = Query(30, ge=1, le=365), + min_episodes: int = Query(3, ge=2, le=20), + db: AsyncSession = Depends(get_db), + 
current_user: User = Depends(get_current_user) +): + """ + Binge-watch detection: find sessions where users watched 3+ episodes + of the same series consecutively. + + Returns binge sessions, top binged series, and per-user binge stats. + """ + since = datetime.now(timezone.utc) - timedelta(days=days) + + # Get episode watches grouped by user, ordered by time + result = await db.execute( + select( + PlaybackActivity.user_id, + PlaybackActivity.started_at, + PlaybackActivity.ended_at, + PlaybackActivity.duration_seconds, + PlaybackActivity.media_title, + MediaItem.title.label("item_title"), + MediaItem.media_type, + MediaItem.parent_id, + MediaItem.series_id, + MediaServerUser.name.label("user_name"), + ) + .join(MediaItem, PlaybackActivity.media_item_id == MediaItem.id) + .join(MediaServerUser, PlaybackActivity.user_id == MediaServerUser.id) + .where(and_( + PlaybackActivity.started_at >= since, + MediaItem.media_type == "episode", + )) + .order_by(PlaybackActivity.user_id, PlaybackActivity.started_at) + ) + rows = result.all() + + SESSION_GAP = timedelta(hours=4) # max gap between episodes in a binge + + # Group by user and detect binge sessions + binge_sessions: list[dict] = [] + series_binge_count: dict[str, int] = defaultdict(int) + user_binge_count: dict[str, dict] = {} # user_name -> {binges, episodes} + + current_user_id = None + current_series = None + streak: list = [] + + def flush_streak(): + nonlocal streak + if len(streak) >= min_episodes: + series_name = streak[0].series_id or streak[0].media_title.split(" - ")[0] if streak[0].media_title else "Unknown" + user_name = streak[0].user_name + binge_sessions.append({ + "user_name": user_name, + "series": series_name, + "episode_count": len(streak), + "started_at": streak[0].started_at.isoformat() if streak[0].started_at else None, + "ended_at": (streak[-1].ended_at or streak[-1].started_at).isoformat() if streak[-1].started_at else None, + "total_duration_seconds": sum(s.duration_seconds or 0 for s in 
streak), + }) + series_binge_count[series_name] += 1 + if user_name not in user_binge_count: + user_binge_count[user_name] = {"binges": 0, "total_episodes": 0} + user_binge_count[user_name]["binges"] += 1 + user_binge_count[user_name]["total_episodes"] += len(streak) + streak = [] + + for row in rows: + series_key = row.series_id or row.parent_id + + if row.user_id != current_user_id or series_key != current_series: + flush_streak() + current_user_id = row.user_id + current_series = series_key + streak = [row] + else: + # Check gap from last episode + last_end = streak[-1].ended_at or ( + streak[-1].started_at + timedelta(seconds=streak[-1].duration_seconds or 0) + ) if streak[-1].started_at else None + + if last_end and row.started_at and (row.started_at - last_end) <= SESSION_GAP: + streak.append(row) + else: + flush_streak() + streak = [row] + + flush_streak() # Don't forget last streak + + # Sort binge sessions by episode count (most impressive first) + binge_sessions.sort(key=lambda x: x["episode_count"], reverse=True) + + # Top binged series + top_series = sorted( + [{"series": s, "binge_count": c} for s, c in series_binge_count.items()], + key=lambda x: x["binge_count"], + reverse=True, + )[:10] + + # Top bingers (users) + top_users = sorted( + [{"user_name": u, **stats} for u, stats in user_binge_count.items()], + key=lambda x: x["binges"], + reverse=True, + )[:10] + + return { + "period_days": days, + "min_episodes": min_episodes, + "total_binge_sessions": len(binge_sessions), + "recent_binges": binge_sessions[:20], + "top_binged_series": top_series, + "top_bingers": top_users, + } diff --git a/backend/app/api/routes/media.py b/backend/app/api/routes/media.py index 00893f9..3a210d2 100644 --- a/backend/app/api/routes/media.py +++ b/backend/app/api/routes/media.py @@ -13,7 +13,7 @@ from ...core.database import get_db from ...core.rate_limit import limiter, RateLimits -from ...models import MediaItem, ServiceConnection, ServiceType, MediaType, ImportStats, 
CleanupLog, MediaServerUser, UserWatchHistory +from ...models import MediaItem, ServiceConnection, ServiceType, MediaType, ImportStats, CleanupLog, MediaServerUser, UserWatchHistory, Library from ...schemas import CleanupLogResponse from ..deps import get_current_user @@ -814,6 +814,112 @@ async def get_audit_log( } +@router.get("/content-reach") +@limiter.limit(RateLimits.API_READ) +async def get_content_reach( + request: Request, + library_id: Optional[int] = None, + media_type: Optional[str] = None, + db: AsyncSession = Depends(get_db), + current_user=Depends(get_current_user), +): + """ + Shared vs. Solo vs. Unwatched content analysis. + + - Shared: watched by 2+ unique users + - Solo: watched by exactly 1 user + - Unwatched: not watched by anyone + """ + # Build base media query (movies and series only, not episodes/seasons) + media_query = select(MediaItem.id, MediaItem.title, MediaItem.media_type, MediaItem.size_bytes) + filters = [MediaItem.media_type.in_(["movie", "series"])] + if library_id is not None: + filters.append(MediaItem.library_id == library_id) + if media_type: + filters.append(MediaItem.media_type == media_type) + media_query = media_query.where(and_(*filters)) + + media_result = await db.execute(media_query) + media_items = media_result.all() + + if not media_items: + return { + "total_items": 0, + "shared": {"count": 0, "pct": 0, "size_bytes": 0}, + "solo": {"count": 0, "pct": 0, "size_bytes": 0}, + "unwatched": {"count": 0, "pct": 0, "size_bytes": 0}, + "top_shared": [], + "top_solo_large": [], + } + + media_ids = [m.id for m in media_items] + media_map = {m.id: m for m in media_items} + + # Count unique viewers per media item + viewer_query = ( + select( + UserWatchHistory.media_item_id, + func.count(func.distinct(UserWatchHistory.user_id)).label("viewer_count"), + ) + .where(and_( + UserWatchHistory.media_item_id.in_(media_ids), + UserWatchHistory.is_played == True, + )) + .group_by(UserWatchHistory.media_item_id) + ) + viewer_result = 
await db.execute(viewer_query) + viewer_counts = {row.media_item_id: row.viewer_count for row in viewer_result.all()} + + shared_items = [] + solo_items = [] + unwatched_items = [] + + for m in media_items: + vc = viewer_counts.get(m.id, 0) + mt = str(m.media_type) + if "." in mt: + mt = mt.split(".")[-1] + info = {"media_id": m.id, "title": m.title, "media_type": mt, "size_bytes": m.size_bytes or 0, "viewer_count": vc} + if vc >= 2: + shared_items.append(info) + elif vc == 1: + solo_items.append(info) + else: + unwatched_items.append(info) + + total = len(media_items) + shared_size = sum(i["size_bytes"] for i in shared_items) + solo_size = sum(i["size_bytes"] for i in solo_items) + unwatched_size = sum(i["size_bytes"] for i in unwatched_items) + + # Top shared (most viewers) + top_shared = sorted(shared_items, key=lambda x: x["viewer_count"], reverse=True)[:10] + + # Top solo items by size (cleanup candidates) + top_solo_large = sorted(solo_items, key=lambda x: x["size_bytes"], reverse=True)[:10] + + return { + "total_items": total, + "shared": { + "count": len(shared_items), + "pct": round(len(shared_items) / total * 100, 1), + "size_bytes": shared_size, + }, + "solo": { + "count": len(solo_items), + "pct": round(len(solo_items) / total * 100, 1), + "size_bytes": solo_size, + }, + "unwatched": { + "count": len(unwatched_items), + "pct": round(len(unwatched_items) / total * 100, 1), + "size_bytes": unwatched_size, + }, + "top_shared": top_shared, + "top_solo_large": top_solo_large, + } + + # Simple in-memory image cache (max 200 items, TTL 1 hour) _image_cache: Dict[int, tuple[bytes, str, datetime]] = {} _IMAGE_CACHE_MAX = 200 diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index d3aba13..04ed45a 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -25,6 +25,7 @@ const Staging = lazy(() => import('./pages/Staging')) const Users = lazy(() => import('./pages/Users')) const UserDetail = lazy(() => import('./pages/UserDetail')) const Activity = 
lazy(() => import('./pages/Activity')) +const Analytics = lazy(() => import('./pages/Analytics')) interface SetupStatus { setup_complete: boolean @@ -143,6 +144,7 @@ function App() { }>} /> }>} /> }>} /> + }>} /> } /> diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index 26fbc0a..41a4e43 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -27,6 +27,7 @@ import { ComputerDesktopIcon, UsersIcon, ChartBarIcon, + ChartPieIcon, } from '@heroicons/react/24/outline' const navigation = [ @@ -34,6 +35,7 @@ const navigation = [ { name: 'Libraries', href: '/libraries', icon: FolderIcon }, { name: 'Users', href: '/users', icon: UsersIcon }, { name: 'Activity', href: '/activity', icon: ChartBarIcon }, + { name: 'Analytics', href: '/analytics', icon: ChartPieIcon }, { name: 'Services', href: '/services', icon: ServerStackIcon }, { name: 'Rules', href: '/rules', icon: ClipboardDocumentListIcon }, { name: 'Preview', href: '/preview', icon: EyeIcon }, diff --git a/frontend/src/pages/Analytics.tsx b/frontend/src/pages/Analytics.tsx new file mode 100644 index 0000000..500d573 --- /dev/null +++ b/frontend/src/pages/Analytics.tsx @@ -0,0 +1,835 @@ +import { useQuery } from '@tanstack/react-query' +import { useState } from 'react' +import { + SignalIcon, + ClockIcon, + ChartBarIcon, + FireIcon, + UsersIcon, + FilmIcon, + TvIcon, +} from '@heroicons/react/24/outline' +import { + ResponsiveContainer, + BarChart, Bar, + AreaChart, Area, + PieChart, Pie, Cell, + XAxis, YAxis, CartesianGrid, Tooltip, Legend, +} from 'recharts' +import api from '../lib/api' +import { formatBytes, formatDuration } from '../lib/utils' + +// ─── Types ─────────────────────────────────────────────────────── + +interface ConcurrentStreamsResponse { + period_days: number + overall_peak: number + overall_peak_time: string | null + daily_peaks: Array<{ + date: string + peak_concurrent: number + peak_time: string + }> + hourly_avg_concurrent: 
Array<{ + hour: number + avg_concurrent: number + }> +} + +interface DurationStatsResponse { + period_days: number + total_sessions: number + avg_duration_seconds: number + median_duration_seconds: number + total_watch_time_seconds: number + by_type: Record + distribution: Array<{ label: string; count: number }> +} + +interface CompletionRatesResponse { + period_days: number + total_plays: number + overall: { + completed: number + completed_pct: number + partial: number + partial_pct: number + abandoned: number + abandoned_pct: number + } + by_type: Record + most_abandoned: Array<{ + media_id: number + title: string + media_type: string + abandoned_count: number + total_plays: number + }> +} + +interface BingeStatsResponse { + period_days: number + min_episodes: number + total_binge_sessions: number + recent_binges: Array<{ + user_name: string + series: string + episode_count: number + started_at: string | null + ended_at: string | null + total_duration_seconds: number + }> + top_binged_series: Array<{ series: string; binge_count: number }> + top_bingers: Array<{ user_name: string; binges: number; total_episodes: number }> +} + +interface ContentReachResponse { + total_items: number + shared: { count: number; pct: number; size_bytes: number } + solo: { count: number; pct: number; size_bytes: number } + unwatched: { count: number; pct: number; size_bytes: number } + top_shared: Array<{ + media_id: number + title: string + media_type: string + size_bytes: number + viewer_count: number + }> + top_solo_large: Array<{ + media_id: number + title: string + media_type: string + size_bytes: number + viewer_count: number + }> +} + +// ─── Constants ─────────────────────────────────────────────────── + +type TabType = 'concurrent' | 'duration' | 'completion' | 'binge' | 'reach' + +const PIE_COLORS = ['#10b981', '#f59e0b', '#ef4444'] + +const tooltipStyle = { + backgroundColor: 'var(--color-dark-800)', + border: '1px solid var(--color-dark-700)', + borderRadius: '0.5rem', + 
color: '#fff', + fontSize: '0.875rem', +} + +// ─── Component ─────────────────────────────────────────────────── + +export default function Analytics() { + const [activeTab, setActiveTab] = useState('concurrent') + const [days, setDays] = useState(30) + + // Queries + const { data: concurrent } = useQuery({ + queryKey: ['analytics-concurrent', days], + queryFn: async () => (await api.get(`/activity/concurrent-streams?days=${days}`)).data, + enabled: activeTab === 'concurrent', + }) + + const { data: duration } = useQuery({ + queryKey: ['analytics-duration', days], + queryFn: async () => (await api.get(`/activity/duration-stats?days=${days}`)).data, + enabled: activeTab === 'duration', + }) + + const { data: completion } = useQuery({ + queryKey: ['analytics-completion', days], + queryFn: async () => (await api.get(`/activity/completion-rates?days=${days}`)).data, + enabled: activeTab === 'completion', + }) + + const { data: binge } = useQuery({ + queryKey: ['analytics-binge', days], + queryFn: async () => (await api.get(`/activity/binge-stats?days=${days}`)).data, + enabled: activeTab === 'binge', + }) + + const { data: reach } = useQuery({ + queryKey: ['analytics-reach'], + queryFn: async () => (await api.get('/media/content-reach')).data, + enabled: activeTab === 'reach', + }) + + const tabs = [ + { id: 'concurrent' as TabType, name: 'Concurrent Streams', icon: SignalIcon }, + { id: 'duration' as TabType, name: 'Duration', icon: ClockIcon }, + { id: 'completion' as TabType, name: 'Completion', icon: ChartBarIcon }, + { id: 'binge' as TabType, name: 'Binge Watch', icon: FireIcon }, + { id: 'reach' as TabType, name: 'Content Reach', icon: UsersIcon }, + ] + + return ( +
+ {/* Header */} +
+
+

Advanced Analytics

+

+ Deep insights into viewing patterns and content performance +

+
+ {activeTab !== 'reach' && ( + + )} +
+ + {/* Tabs */} +
+ +
+ + {/* ═══════════ CONCURRENT STREAMS ═══════════ */} + {activeTab === 'concurrent' && concurrent && ( +
+ {/* Peak Stats */} +
+
+

Overall Peak

+

+ {concurrent.overall_peak} +

+

+ concurrent streams +

+
+
+

Peak Time

+

+ {concurrent.overall_peak_time + ? new Date(concurrent.overall_peak_time).toLocaleString('en-US', { + weekday: 'short', month: 'short', day: 'numeric', + hour: 'numeric', minute: '2-digit', + }) + : 'N/A'} +

+
+
+

Avg Daily Peak

+

+ {concurrent.daily_peaks.length > 0 + ? (concurrent.daily_peaks.reduce((s, d) => s + d.peak_concurrent, 0) / concurrent.daily_peaks.length).toFixed(1) + : '0'} +

+
+
+ + {/* Daily Peaks Chart */} +
+

+ Daily Peak Concurrent Streams +

+
+ + + + d.slice(5)} + /> + + + + + +
+
+ + {/* Hourly Average Concurrency */} +
+

+ Average Concurrent Streams by Hour +

+
+ + + + `${h}:00`} + /> + + [value.toFixed(2), 'Avg Concurrent']} + /> + + + +
+
+
+ )} + + {/* ═══════════ DURATION STATS ═══════════ */} + {activeTab === 'duration' && duration && ( +
+ {/* Summary Stats */} +
+
+

Total Sessions

+

+ {duration.total_sessions.toLocaleString()} +

+
+
+

Avg Duration

+

+ {formatDuration(duration.avg_duration_seconds)} +

+
+
+

Median Duration

+

+ {formatDuration(duration.median_duration_seconds)} +

+
+
+

Total Watch Time

+

+ {formatDuration(duration.total_watch_time_seconds)} +

+
+
+ +
+ {/* Duration Distribution */} +
+

+ Session Length Distribution +

+
+ + + + + + + + + +
+
+ + {/* By Type Comparison */} +
+

+ Duration by Content Type +

+
+ {Object.entries(duration.by_type).map(([type, stats]) => ( +
+
+ {type === 'movie' ? ( + + ) : ( + + )} + {type} + + {stats.count.toLocaleString()} sessions + +
+
+
+

Average

+

+ {formatDuration(stats.avg_duration_seconds)} +

+
+
+

Median

+

+ {formatDuration(stats.median_duration_seconds)} +

+
+
+

Total

+

+ {formatDuration(stats.total_watch_time_seconds)} +

+
+
+
+ ))} + {Object.keys(duration.by_type).length === 0 && ( +

No data available

+ )} +
+
+
+
+ )} + + {/* ═══════════ COMPLETION RATES ═══════════ */} + {activeTab === 'completion' && completion && ( +
+
+ {/* Pie Chart */} +
+

+ Overall Completion Rates +

+
+ + + 90%)', value: completion.overall.completed }, + { name: 'Partial (25-90%)', value: completion.overall.partial }, + { name: 'Abandoned (<25%)', value: completion.overall.abandoned }, + ]} + cx="50%" + cy="50%" + innerRadius={60} + outerRadius={100} + paddingAngle={3} + dataKey="value" + > + {PIE_COLORS.map((color, i) => ( + + ))} + + + + + +
+
+
+

{completion.overall.completed_pct}%

+

Completed

+
+
+

{completion.overall.partial_pct}%

+

Partial

+
+
+

{completion.overall.abandoned_pct}%

+

Abandoned

+
+
+
+ + {/* By Type Breakdown */} +
+

+ Completion by Content Type +

+
+ {Object.entries(completion.by_type).map(([type, stats]) => ( +
+
+ {type === 'movie' ? ( + + ) : ( + + )} + {type} + + {stats.total} plays + +
+ {/* Stacked bar */} +
+
+
+
+
+
+ {stats.completed_pct}% completed + {stats.abandoned_pct}% abandoned +
+
+ ))} +
+
+
+ + {/* Most Abandoned */} + {completion.most_abandoned.length > 0 && ( +
+

+ Most Frequently Abandoned Content +

+
+ {completion.most_abandoned.map((item, i) => ( +
+
+ + {i + 1} + + {item.media_type === 'movie' ? ( + + ) : ( + + )} + {item.title} +
+ + {item.abandoned_count} abandoned + +
+ ))} +
+
+ )} +
+ )} + + {/* ═══════════ BINGE WATCH ═══════════ */} + {activeTab === 'binge' && binge && ( +
+ {/* Summary */} +
+
+ +
+

+ {binge.total_binge_sessions} binge session{binge.total_binge_sessions !== 1 ? 's' : ''} +

+

+ detected in the last {binge.period_days} days ({binge.min_episodes}+ episodes) +

+
+
+
+ +
+ {/* Top Binged Series */} +
+

+ Most Binged Series +

+ {binge.top_binged_series.length > 0 ? ( +
+ {binge.top_binged_series.map((s, i) => ( +
+
+ + {i + 1} + + + {s.series} +
+ + {s.binge_count} binge{s.binge_count !== 1 ? 's' : ''} + +
+ ))} +
+ ) : ( +

No binge sessions detected

+ )} +
+ + {/* Top Bingers */} +
+

+ Top Binge Watchers +

+ {binge.top_bingers.length > 0 ? ( +
+ {binge.top_bingers.map((u, i) => ( +
+
+ + {i + 1} + + + {u.user_name} +
+
+ {u.binges} binge{u.binges !== 1 ? 's' : ''} + ({u.total_episodes} eps) +
+
+ ))} +
+ ) : ( +

No bingers found

+ )} +
+
+ + {/* Recent Binge Sessions */} + {binge.recent_binges.length > 0 && ( +
+

+ Recent Binge Sessions +

+
+ {binge.recent_binges.slice(0, 15).map((b, i) => ( +
+
+ +
+

+ {b.user_name} binged {b.series} +

+

+ {b.started_at ? new Date(b.started_at).toLocaleDateString('en-US', { + month: 'short', day: 'numeric', hour: 'numeric', minute: '2-digit' + }) : 'Unknown date'} +

+
+
+
+ + {b.episode_count} episodes + + + {formatDuration(b.total_duration_seconds)} + +
+
+ ))} +
+
+ )} +
+ )} + + {/* ═══════════ CONTENT REACH ═══════════ */} + {activeTab === 'reach' && reach && ( +
+ {/* Summary Cards */} +
+
+
+
+

Shared

+

{reach.shared.count}

+

{reach.shared.pct}% · {formatBytes(reach.shared.size_bytes)}

+
+ +
+

Watched by 2+ users

+
+
+
+
+

Solo

+

{reach.solo.count}

+

{reach.solo.pct}% · {formatBytes(reach.solo.size_bytes)}

+
+ +
+

Watched by 1 user only

+
+
+
+
+

Unwatched

+

{reach.unwatched.count}

+

{reach.unwatched.pct}% · {formatBytes(reach.unwatched.size_bytes)}

+
+ +
+

Not watched by anyone

+
+
+ + {/* Pie Chart */} +
+

+ Content Reach Distribution +

+
+ + + + {PIE_COLORS.map((color, i) => ( + + ))} + + + + + +
+
+ +
+ {/* Top Shared */} + {reach.top_shared.length > 0 && ( +
+

+ Most Shared Content +

+
+ {reach.top_shared.map((item, i) => ( +
+
+ {i + 1} + {item.media_type === 'movie' ? ( + + ) : ( + + )} + {item.title} +
+ + {item.viewer_count} viewers + +
+ ))} +
+
+ )} + + {/* Top Solo (Large) - Cleanup Candidates */} + {reach.top_solo_large.length > 0 && ( +
+

+ Solo Content — Largest Files + (cleanup candidates) +

+
+ {reach.top_solo_large.map((item, i) => ( +
+
+ {i + 1} + {item.media_type === 'movie' ? ( + + ) : ( + + )} + {item.title} +
+ + {formatBytes(item.size_bytes)} + +
+ ))} +
+
+ )} +
+
+ )} + + {/* No data states */} + {activeTab === 'concurrent' && !concurrent && ( +
+
+
+ )} + {activeTab === 'duration' && !duration && ( +
+
+
+ )} + {activeTab === 'completion' && !completion && ( +
+
+
+ )} + {activeTab === 'binge' && !binge && ( +
+
+
+ )} + {activeTab === 'reach' && !reach && ( +
+
+
+ )} +
+ ) +} From 8cd53de383ca18e91d7d77e169082c03e6e2591b Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 18:44:45 +0100 Subject: [PATCH 093/122] feat: Phase 5 - Smart Cleanup Integration Backend: - Add per-user rule conditions to RuleConditions schema: - no_user_watched_days: Delete only if NO user watched in X days - exclude_if_user_favorited: Protect favorites of specific users - exclude_active_sessions: Never delete items being watched - min_unique_viewers: Only delete if fewer than X unique viewers - Update CleanupEngine.evaluate_rule() with UserWatchHistory/PlaybackActivity queries - Update CleanupEngine._evaluate_item_for_preview() with detailed per-user reasoning - Add GET /media/cleanup-suggestions endpoint with 5 suggestion categories: unwatched, abandoned, low engagement, stale, storage hog Score-based ranking with composite scoring algorithm Frontend: - Add CleanupSuggestions.tsx page with category filters and score display - Update Rules.tsx with Smart Cleanup (Per-User) section in rule form - Add condition summary badges for new fields on rule cards - Update RuleConditions TypeScript interface - Add Suggestions nav item with LightBulbIcon - Add /suggestions route --- DEVELOPMENT_STATUS.md | 21 +- PLANNED_FEATURES.md | 15 +- backend/app/api/routes/media.py | 176 ++++++++++++ backend/app/schemas/__init__.py | 5 + backend/app/services/cleanup_engine.py | 158 ++++++++++- frontend/src/App.tsx | 2 + frontend/src/components/Layout.tsx | 2 + frontend/src/pages/CleanupSuggestions.tsx | 314 ++++++++++++++++++++++ frontend/src/pages/Rules.tsx | 107 ++++++++ frontend/src/types/index.ts | 5 + 10 files changed, 788 insertions(+), 17 deletions(-) create mode 100644 frontend/src/pages/CleanupSuggestions.tsx diff --git a/DEVELOPMENT_STATUS.md b/DEVELOPMENT_STATUS.md index 6c874ef..786a4fd 100644 --- a/DEVELOPMENT_STATUS.md +++ b/DEVELOPMENT_STATUS.md @@ -2,7 +2,7 @@ > **Zweck**: Dieses Dokument dient als fortlaufender Stand für die Weiterentwicklung. 
Es kann in jedem neuen Chat/auf jedem Rechner als Kontext übergeben werden, damit der Assistent sofort weiß, wo es weitergeht. -**Letzte Aktualisierung**: 26. Februar 2026 (Session 10) +**Letzte Aktualisierung**: 26. Februar 2026 (Session 11) **Branch**: `develop` **Letzter Commit**: `55e4bf7` (Session 9) **Version**: `vdev.0.0.231` @@ -280,11 +280,13 @@ frontend/src/ - [x] Binge-Watch Detection – Analytics.tsx (Session 10) - [x] Shared vs. Solo Content Analysis – Analytics.tsx (Session 10) -### Phase 5 – Smart Cleanup Integration ❌ NICHT BEGONNEN -- [ ] Cleanup Rules per User ("Delete only if NO user watched in X days") -- [ ] User-Specific Exclusions ("Never delete if User X has favorite") -- [ ] Enhanced Currently Watching Detection -- [ ] Analytics-based Cleanup Suggestions +### Phase 5 – Smart Cleanup Integration ✅ ERLEDIGT +- [x] Per-User Cleanup Rules: `no_user_watched_days` – checks UserWatchHistory per user (Session 11) +- [x] User-Specific Exclusions: `exclude_if_user_favorited` – protects favorites of specific users (Session 11) +- [x] Enhanced Currently Watching: `exclude_active_sessions` – checks PlaybackActivity.is_active (Session 11) +- [x] Min Unique Viewers: `min_unique_viewers` – only delete if fewer than X viewers (Session 11) +- [x] Analytics-based Cleanup Suggestions: `/media/cleanup-suggestions` endpoint (Session 11) +- [x] Cleanup Suggestions UI: CleanupSuggestions.tsx with category filters + scoring (Session 11) --- @@ -503,9 +505,9 @@ Das Backend ist gut strukturiert mit: 20. ~~CI/CD: Tests + Security Scanning Workflows~~ ✅ 21. ~~Dependabot Setup~~ ✅ -### Priorität 7: Phase 4+ (Zukunft) +### ~~Priorität 7: Phase 4+5~~ ✅ ERLEDIGT (Session 9+10+11) 22. ~~Advanced Analytics~~ ✅ (Session 9+10): Heatmaps, User Timeline, Concurrent Streams, Duration Stats, Completion Rates, Binge Detection, Content Reach -23. Smart Cleanup Rules (Per-User Conditions) +23. 
~~Smart Cleanup Rules (Per-User Conditions)~~ ✅ (Session 11): no_user_watched_days, exclude_if_user_favorited, exclude_active_sessions, min_unique_viewers, Cleanup Suggestions API + UI 24. ~~Genre Distribution Charts~~ ✅ (Session 9) 25. ~~User Detail: Favorite Genres~~ ✅ (Session 9), ~~Timeline-Tab~~ ✅ (Session 10) 26. ~~LibraryDetail: Genre-Charts~~ ✅ (Session 9), ~~Grid-View mit Poster-Bildern~~ ✅ (Session 10) @@ -581,5 +583,4 @@ docker compose -f docker-compose.dev.yml up --build | 22.02.2026 (6) | **Session 6 – Bugfix & Expand-Rows**: BUG-011 behoben: PlaybackActivity `position_ticks`/`runtime_ticks` Integer→BigInteger (PostgreSQL int32 overflow bei Emby-Ticks >2.1B). DB-Migration hinzugefügt. Fehlerbehandlung in `_sync_active_sessions` und `services.py` mit `db.rollback()`. ResponsiveTable: Expand-Row-Support (`onRowClick`, `isExpanded`, `expandedContent`). Preview.tsx: Beide Tabellen (Series+Movies) auf ResponsiveTable migriert (letzte Seite). Expand-Rows auf Activity, UserDetail, LibraryDetail (IP, Device, Play Method, Progress-Bar). ConfirmDialog: `aria-modal`, Focus-Trap, Escape-Key, Click-Outside. Library Activity API: `ip_address`, `transcode_video`, `transcode_audio` hinzugefügt. BUG-012: Radarr-Pfad Ordner→Datei-Pfad (Movie-Watch-Statistiken). BUG-013: User Last Seen/Watched/Client Fallback-Logik. Branch: `feature/phase2-enhancements-and-docs` | | 24.02.2026 (7) | **Session 7 – Security Hardening I** (PR #19 `feature/security-hardening`): CORS Lockdown (Wildcard-Warnung in Production). API-Key/Notification-Secret Masking. Password Complexity Enforcement. Security Headers Middleware (X-Frame-Options, X-Content-Type-Options, Referrer-Policy, Permissions-Policy). WebSocket Auth (kurzlebiger Token). Refresh Token Rotation (altes Token revoked). Account Lockout (DB-Migration für `failed_login_attempts`, `locked_until`). Trusted Proxy Config. Rate-Limit Improvements. System Settings Allowlist. Staging Path Validation. Rule Import File Size Limit. 
`datetime.utcnow()` → `datetime.now(timezone.utc)`. Audit Log Data Retention Job. SECRET_KEY Enforcement in docker-compose. Frontend WebSocket Auth + Refresh Token Rotation Support. Branch: `feature/security-hardening`, Commit: `3058956` (PR #19) | | 24.02.2026 (8) | **Session 8 – Security Hardening II + httpOnly Cookies** (PRs #20, #21, #22): httpOnly Cookie Auth Migration (ADR-001) – JWT aus localStorage entfernt, Set-Cookie im Backend, `credentials: 'include'` im Frontend, Cookie-Clearing bei Logout. CSRF Double-Submit Cookie Middleware (`csrf.py`). Security Event Logging (`security_events.py` – strukturiertes JSON für Auth/Rate-Limit/CSRF Events). SSRF-Safe URL Validation (`url_validation.py`). `escape_like()` für SQL-Injection-Schutz. Content-Security-Policy Header. WebSocket Connection Limits per IP. Body Size Limit Middleware. Admin-Only auf Rules, Jobs, Staging, Services, Notifications, System-Settings Routes. Outbound URL Validation auf alle Service-Connection/Notification/Setup Endpoints. Enhanced Rate Limiting mit Security-Event-Logging auf allen API-Routes. Refresh Token Cleanup Scheduler-Job. CI/CD: `tests.yml` (Backend+Frontend Tests), `security-scan.yml` (SAST/DAST). Pytest Setup mit Smoke Test. Dependabot Config. npm Dependency Bump. Branch: `feature/security-hardening2`, Commits: `148d0f6` (PR #20), `f0eec84` (PR #21), `657ad70` (PR #22) | -| 26.02.2026 (10) | **Session 10 \u2013 Phase 2 Completion + Phase 4 Advanced Analytics**: Phase 2: User Timeline Tab (Backend + Frontend Calendar Heatmap + Session-Gruppierung), Image Proxy (`GET /media/{id}/image`), LibraryDetail Grid View (Table/Grid Toggle mit Poster-Bildern), UserDetail Activity Type-Filter + Suche. Phase 4: Concurrent Streams Analysis, Watch Duration Stats, Completion Rate Analytics, Binge-Watch Detection, Shared vs. Solo Content Analysis. Neue Analytics-Seite mit 5 Tabs. 
Branches: `feature/phase2-completion`, `feature/phase4-advanced-analytics` | -| 30.12.2024 | Initiale Version: Session-Zusammenfassung (Rules Export, Sidebar, Theme Toggle, Staging UI) | +| 26.02.2026 (10) | **Session 10 \u2013 Phase 2 Completion + Phase 4 Advanced Analytics**: Phase 2: User Timeline Tab (Backend + Frontend Calendar Heatmap + Session-Gruppierung), Image Proxy (`GET /media/{id}/image`), LibraryDetail Grid View (Table/Grid Toggle mit Poster-Bildern), UserDetail Activity Type-Filter + Suche. Phase 4: Concurrent Streams Analysis, Watch Duration Stats, Completion Rate Analytics, Binge-Watch Detection, Shared vs. Solo Content Analysis. Neue Analytics-Seite mit 5 Tabs. Branches: `feature/phase2-completion`, `feature/phase4-advanced-analytics` || 26.02.2026 (11) | **Session 11 – Phase 5 Smart Cleanup Integration**: Per-User Rule Conditions (`no_user_watched_days`, `exclude_if_user_favorited`, `exclude_active_sessions`, `min_unique_viewers`) in RuleConditions Schema + CleanupEngine. Analytics-based Cleanup Suggestions API (`GET /media/cleanup-suggestions`). CleanupSuggestions.tsx Frontend-Seite mit Kategorie-Filter (Unwatched, Abandoned, Low Engagement, Stale, Storage Hog) und Score-basiertem Ranking. Rules.tsx: Smart Cleanup Per-User Section. Branch: `feature/phase5-smart-cleanup` || 30.12.2024 | Initiale Version: Session-Zusammenfassung (Rules Export, Sidebar, Theme Toggle, Staging UI) | diff --git a/PLANNED_FEATURES.md b/PLANNED_FEATURES.md index 129ca6b..0993673 100644 --- a/PLANNED_FEATURES.md +++ b/PLANNED_FEATURES.md @@ -297,11 +297,11 @@ Delete movie if: - [x] Binge-Watch Detection – Backend `/activity/binge-stats` + Frontend Analytics page (Session 10) - [x] Shared vs. 
Solo Content – Backend `/media/content-reach` + Frontend Analytics page (Session 10) -### Phase 5 - Smart Cleanup Integration -- [ ] Cleanup Rules per User condition -- [ ] User-Specific Exclusions -- [ ] Enhanced Currently Watching detection -- [ ] Analytics-based cleanup suggestions +### Phase 5 - Smart Cleanup Integration ✅ Complete (Session 11) +- [x] Cleanup Rules per User condition – `no_user_watched_days` in RuleConditions (Session 11) +- [x] User-Specific Exclusions – `exclude_if_user_favorited` + `min_unique_viewers` in RuleConditions (Session 11) +- [x] Enhanced Currently Watching detection – `exclude_active_sessions` checks PlaybackActivity.is_active (Session 11) +- [x] Analytics-based cleanup suggestions – `/media/cleanup-suggestions` endpoint + CleanupSuggestions page (Session 11) ### Security Hardening ✅ Complete (Session 7+8) - [x] httpOnly Cookie Auth (JWT aus localStorage entfernt, ADR-001) @@ -421,6 +421,11 @@ To enable these features, the following data needs to be tracked: | 4 | Binge Stats API (`/activity/binge-stats`) | Session 10 | ✅ | | 4 | Content Reach API (`/media/content-reach`) | Session 10 | ✅ | | 4 | Analytics Page (5-tab: Concurrent, Duration, Completion, Binge, Reach) | Session 10 | ✅ | +| 5 | Per-User Rule Conditions (no_user_watched_days, exclude_if_user_favorited) | Session 11 | ✅ | +| 5 | Active Session Protection (exclude_active_sessions) | Session 11 | ✅ | +| 5 | Min Unique Viewers Rule Condition | Session 11 | ✅ | +| 5 | Cleanup Suggestions API (`/media/cleanup-suggestions`) | Session 11 | ✅ | +| 5 | Cleanup Suggestions Page (CleanupSuggestions.tsx) | Session 11 | ✅ | --- diff --git a/backend/app/api/routes/media.py b/backend/app/api/routes/media.py index 3a210d2..cf7d734 100644 --- a/backend/app/api/routes/media.py +++ b/backend/app/api/routes/media.py @@ -920,6 +920,182 @@ async def get_content_reach( } +@router.get("/cleanup-suggestions") +@limiter.limit(RateLimits.API_READ) +async def get_cleanup_suggestions( + request: 
Request, + days: int = Query(90, ge=7, le=365), + db: AsyncSession = Depends(get_db), + current_user = Depends(get_current_user) +): + """Get analytics-based cleanup suggestions. + + Analyzes watch patterns and content usage to suggest items for cleanup: + - Unwatched content older than X days + - Abandoned content (started but never finished by anyone) + - Low-engagement content (watched by very few users) + - Completed and stale content (fully watched, not rewatched) + - Large files with low watch counts (storage hogs) + """ + from ...models import PlaybackActivity + + now = datetime.now(timezone.utc) + cutoff = now - timedelta(days=days) + + # Get all media items with sizes + items_result = await db.execute( + select(MediaItem).where( + and_( + MediaItem.media_type.in_([MediaType.MOVIE, MediaType.SERIES]), + MediaItem.size_bytes > 0 + ) + ) + ) + all_items = items_result.scalars().all() + + if not all_items: + return {"suggestions": [], "summary": {}} + + # Pre-fetch per-user data for all items + # Get unique viewer counts per item + viewer_counts_result = await db.execute( + select( + UserWatchHistory.media_item_id, + func.count(func.distinct(UserWatchHistory.user_id)).label("viewers"), + func.max(UserWatchHistory.last_played_at).label("last_played"), + func.avg(UserWatchHistory.played_percentage).label("avg_progress") + ).where(UserWatchHistory.is_played == True) + .group_by(UserWatchHistory.media_item_id) + ) + viewer_data = { + row.media_item_id: { + "viewers": row.viewers, + "last_played": row.last_played, + "avg_progress": float(row.avg_progress) if row.avg_progress else 0 + } + for row in viewer_counts_result.all() + } + + # Get abandoned items (started but avg progress < 25%) + abandoned_result = await db.execute( + select( + UserWatchHistory.media_item_id, + func.count(func.distinct(UserWatchHistory.user_id)).label("starters"), + func.avg(UserWatchHistory.played_percentage).label("avg_pct") + ).group_by(UserWatchHistory.media_item_id) + 
.having(func.avg(UserWatchHistory.played_percentage) < 25) + ) + abandoned_ids = {row.media_item_id for row in abandoned_result.all()} + + # Get active sessions (items currently being watched) + active_result = await db.execute( + select(PlaybackActivity.media_item_id).where(PlaybackActivity.is_active == True).distinct() + ) + active_ids = {row[0] for row in active_result.all()} + + suggestions = [] + + # Get total user count (for low-engagement calculation) + total_users_result = await db.execute(select(func.count()).select_from(MediaServerUser)) + total_users = total_users_result.scalar() or 1 + + for item in all_items: + # Skip items currently being watched + if item.id in active_ids: + continue + # Skip staged items + if getattr(item, 'is_staged', False): + continue + # Skip already flagged items + if getattr(item, 'flagged_for_cleanup', False): + continue + + vdata = viewer_data.get(item.id) + unique_viewers = vdata["viewers"] if vdata else 0 + last_played = vdata["last_played"] if vdata else None + avg_progress = vdata["avg_progress"] if vdata else 0 + + suggestion_reasons = [] + score = 0 # Higher score = stronger suggestion + + # Category 1: Unwatched content + if not item.is_watched and not vdata: + age_days = (now - item.added_at).days if item.added_at else 0 + if age_days > days: + suggestion_reasons.append(f"Never watched, added {age_days} days ago") + score += 30 + min(age_days // 30, 20) # Up to +50 + + # Category 2: Abandoned content + if item.id in abandoned_ids and vdata: + suggestion_reasons.append(f"Abandoned by viewers (avg progress: {avg_progress:.0f}%)") + score += 25 + + # Category 3: Low engagement (only 1 viewer on shared server) + if total_users > 1 and unique_viewers <= 1 and item.added_at and (now - item.added_at).days > days: + suggestion_reasons.append(f"Low engagement: only {unique_viewers} viewer(s) out of {total_users} users") + score += 15 + + # Category 4: Completed & stale (watched but not rewatched in X days) + if vdata and 
avg_progress > 90 and last_played and last_played < cutoff: + days_since = (now - last_played).days + suggestion_reasons.append(f"Fully watched, not revisited in {days_since} days") + score += 20 + min(days_since // 30, 10) + + # Category 5: Storage hogs (large files with low watch count) + size_gb = (item.size_bytes or 0) / (1024**3) + if size_gb > 5 and (item.watch_count or 0) <= 1: + suggestion_reasons.append(f"Large file ({size_gb:.1f} GB) with only {item.watch_count or 0} total plays") + score += 15 + int(size_gb) + + if suggestion_reasons: + suggestions.append({ + "item_id": item.id, + "title": item.title, + "media_type": item.media_type.value if hasattr(item.media_type, 'value') else str(item.media_type), + "size_bytes": item.size_bytes or 0, + "added_at": item.added_at.isoformat() if item.added_at else None, + "last_watched_at": last_played.isoformat() if last_played else None, + "watch_count": item.watch_count or 0, + "unique_viewers": unique_viewers, + "avg_progress": round(avg_progress, 1), + "score": score, + "reasons": suggestion_reasons, + }) + + # Sort by score descending + suggestions.sort(key=lambda x: x["score"], reverse=True) + + # Limit to top 50 + suggestions = suggestions[:50] + + # Build categories for summary + cat_counts = {"unwatched": 0, "abandoned": 0, "low_engagement": 0, "stale": 0, "storage_hog": 0} + total_reclaimable = 0 + for s in suggestions: + total_reclaimable += s["size_bytes"] + for reason in s["reasons"]: + if "Never watched" in reason: + cat_counts["unwatched"] += 1 + if "Abandoned" in reason: + cat_counts["abandoned"] += 1 + if "Low engagement" in reason: + cat_counts["low_engagement"] += 1 + if "not revisited" in reason: + cat_counts["stale"] += 1 + if "Large file" in reason: + cat_counts["storage_hog"] += 1 + + return { + "suggestions": suggestions, + "summary": { + "total_suggestions": len(suggestions), + "total_reclaimable_bytes": total_reclaimable, + "days_analyzed": days, + "categories": cat_counts, + } + } + + # 
Simple in-memory image cache (max 200 items, TTL 1 hour) _image_cache: Dict[int, tuple[bytes, str, datetime]] = {} _IMAGE_CACHE_MAX = 200 diff --git a/backend/app/schemas/__init__.py b/backend/app/schemas/__init__.py index 1b8e00e..08fb5c2 100644 --- a/backend/app/schemas/__init__.py +++ b/backend/app/schemas/__init__.py @@ -289,6 +289,11 @@ class RuleConditions(BaseModel): add_import_exclusion: bool = True # Add to import exclusion list when deleting watched_progress_below: Optional[int] = Field(None, ge=0, le=100) # Only delete if progress below X% exclude_recently_added_days: Optional[int] = Field(None, ge=0) # Exclude recently added items + # Phase 5: Smart Cleanup - Per-user conditions + no_user_watched_days: Optional[int] = Field(None, ge=0) # Delete only if NO user watched in X days + exclude_if_user_favorited: List[int] = [] # Never delete if any of these user IDs have it as favorite + exclude_active_sessions: bool = True # Never delete if any user is currently watching + min_unique_viewers: Optional[int] = Field(None, ge=0) # Only delete if fewer than X unique viewers class CleanupRuleBase(BaseModel): diff --git a/backend/app/services/cleanup_engine.py b/backend/app/services/cleanup_engine.py index bafc04d..b2f20da 100644 --- a/backend/app/services/cleanup_engine.py +++ b/backend/app/services/cleanup_engine.py @@ -4,14 +4,15 @@ from typing import Dict, Any, List, Optional from datetime import datetime as dt, timedelta, timezone from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy import select, and_, or_ +from sqlalchemy import select, and_, or_, func from loguru import logger import os import shutil from ..models import ( MediaItem, CleanupRule, ServiceConnection, CleanupLog, - NotificationChannel, MediaType, RuleActionType, ServiceType + NotificationChannel, MediaType, RuleActionType, ServiceType, + UserWatchHistory, PlaybackActivity ) from ..schemas import RuleConditions from .sonarr import SonarrClient @@ -136,6 +137,66 @@ async def 
evaluate_rule( if item.rating >= conditions.rating_below: continue + # Phase 5: Per-user watch checks + # Check if any user is currently watching (active session) + if conditions.exclude_active_sessions: + active_result = await self.db.execute( + select(func.count()).select_from(PlaybackActivity).where( + and_( + PlaybackActivity.media_item_id == item.id, + PlaybackActivity.is_active == True + ) + ) + ) + if active_result.scalar() > 0: + logger.debug(f"Skipping {item.title} - currently being watched") + continue + + # Check per-user watch history (no user watched in X days) + if conditions.no_user_watched_days is not None: + cutoff = dt.now(timezone.utc) - timedelta(days=conditions.no_user_watched_days) + recent_result = await self.db.execute( + select(func.count()).select_from(UserWatchHistory).where( + and_( + UserWatchHistory.media_item_id == item.id, + UserWatchHistory.last_played_at >= cutoff + ) + ) + ) + if recent_result.scalar() > 0: + logger.debug(f"Skipping {item.title} - a user watched within last {conditions.no_user_watched_days} days") + continue + + # Check if specific users have it as favorite + if conditions.exclude_if_user_favorited: + fav_result = await self.db.execute( + select(func.count()).select_from(UserWatchHistory).where( + and_( + UserWatchHistory.media_item_id == item.id, + UserWatchHistory.user_id.in_(conditions.exclude_if_user_favorited), + UserWatchHistory.is_favorite == True + ) + ) + ) + if fav_result.scalar() > 0: + logger.debug(f"Skipping {item.title} - favorited by a protected user") + continue + + # Check minimum unique viewers + if conditions.min_unique_viewers is not None: + viewer_result = await self.db.execute( + select(func.count(func.distinct(UserWatchHistory.user_id))).select_from(UserWatchHistory).where( + and_( + UserWatchHistory.media_item_id == item.id, + UserWatchHistory.is_played == True + ) + ) + ) + unique_viewers = viewer_result.scalar() + if unique_viewers >= conditions.min_unique_viewers: + 
logger.debug(f"Skipping {item.title} - has {unique_viewers} unique viewers (min: {conditions.min_unique_viewers})") + continue + matched_items.append(item) # Apply max items limit @@ -643,6 +704,99 @@ async def _evaluate_item_for_preview( f"Rating ({item.rating}) below threshold ({conditions.rating_below})" ) + # Phase 5: Per-user checks for preview + # Check active sessions + if conditions.exclude_active_sessions: + active_result = await self.db.execute( + select(func.count()).select_from(PlaybackActivity).where( + and_( + PlaybackActivity.media_item_id == item.id, + PlaybackActivity.is_active == True + ) + ) + ) + active_count = active_result.scalar() + if active_count > 0: + result["would_delete"] = False + result["skip_reasons"].append( + f"Currently being watched by {active_count} user(s)" + ) + return result + + # Check per-user watch history + if conditions.no_user_watched_days is not None: + cutoff = dt.now(timezone.utc) - timedelta(days=conditions.no_user_watched_days) + recent_result = await self.db.execute( + select(func.count()).select_from(UserWatchHistory).where( + and_( + UserWatchHistory.media_item_id == item.id, + UserWatchHistory.last_played_at >= cutoff + ) + ) + ) + recent_count = recent_result.scalar() + if recent_count > 0: + result["would_delete"] = False + result["skip_reasons"].append( + f"{recent_count} user(s) watched within last {conditions.no_user_watched_days} days" + ) + return result + else: + result["reasons"].append( + f"No user watched within last {conditions.no_user_watched_days} days" + ) + + # Check user-specific favorites + if conditions.exclude_if_user_favorited: + from ..models import MediaServerUser + # Get user names for reporting + user_result = await self.db.execute( + select(MediaServerUser.id, MediaServerUser.name).where( + MediaServerUser.id.in_(conditions.exclude_if_user_favorited) + ) + ) + user_names = {row.id: row.name for row in user_result.all()} + + fav_result = await self.db.execute( + 
select(UserWatchHistory.user_id).where( + and_( + UserWatchHistory.media_item_id == item.id, + UserWatchHistory.user_id.in_(conditions.exclude_if_user_favorited), + UserWatchHistory.is_favorite == True + ) + ) + ) + fav_user_ids = [row[0] for row in fav_result.all()] + if fav_user_ids: + fav_names = [user_names.get(uid, f"User {uid}") for uid in fav_user_ids] + result["would_delete"] = False + result["skip_reasons"].append( + f"Favorited by protected user(s): {', '.join(fav_names)}" + ) + return result + + # Check minimum unique viewers + if conditions.min_unique_viewers is not None: + viewer_result = await self.db.execute( + select(func.count(func.distinct(UserWatchHistory.user_id))).select_from(UserWatchHistory).where( + and_( + UserWatchHistory.media_item_id == item.id, + UserWatchHistory.is_played == True + ) + ) + ) + unique_viewers = viewer_result.scalar() + if unique_viewers >= conditions.min_unique_viewers: + result["would_delete"] = False + result["skip_reasons"].append( + f"Has {unique_viewers} unique viewer(s) (minimum: {conditions.min_unique_viewers})" + ) + return result + else: + result["reasons"].append( + f"Only {unique_viewers} unique viewer(s) (below minimum: {conditions.min_unique_viewers})" + ) + # If we got here, item matches all conditions if not result["reasons"]: result["reasons"].append("Matches all rule conditions") diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 04ed45a..378d1a0 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -26,6 +26,7 @@ const Users = lazy(() => import('./pages/Users')) const UserDetail = lazy(() => import('./pages/UserDetail')) const Activity = lazy(() => import('./pages/Activity')) const Analytics = lazy(() => import('./pages/Analytics')) +const CleanupSuggestions = lazy(() => import('./pages/CleanupSuggestions')) interface SetupStatus { setup_complete: boolean @@ -140,6 +141,7 @@ function App() { }>} /> }>} /> }>} /> + }>} /> }>} /> }>} /> }>} /> diff --git 
a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index 41a4e43..f9f24ae 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -28,6 +28,7 @@ import { UsersIcon, ChartBarIcon, ChartPieIcon, + LightBulbIcon, } from '@heroicons/react/24/outline' const navigation = [ @@ -39,6 +40,7 @@ const navigation = [ { name: 'Services', href: '/services', icon: ServerStackIcon }, { name: 'Rules', href: '/rules', icon: ClipboardDocumentListIcon }, { name: 'Preview', href: '/preview', icon: EyeIcon }, + { name: 'Suggestions', href: '/suggestions', icon: LightBulbIcon }, { name: 'Staging', href: '/staging', icon: ArchiveBoxIcon }, { name: 'Jobs', href: '/jobs', icon: CpuChipIcon }, { name: 'Notifications', href: '/notifications', icon: BellIcon }, diff --git a/frontend/src/pages/CleanupSuggestions.tsx b/frontend/src/pages/CleanupSuggestions.tsx new file mode 100644 index 0000000..d5ca0e0 --- /dev/null +++ b/frontend/src/pages/CleanupSuggestions.tsx @@ -0,0 +1,314 @@ +import { useState } from 'react' +import { useQuery } from '@tanstack/react-query' +import { + LightBulbIcon, + TrashIcon, + FilmIcon, + TvIcon, + ArrowPathIcon, + ExclamationTriangleIcon, + EyeSlashIcon, + UsersIcon, + ClockIcon, + ServerIcon, +} from '@heroicons/react/24/outline' +import api from '../lib/api' +import { formatBytes } from '../lib/utils' + +interface CleanupSuggestion { + item_id: number + title: string + media_type: string + size_bytes: number + added_at: string | null + last_watched_at: string | null + watch_count: number + unique_viewers: number + avg_progress: number + score: number + reasons: string[] +} + +interface SuggestionSummary { + total_suggestions: number + total_reclaimable_bytes: number + days_analyzed: number + categories: { + unwatched: number + abandoned: number + low_engagement: number + stale: number + storage_hog: number + } +} + +interface SuggestionsResponse { + suggestions: CleanupSuggestion[] + summary: 
SuggestionSummary +} + +const categoryInfo: Record = { + unwatched: { + label: 'Unwatched', + icon: EyeSlashIcon, + color: 'text-gray-400', + description: 'Never watched by any user', + }, + abandoned: { + label: 'Abandoned', + icon: ExclamationTriangleIcon, + color: 'text-yellow-400', + description: 'Started but never finished (avg < 25%)', + }, + low_engagement: { + label: 'Low Engagement', + icon: UsersIcon, + color: 'text-orange-400', + description: 'Watched by very few users', + }, + stale: { + label: 'Stale', + icon: ClockIcon, + color: 'text-blue-400', + description: 'Fully watched, not rewatched', + }, + storage_hog: { + label: 'Storage Hog', + icon: ServerIcon, + color: 'text-red-400', + description: 'Large files with low play counts', + }, +} + +export default function CleanupSuggestions() { + const [days, setDays] = useState(90) + const [filterCategory, setFilterCategory] = useState(null) + + const { data, isLoading, isFetching } = useQuery({ + queryKey: ['cleanup-suggestions', days], + queryFn: async () => { + const res = await api.get(`/media/cleanup-suggestions?days=${days}`) + return res.data + }, + }) + + const suggestions = data?.suggestions || [] + const summary = data?.summary + + // Filter by category + const filtered = filterCategory + ? suggestions.filter(s => { + const reasonText = s.reasons.join(' ') + switch (filterCategory) { + case 'unwatched': return reasonText.includes('Never watched') + case 'abandoned': return reasonText.includes('Abandoned') + case 'low_engagement': return reasonText.includes('Low engagement') + case 'stale': return reasonText.includes('not revisited') + case 'storage_hog': return reasonText.includes('Large file') + default: return true + } + }) + : suggestions + + const dayOptions = [ + { value: 30, label: '30 days' }, + { value: 60, label: '60 days' }, + { value: 90, label: '90 days' }, + { value: 180, label: '6 months' }, + { value: 365, label: '1 year' }, + ] + + return ( +
+ {/* Header */} +
+
+

+ + Cleanup Suggestions +

+

+ AI-powered suggestions based on watch patterns and content analytics +

+
+
+ + +
+
+ + {/* Summary Cards */} + {summary && ( +
+
+
{summary.total_suggestions}
+
Total Suggestions
+
+
+
{formatBytes(summary.total_reclaimable_bytes)}
+
Reclaimable Space
+
+
+
{summary.categories.unwatched + summary.categories.abandoned}
+
Unwatched / Abandoned
+
+
+
{summary.categories.storage_hog}
+
Storage Hogs
+
+
+ )} + + {/* Category Filter Pills */} + {summary && ( +
+ + {Object.entries(categoryInfo).map(([key, info]) => { + const count = summary.categories[key as keyof typeof summary.categories] || 0 + if (count === 0) return null + return ( + + ) + })} +
+ )} + + {/* Loading */} + {(isLoading || isFetching) && ( +
+ + Analyzing watch patterns... +
+ )} + + {/* Empty State */} + {!isLoading && !isFetching && suggestions.length === 0 && ( +
+ +

No cleanup suggestions

+

Your library is well-maintained!

+
+ )} + + {/* Suggestions List */} + {!isLoading && filtered.length > 0 && ( +
+ {filtered.map((suggestion) => ( +
+
+
+
+ {suggestion.media_type === 'movie' ? ( + + ) : ( + + )} +
+
+

+ {suggestion.title} +

+
+ {formatBytes(suggestion.size_bytes)} + + {suggestion.watch_count} plays + + {suggestion.unique_viewers} viewer{suggestion.unique_viewers !== 1 ? 's' : ''} + {suggestion.avg_progress > 0 && ( + <> + + Avg progress: {suggestion.avg_progress}% + + )} + {suggestion.last_watched_at && ( + <> + + Last watched: {new Date(suggestion.last_watched_at).toLocaleDateString()} + + )} + {suggestion.added_at && ( + <> + + Added: {new Date(suggestion.added_at).toLocaleDateString()} + + )} +
+
+ {suggestion.reasons.map((reason, i) => ( + + {reason} + + ))} +
+
+
+
+
+
= 40 ? 'text-red-400' : + suggestion.score >= 25 ? 'text-yellow-400' : + 'text-dark-300' + }`}> + Score: {suggestion.score} +
+
cleanup priority
+
+
+
+
+ ))} +
+ )} + + {/* Info box */} +
+

How suggestions work

+
    +
  • Unwatched — Content nobody has ever played, sitting idle for the analyzed period
  • +
  • Abandoned — Started by users but average watch progress is below 25%
  • +
  • Low Engagement — Only watched by 1 or fewer users on a multi-user server
  • +
  • Stale — Fully watched content that hasn't been revisited
  • +
  • Storage Hog — Files larger than 5 GB with minimal play counts
  • +
+

+ Suggestions are ranked by a composite score. Higher scores indicate stronger cleanup candidates. + Use these insights to create or refine your cleanup rules. +

+
+
+ ) +} diff --git a/frontend/src/pages/Rules.tsx b/frontend/src/pages/Rules.tsx index 042d5b7..abf1702 100644 --- a/frontend/src/pages/Rules.tsx +++ b/frontend/src/pages/Rules.tsx @@ -372,6 +372,15 @@ export default function Rules() { {rule.conditions.exclude_favorited && ( • Excludes favorites )} + {rule.conditions.no_user_watched_days && ( + • No user watched: {rule.conditions.no_user_watched_days} days + )} + {rule.conditions.exclude_active_sessions && ( + • Protects active sessions + )} + {rule.conditions.min_unique_viewers && ( + • Min viewers: {rule.conditions.min_unique_viewers} + )}
@@ -496,6 +505,11 @@ function RuleModal({ watched_progress_below: (initialData?.conditions as any)?.watched_progress_below || null, exclude_recently_added_days: (initialData?.conditions as any)?.exclude_recently_added_days || null, add_import_exclusion: (initialData?.conditions as any)?.add_import_exclusion ?? true, + // Phase 5: Smart Cleanup + no_user_watched_days: (initialData?.conditions as any)?.no_user_watched_days || null, + exclude_if_user_favorited: (initialData?.conditions as any)?.exclude_if_user_favorited || [], + exclude_active_sessions: (initialData?.conditions as any)?.exclude_active_sessions ?? true, + min_unique_viewers: (initialData?.conditions as any)?.min_unique_viewers || null, }, action: initialData?.action || 'delete', grace_period_days: existingRule?.grace_period_days ?? template?.grace_period_days ?? 7, @@ -929,6 +943,99 @@ function RuleModal({
+ + {/* Phase 5: Smart Cleanup - Per-User Conditions */} +
+

Smart Cleanup (Per-User)

+

Evaluate cleanup conditions against individual user watch data instead of global stats

+
+
+
+ + setFormData({ + ...formData, + conditions: { + ...formData.conditions, + no_user_watched_days: e.target.value ? parseInt(e.target.value) : null + } + })} + placeholder="90" + min={0} + /> +
+
+ + setFormData({ + ...formData, + conditions: { + ...formData.conditions, + min_unique_viewers: e.target.value ? parseInt(e.target.value) : null + } + })} + placeholder="2" + min={0} + /> +
+
+ +
+ +
+ +
+ + setFormData({ + ...formData, + conditions: { + ...formData.conditions, + exclude_if_user_favorited: e.target.value + ? e.target.value.split(',').map(id => parseInt(id.trim())).filter(id => !isNaN(id)) + : [] + } + })} + placeholder="1, 2, 3" + /> +

+ User IDs can be found on the Users page. Leave empty to disable. +

+
+
+
diff --git a/frontend/src/types/index.ts b/frontend/src/types/index.ts index ecb4c1d..9416b6b 100644 --- a/frontend/src/types/index.ts +++ b/frontend/src/types/index.ts @@ -99,6 +99,11 @@ export interface RuleConditions { add_import_exclusion?: boolean // Add to Import-Exclusion-List watched_progress_below?: number | null // Only delete if progress below X% exclude_recently_added_days?: number | null // Exclude recently added items + // Phase 5: Smart Cleanup - Per-user conditions + no_user_watched_days?: number | null // Delete only if NO user watched in X days + exclude_if_user_favorited?: number[] // Never delete if these user IDs have it as favorite + exclude_active_sessions?: boolean // Never delete if any user is currently watching + min_unique_viewers?: number | null // Only delete if fewer than X unique viewers } export interface CleanupRule { From dbac79bac05c767dcf47b49782b0425a5fbd374f Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:00:01 +0100 Subject: [PATCH 094/122] docs: remove i18n from roadmap, fix changelog formatting MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove item 27 (i18n / Lokalisierung) from Priorität 7 - not planned - Fix concatenated changelog rows (Sessions 10, 11, initial) into separate lines - Fix literal \u2013 → actual en-dash in Session 10 entry --- DEVELOPMENT_STATUS.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/DEVELOPMENT_STATUS.md b/DEVELOPMENT_STATUS.md index 786a4fd..b98a981 100644 --- a/DEVELOPMENT_STATUS.md +++ b/DEVELOPMENT_STATUS.md @@ -511,7 +511,6 @@ Das Backend ist gut strukturiert mit: 24. ~~Genre Distribution Charts~~ ✅ (Session 9) 25. ~~User Detail: Favorite Genres~~ ✅ (Session 9), ~~Timeline-Tab~~ ✅ (Session 10) 26. ~~LibraryDetail: Genre-Charts~~ ✅ (Session 9), ~~Grid-View mit Poster-Bildern~~ ✅ (Session 10) -27. 
i18n / Lokalisierung --- @@ -583,4 +582,6 @@ docker compose -f docker-compose.dev.yml up --build | 22.02.2026 (6) | **Session 6 – Bugfix & Expand-Rows**: BUG-011 behoben: PlaybackActivity `position_ticks`/`runtime_ticks` Integer→BigInteger (PostgreSQL int32 overflow bei Emby-Ticks >2.1B). DB-Migration hinzugefügt. Fehlerbehandlung in `_sync_active_sessions` und `services.py` mit `db.rollback()`. ResponsiveTable: Expand-Row-Support (`onRowClick`, `isExpanded`, `expandedContent`). Preview.tsx: Beide Tabellen (Series+Movies) auf ResponsiveTable migriert (letzte Seite). Expand-Rows auf Activity, UserDetail, LibraryDetail (IP, Device, Play Method, Progress-Bar). ConfirmDialog: `aria-modal`, Focus-Trap, Escape-Key, Click-Outside. Library Activity API: `ip_address`, `transcode_video`, `transcode_audio` hinzugefügt. BUG-012: Radarr-Pfad Ordner→Datei-Pfad (Movie-Watch-Statistiken). BUG-013: User Last Seen/Watched/Client Fallback-Logik. Branch: `feature/phase2-enhancements-and-docs` | | 24.02.2026 (7) | **Session 7 – Security Hardening I** (PR #19 `feature/security-hardening`): CORS Lockdown (Wildcard-Warnung in Production). API-Key/Notification-Secret Masking. Password Complexity Enforcement. Security Headers Middleware (X-Frame-Options, X-Content-Type-Options, Referrer-Policy, Permissions-Policy). WebSocket Auth (kurzlebiger Token). Refresh Token Rotation (altes Token revoked). Account Lockout (DB-Migration für `failed_login_attempts`, `locked_until`). Trusted Proxy Config. Rate-Limit Improvements. System Settings Allowlist. Staging Path Validation. Rule Import File Size Limit. `datetime.utcnow()` → `datetime.now(timezone.utc)`. Audit Log Data Retention Job. SECRET_KEY Enforcement in docker-compose. Frontend WebSocket Auth + Refresh Token Rotation Support. 
Branch: `feature/security-hardening`, Commit: `3058956` (PR #19) | | 24.02.2026 (8) | **Session 8 – Security Hardening II + httpOnly Cookies** (PRs #20, #21, #22): httpOnly Cookie Auth Migration (ADR-001) – JWT aus localStorage entfernt, Set-Cookie im Backend, `credentials: 'include'` im Frontend, Cookie-Clearing bei Logout. CSRF Double-Submit Cookie Middleware (`csrf.py`). Security Event Logging (`security_events.py` – strukturiertes JSON für Auth/Rate-Limit/CSRF Events). SSRF-Safe URL Validation (`url_validation.py`). `escape_like()` für SQL-Injection-Schutz. Content-Security-Policy Header. WebSocket Connection Limits per IP. Body Size Limit Middleware. Admin-Only auf Rules, Jobs, Staging, Services, Notifications, System-Settings Routes. Outbound URL Validation auf alle Service-Connection/Notification/Setup Endpoints. Enhanced Rate Limiting mit Security-Event-Logging auf allen API-Routes. Refresh Token Cleanup Scheduler-Job. CI/CD: `tests.yml` (Backend+Frontend Tests), `security-scan.yml` (SAST/DAST). Pytest Setup mit Smoke Test. Dependabot Config. npm Dependency Bump. Branch: `feature/security-hardening2`, Commits: `148d0f6` (PR #20), `f0eec84` (PR #21), `657ad70` (PR #22) | -| 26.02.2026 (10) | **Session 10 \u2013 Phase 2 Completion + Phase 4 Advanced Analytics**: Phase 2: User Timeline Tab (Backend + Frontend Calendar Heatmap + Session-Gruppierung), Image Proxy (`GET /media/{id}/image`), LibraryDetail Grid View (Table/Grid Toggle mit Poster-Bildern), UserDetail Activity Type-Filter + Suche. Phase 4: Concurrent Streams Analysis, Watch Duration Stats, Completion Rate Analytics, Binge-Watch Detection, Shared vs. Solo Content Analysis. Neue Analytics-Seite mit 5 Tabs. 
Branches: `feature/phase2-completion`, `feature/phase4-advanced-analytics` || 26.02.2026 (11) | **Session 11 – Phase 5 Smart Cleanup Integration**: Per-User Rule Conditions (`no_user_watched_days`, `exclude_if_user_favorited`, `exclude_active_sessions`, `min_unique_viewers`) in RuleConditions Schema + CleanupEngine. Analytics-based Cleanup Suggestions API (`GET /media/cleanup-suggestions`). CleanupSuggestions.tsx Frontend-Seite mit Kategorie-Filter (Unwatched, Abandoned, Low Engagement, Stale, Storage Hog) und Score-basiertem Ranking. Rules.tsx: Smart Cleanup Per-User Section. Branch: `feature/phase5-smart-cleanup` || 30.12.2024 | Initiale Version: Session-Zusammenfassung (Rules Export, Sidebar, Theme Toggle, Staging UI) | +| 26.02.2026 (10) | **Session 10 – Phase 2 Completion + Phase 4 Advanced Analytics**: Phase 2: User Timeline Tab (Backend + Frontend Calendar Heatmap + Session-Gruppierung), Image Proxy (`GET /media/{id}/image`), LibraryDetail Grid View (Table/Grid Toggle mit Poster-Bildern), UserDetail Activity Type-Filter + Suche. Phase 4: Concurrent Streams Analysis, Watch Duration Stats, Completion Rate Analytics, Binge-Watch Detection, Shared vs. Solo Content Analysis. Neue Analytics-Seite mit 5 Tabs. Branches: `feature/phase2-completion`, `feature/phase4-advanced-analytics` | +| 26.02.2026 (11) | **Session 11 – Phase 5 Smart Cleanup Integration**: Per-User Rule Conditions (`no_user_watched_days`, `exclude_if_user_favorited`, `exclude_active_sessions`, `min_unique_viewers`) in RuleConditions Schema + CleanupEngine. Analytics-based Cleanup Suggestions API (`GET /media/cleanup-suggestions`). CleanupSuggestions.tsx Frontend-Seite mit Kategorie-Filter (Unwatched, Abandoned, Low Engagement, Stale, Storage Hog) und Score-basiertem Ranking. Rules.tsx: Smart Cleanup Per-User Section. 
Branch: `feature/phase5-smart-cleanup` | +| 30.12.2024 | Initiale Version: Session-Zusammenfassung (Rules Export, Sidebar, Theme Toggle, Staging UI) | From bc6aa4dc5b3c98bfd8665e9e2cce155fc9c39a0f Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:13:03 +0100 Subject: [PATCH 095/122] fix(ci): make Trivy scan resilient to Docker Hub outages - Add trivy-fs job: filesystem scan (no Docker build needed) - Rename trivy -> trivy-image: container image scan now optional - Docker build has continue-on-error: true - Subsequent steps skip gracefully if build fails - Emits ::warning annotation instead of failing the workflow - SARIF uploads conditioned on file existence (hashFiles check) --- .github/workflows/security-scan.yml | 63 ++++++++++++++++++++++++----- 1 file changed, 53 insertions(+), 10 deletions(-) diff --git a/.github/workflows/security-scan.yml b/.github/workflows/security-scan.yml index 59fb3a7..53022d8 100644 --- a/.github/workflows/security-scan.yml +++ b/.github/workflows/security-scan.yml @@ -133,35 +133,74 @@ jobs: retention-days: 30 # ────────────────────────────────────────────── - # Container image scanning — Trivy + # Filesystem vulnerability scanning — Trivy # ────────────────────────────────────────────── - trivy: + trivy-fs: + name: Trivy (Filesystem Scan) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + + - name: Run Trivy filesystem scanner (SARIF) + uses: aquasecurity/trivy-action@0.34.1 + with: + scan-type: 'fs' + scan-ref: '.' + format: 'sarif' + output: 'trivy-results.sarif' + severity: 'CRITICAL,HIGH' + exit-code: '0' + + - name: Upload Trivy SARIF to GitHub Security + if: always() && hashFiles('trivy-results.sarif') != '' + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: 'trivy-results.sarif' + category: 'trivy-fs' + + - name: Run Trivy filesystem scanner (table for logs) + if: always() + uses: aquasecurity/trivy-action@0.34.1 + with: + scan-type: 'fs' + scan-ref: '.' 
+ format: 'table' + severity: 'CRITICAL,HIGH' + exit-code: '0' + + # ────────────────────────────────────────────── + # Container image scanning — Trivy (optional) + # ────────────────────────────────────────────── + trivy-image: name: Trivy (Container Scan) runs-on: ubuntu-latest steps: - - uses: actions/checkout@v6 + - uses: actions/checkout@v6 - name: Build Docker image + id: docker-build + continue-on-error: true run: docker build -t mediacurator:scan . - - name: Run Trivy vulnerability scanner + - name: Run Trivy image scanner (SARIF) + if: steps.docker-build.outcome == 'success' uses: aquasecurity/trivy-action@0.34.1 with: image-ref: 'mediacurator:scan' format: 'sarif' - output: 'trivy-results.sarif' + output: 'trivy-image-results.sarif' severity: 'CRITICAL,HIGH' exit-code: '0' - - name: Upload Trivy SARIF to GitHub Security - if: always() + - name: Upload Trivy image SARIF to GitHub Security + if: always() && hashFiles('trivy-image-results.sarif') != '' uses: github/codeql-action/upload-sarif@v3 with: - sarif_file: 'trivy-results.sarif' + sarif_file: 'trivy-image-results.sarif' category: 'trivy-container' - - name: Run Trivy (table output for logs) - if: always() + - name: Run Trivy image scanner (table for logs) + if: steps.docker-build.outcome == 'success' uses: aquasecurity/trivy-action@0.34.1 with: image-ref: 'mediacurator:scan' @@ -169,6 +208,10 @@ jobs: severity: 'CRITICAL,HIGH' exit-code: '0' + - name: Report build skip + if: steps.docker-build.outcome != 'success' + run: echo "::warning::Docker build failed — image scan skipped. Filesystem scan still runs in trivy-fs job." 
+ # ────────────────────────────────────────────── # Dockerfile linting — Hadolint # ────────────────────────────────────────────── From eb894298bcdb715cbb940a0a470016530df9f4db Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:14:29 +0100 Subject: [PATCH 096/122] feat: add actionable cleanup suggestions with direct user actions --- docs/adr/002-feature-suggestions.md | 62 +++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 docs/adr/002-feature-suggestions.md diff --git a/docs/adr/002-feature-suggestions.md b/docs/adr/002-feature-suggestions.md new file mode 100644 index 0000000..67c3c48 --- /dev/null +++ b/docs/adr/002-feature-suggestions.md @@ -0,0 +1,62 @@ +# ADR-002: Actionable Cleanup Suggestions + +**Status:** Proposed +**Date:** 2026-02-24 + +## Context + +The Cleanup Suggestions page (`/cleanup-suggestions`) currently provides a **read-only analytics view**. It scores media items across categories like *Unwatched*, *Abandoned*, *Low Engagement*, *Stale*, and *Storage Hog* — but users cannot act on these suggestions directly. To actually clean up a suggested item, a user must manually create or adjust a cleanup rule, wait for it to be evaluated, and then let the scheduled job flag and process items. This disconnect between "here's what you should clean up" and "now go do it" adds unnecessary friction. + +## Proposal + +Add the ability to **take direct action on cleanup suggestions** from the Cleanup Suggestions page. Specifically, users should be able to: + +### 1. Flag Suggestions for Deletion (Mark for Cleanup) + +- Select one or more suggested items and **mark them for deletion** — equivalent to what happens when a cleanup rule matches an item. +- This sets `flagged_for_cleanup = true`, records `flagged_at`, and computes `scheduled_cleanup_at` based on a user-chosen or default grace period. 
+- After the grace period expires, the existing `run_scheduled_cleanups()` job picks them up and executes the configured action (delete via Sonarr/Radarr, unmonitor, etc.). +- Since these items are flagged manually (not by a rule), `flagged_by_rule_id` can remain `null` to distinguish manual flags from rule-driven ones. + +### 2. Stage Suggestions Directly + +- Select one or more suggested items and **move them to staging** in a single action — bypassing the need for a rule entirely. +- This invokes the existing `move_to_staging()` logic: the file is physically relocated to the staging directory, the item appears in the Emby staging library ("MediaCleanup - Scheduled for Deletion"), and the grace period countdown begins. +- Users can still restore staged items from the Staging page if they change their mind. + +### 3. Bulk Actions + +- Support **batch selection** (select all / select by category) so users can act on multiple suggestions at once rather than one at a time. +- Provide a confirmation dialog summarizing the action (number of items, total reclaimable space, chosen action). + +## UI Changes + +- Add **checkboxes** to each suggestion row on the Cleanup Suggestions page. +- Add a **toolbar/action bar** that appears when items are selected, offering: + - "Flag for Deletion" — marks selected items for cleanup with a configurable grace period. + - "Move to Staging" — immediately stages selected items (only visible when staging is enabled). +- Add a **"Select All" / "Select by Category"** control for bulk operations. +- Show a **confirmation dialog** before executing any action, displaying the list of affected items and total disk space. 
+ +## API Changes + +The following new or adjusted endpoints are needed: + +| Endpoint | Method | Description | +|---|---|---| +| `/media/cleanup-suggestions/flag` | POST | Accepts a list of media IDs + optional grace period; flags them for cleanup | +| `/media/cleanup-suggestions/stage` | POST | Accepts a list of media IDs; stages them via the existing staging service | + +Alternatively, the existing `POST /staging/stage` (batch staging) endpoint may already cover the staging use case — evaluate whether it can be reused directly. + +## How It Differs from Rules + +- **Rules** are automated and recurring — they run on a schedule, evaluate conditions, and flag matching items every time. +- **This feature** is a **manual, one-time action** — the user reviews the AI-generated suggestions and decides which ones to act on right now. +- Think of it as: rules are the autopilot, suggestions are the manual override. + +## Out of Scope + +- Changing how suggestion scores are calculated. +- Auto-applying suggestions without user confirmation. +- Creating rules from suggestions (could be a future enhancement). \ No newline at end of file From 3691b1b815acee7bc79be9cb58047effe3ee2d6b Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:14:33 +0100 Subject: [PATCH 097/122] feat: add planned features documentation for statistics and analytics --- docs/PLANNED_FEATURES.md | 432 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 432 insertions(+) create mode 100644 docs/PLANNED_FEATURES.md diff --git a/docs/PLANNED_FEATURES.md b/docs/PLANNED_FEATURES.md new file mode 100644 index 0000000..0993673 --- /dev/null +++ b/docs/PLANNED_FEATURES.md @@ -0,0 +1,432 @@ +# Planned Features - MediaCurator + +This document describes the planned statistics and analytics features for MediaCurator. These features will provide deep insights into media consumption patterns and enable smarter cleanup decisions. + +--- + +## Table of Contents + +1. 
[Library Detail View](#library-detail-view) +2. [Users Page](#users-page) +3. [User Detail View](#user-detail-view) +4. [Global Activity Log](#global-activity-log) +5. [Statistics Dashboard](#statistics-dashboard) +6. [Advanced Analytics](#advanced-analytics) +7. [Smart Cleanup Rules](#smart-cleanup-rules) + +--- + +## Library Detail View + +Click on any library to see detailed statistics and content. + +### Overview Tab +- **Time-based Stats**: Play counts and duration for Last 24 Hours, Last 7 Days, Last 30 Days, and All Time +- **Genre Distribution**: Radar charts showing which genres are watched most (by duration and by play count) +- **Recently Added**: Carousel/grid of recently added content with poster images +- **Last Watched**: Recently watched content in this library with user attribution + +### Media Tab +- **Content Browser**: Grid view of all media items with poster images ✅ Grid View with Poster Proxy (Session 10) +- **File Size Display**: Show file size on each item (e.g., "16.24 GB") ✅ +- **Sorting Options**: Sort by title, date added, size, play count ✅ +- **Filtering**: Filter by genre, year, rating, watched status + +### Activity Tab +- **Library Activity Log**: All playback sessions for this library +- **Columns**: User, IP Address, Title, Client, Transcode status, Device, Date, Total Playback +- **Filtering**: Filter by type (movie/episode), search by title +- **Pagination**: Handle large activity logs efficiently + +--- + +## Users Page + +Overview of all Emby/Jellyfin users synced to MediaCurator. 
+ +### User List +| Column | Description | +|--------|-------------| +| User | Username with avatar | +| Tracked | Whether user is being tracked (checkmark) | +| Last Watched | Title of the last watched content | +| Last Client | Client app and device used (e.g., "Emby for iOS - iPhone") | +| Plays | Total play count across all time | +| Watch Time | Total watch duration (e.g., "9 Hours 33 Minutes") | +| Last Seen | When the user was last active (e.g., "8 Days 20 Hour 58 Minutes ago") | + +### Features +- **Search**: Search users by name +- **Pagination**: Handle many users +- **Click to Detail**: Click on any user to see their detail view + +--- + +## User Detail View + +Detailed statistics and activity for a single user. + +### Overview Tab +- **User Stats Box**: Play counts and watch time for Last 24 Hours, Last 7 Days, Last 30 Days, All Time +- **Last Watched**: Grid of recently watched content by this user +- **Favorite Genres**: What genres this user prefers (derived from watch history) ✅ BarChart (Session 9) + +### Activity Tab +- **Item Activity**: Complete playback history for this user +- **Columns**: IP Address, Title, Client, Transcode, Device, Date, Total Playback +- **Library Filter**: Filter activity by library ✅ +- **Type Filter**: Filter by movie/series ✅ (Session 10) +- **Search**: Search within activity ✅ (Session 10) + +### Timeline Tab ✅ Implemented (Session 10) +- **Visual Timeline**: Chronological view of user's watch activity ✅ Calendar Heatmap (90 days) +- **Library Filter**: Show only specific libraries ✅ +- **Date Range**: Filter by date range +- **Session Grouping**: Group consecutive plays into sessions ✅ 30-min gap detection + +--- + +## Global Activity Log + +Server-wide activity feed showing all playback across all users. 
+ +### Activity Table +| Column | Description | +|--------|-------------| +| User | Which user is watching | +| IP Address | Client IP address | +| Title | Movie title or "Series : S01E05 - Episode Title" | +| Client | App name (e.g., "Emby for Samsung", "Emby Web") | +| Transcode | Direct Stream, Transcode (Video), Transcode (Audio), or both | +| Device | Device name (e.g., "Samsung Smart TV", "Firefox", "iPhone") | +| Date | Timestamp of playback start | +| Total Playback | Duration of the playback session | + +### Filters +- **Libraries**: Filter by specific library +- **Type**: All, Movies, Series, Music, etc. +- **Items per page**: 10, 25, 50, 100 +- **Search**: Search by title, user, or device + +### Expand Row ✅ Implemented (Session 6) +- Click to expand a row for more details: + - ~~Full file path~~ + - ~~Bitrate information~~ + - ~~Resolution~~ + - ~~Audio/video codecs~~ + - ~~Subtitle information~~ + - IP Address, Device, Play Method (color-coded), Progress Bar + - Implemented on: Activity, UserDetail, LibraryDetail, Preview (season breakdown) + +--- + +## Statistics Dashboard + +Visual charts and graphs for understanding viewing patterns. 
+ +### Daily Play Count Chart +- **Stacked Area Chart**: Shows daily play counts over configurable time range +- **Per-Library Breakdown**: Different color for each library +- **Toggle**: Switch between Count (plays) and Duration (watch time) +- **Time Range**: Last 7, 14, 20, 30, 60, 90 days + +### Play Count By Day of Week +- **Bar Chart**: Which days of the week are most active +- **Per-Library Breakdown**: Stacked by library +- **Shows**: Sunday through Saturday activity patterns + +### Play Count By Hour +- **Bar Chart**: Which hours of the day are most active +- **24-Hour View**: 00:00 to 23:00 +- **Per-Library Breakdown**: See what's watched when +- **Peak Identification**: Easily spot prime-time hours + +### Genre Distribution ✅ Implemented (Session 9) +- **Radar/Spider Chart**: Visual representation of genre preferences ✅ RadarChart on Activity page +- **Two Views**: By Duration (total watch time) and By Play Count ✅ Radar (plays) + Bar (watch time hours) +- **Library-Specific**: Can be global or per-library ✅ Filterable by library_id and user_id + +--- + +## Advanced Analytics + +Deeper insights derived from watch data. + +### User Activity Timeline ✅ Implemented (Session 10) +**Purpose**: Visualize when each user watches content over time. + +- Calendar heatmap showing activity intensity ✅ 90-day GitHub-style heatmap +- Hour-by-day grid showing viewing patterns +- Identify each user's "watch schedule" ✅ Session grouping with 30-min gap + +### Watch Patterns / Heatmap ✅ Implemented (Session 9) +**Purpose**: Understand peak viewing times across all users. + +- 7x24 grid (days x hours) with color intensity ✅ CSS Grid on Activity page +- Identify server load patterns ✅ +- Plan maintenance windows during low-activity periods ✅ +- See if weekends differ from weekdays ✅ + +### Concurrent Streams Analysis ✅ Implemented (Session 10) +**Purpose**: Track how many simultaneous streams occur. 
+ +- Historical peak concurrent users ✅ Daily peak chart + overall peak +- Time of day with most concurrent streams ✅ Hourly average bar chart +- Useful for: + - Server capacity planning ✅ + - Transcode load understanding ✅ + - License/limit management ✅ + +### Watch Duration Stats ✅ Implemented (Session 10) +**Purpose**: Understand typical viewing sessions. + +- Average session length (overall and per library) ✅ +- Distribution chart of session lengths ✅ Bucketed bar chart +- Compare movies vs. series session lengths ✅ By-type cards +- Identify "quick check" vs. "full watch" patterns ✅ + +### Completion Rate Analytics ✅ Implemented (Session 10) +**Purpose**: Track how often content is finished. + +- Percentage of movies watched to completion (>90%) ✅ Pie chart +- Series episode completion rates ✅ By-type stacked bars +- Identify content that users abandon early ✅ Most abandoned list +- Useful for cleanup decisions: "Content rarely finished" ✅ + +### Binge-Watch Detection ✅ Implemented (Session 10) +**Purpose**: Identify when users watch multiple episodes consecutively. + +- Detect 3+ episodes of same series in one session ✅ 4hr gap detection +- Track which series trigger binge behavior ✅ Top binged series +- User-specific binge patterns ✅ Top bingers leaderboard +- Useful for: + - Understanding engagement ✅ + - Cleanup rules: "Series being binged should not be deleted" ✅ + +### Shared vs. Solo Content ✅ Implemented (Session 10) +**Purpose**: Identify content watched by multiple users vs. single users. + +- **Shared Content**: Watched by 2+ unique users ✅ +- **Solo Content**: Only watched by 1 user ✅ +- **Unwatched**: Not watched by anyone ✅ +- Useful for cleanup: ✅ + - Keep shared content longer ✅ + - Solo content can be cleaned up sooner ✅ + - Unwatched content is priority for cleanup ✅ + +--- + +## Smart Cleanup Rules + +New rule conditions enabled by user tracking. 
+ +### Cleanup Rules per User +**Condition**: "Delete only if NO user has watched in X days" + +Instead of relying on global "last watched" from Emby, check each user's watch history individually. + +**Use Cases**: +- Family server: Keep content if any family member watched recently +- Multi-user: Don't delete shared favorites +- Fair cleanup: Account for all users, not just the last one + +**Example Rule**: +``` +Delete movie if: + - No user has watched in 90 days + - AND file size > 10 GB + - AND rating < 7.0 +``` + +### User-Specific Exclusions +**Condition**: "Never delete if User X has it as favorite" + +**Use Cases**: +- Protect content for specific users (e.g., kids' content) +- VIP users whose favorites are always kept +- Admin override for important content + +### Currently Watching Detection (Enhanced) +**Condition**: "Never delete if any user is currently in the middle of watching" + +**Improvements**: +- Track per-user watch progress +- Detect active binge sessions +- Protect entire series if someone is actively watching + +--- + +## Implementation Priority + +### Phase 1 - Foundation ✅ +- [x] MediaServerUser model +- [x] UserWatchHistory model +- [x] Basic user stats on Dashboard +- [x] Library stats API +- [x] PlaybackActivity model + +### Phase 2 - Views & Navigation ✅ +- [x] Library Detail View (Overview, Media, Activity tabs) +- [x] Users Page (list with stats) +- [x] User Detail View (Overview, Activity, Timeline tabs) +- [x] Global Activity Log +- [x] Activity stats API (plays by day/hour/week) +- [x] Active sessions tracking +- [x] Library filter on Activity page +- [x] Library filter on User Detail activity tab +- [x] Items-per-page selector on Activity page +- [x] ResponsiveTable on all pages (History, Users, Activity, UserDetail, Staging, Jobs, LibraryDetail, Preview) +- [x] Expand-Row on Activity, UserDetail, LibraryDetail (IP, Device, Play Method, Progress) +- [x] ConfirmDialog accessibility (aria-modal, focus trap, escape key) +- [x] User 
Timeline Tab (calendar heatmap + session grouping) (Session 10) +- [x] Image Proxy endpoint (poster images from Emby/Jellyfin) (Session 10) +- [x] LibraryDetail Grid View with poster images (Session 10) +- [x] UserDetail Activity type filter + search (Session 10) + +### Phase 3 - Statistics & Charts ✅ Complete (Session 4+9) +- [x] Daily Play Count Chart (Activity page + Dashboard) +- [x] Play Count by Day of Week Chart (Activity page + Dashboard) +- [x] Play Count by Hour Chart (Activity page + Dashboard) +- [x] Genre Distribution Charts – Backend `/activity/genre-stats` + RadarChart (Activity) + BarChart (Dashboard, LibraryDetail, UserDetail) (Session 9) +- [x] Watch Patterns Heatmap – Backend `/activity/watch-heatmap` + CSS Grid 7×24 (Activity) (Session 9) + +### Phase 4 - Advanced Analytics ✅ Complete (Session 9+10) +- [x] Concurrent Streams Analysis – Backend `/activity/concurrent-streams` + Frontend Analytics page (Session 10) +- [x] Watch Duration Stats – Backend `/activity/duration-stats` + Frontend Analytics page (Session 10) +- [x] Completion Rate Analytics – Backend `/activity/completion-rates` + Frontend Analytics page (Session 10) +- [x] Binge-Watch Detection – Backend `/activity/binge-stats` + Frontend Analytics page (Session 10) +- [x] Shared vs. 
Solo Content – Backend `/media/content-reach` + Frontend Analytics page (Session 10) + +### Phase 5 - Smart Cleanup Integration ✅ Complete (Session 11) +- [x] Cleanup Rules per User condition – `no_user_watched_days` in RuleConditions (Session 11) +- [x] User-Specific Exclusions – `exclude_if_user_favorited` + `min_unique_viewers` in RuleConditions (Session 11) +- [x] Enhanced Currently Watching detection – `exclude_active_sessions` checks PlaybackActivity.is_active (Session 11) +- [x] Analytics-based cleanup suggestions – `/media/cleanup-suggestions` endpoint + CleanupSuggestions page (Session 11) + +### Security Hardening ✅ Complete (Session 7+8) +- [x] httpOnly Cookie Auth (JWT aus localStorage entfernt, ADR-001) +- [x] CSRF Double-Submit Cookie Protection +- [x] Security Headers Middleware (CSP, X-Frame-Options, X-Content-Type-Options) +- [x] Structured Security Event Logging +- [x] SSRF-safe URL Validation (outbound requests) +- [x] Account Lockout Mechanism +- [x] Refresh Token Rotation +- [x] WebSocket Authentication (short-lived token) +- [x] CORS Lockdown (wildcard warning in production) +- [x] Admin-Only Routes (Rules, Jobs, Staging, Services, Notifications, System) +- [x] Input Sanitization (escape_like for SQL queries) +- [x] Outbound URL Validation (Services, Notifications, Setup) +- [x] Staging Path Validation +- [x] Request Body Size Limit +- [x] WebSocket Connection Limits per IP +- [x] Sensitive Config Masking (API keys, secrets) +- [x] Password Complexity Enforcement +- [x] Secret Key Enforcement +- [x] Audit Log Data Retention Job +- [x] Refresh Token Cleanup Job +- [x] Trusted Proxy Configuration +- [x] CI/CD: Tests + Security Scanning Workflows +- [x] Pytest Setup with Smoke Tests +- [x] Dependabot Configuration +- [x] datetime.utcnow() → timezone-aware datetime.now(timezone.utc) + +--- + +## Data Requirements + +To enable these features, the following data needs to be tracked: + +### From Emby/Jellyfin Sync +- User list with IDs and names +- 
Per-user watch history (what, when, how long) +- Playback sessions (client, device, IP, transcode info) +- Current playback position (for in-progress detection) + +### Derived/Calculated +- Total watch time per user +- Total watch time per library +- Genre preferences per user +- Session detection (grouping consecutive plays) +- Concurrent stream peaks + +### Storage Considerations +- ~~Activity logs can grow large - implement retention policies~~ ✅ Implemented (Session 7: Audit Log Data Retention Job) +- Consider aggregating old data (daily summaries instead of individual events) +- Provide cleanup options for activity history + +--- + +--- + +## Implementation History + +| Phase | Feature | Session | Status | +|-------|---------|---------|--------| +| 1 | All Foundation models & APIs | Session 1 | ✅ | +| 2 | Library Detail, Users, Activity pages | Session 1-2 | ✅ | +| 2 | WebSocket Real-Time System | Session 2 | ✅ | +| 2 | Setup Wizard | Session 2 | ✅ | +| 2 | Bug fixes (Light-mode, API paths, etc.) 
| Session 3 | ✅ | +| 2 | ResponsiveTable migrations | Session 4 | ✅ | +| 3 | recharts charts on Activity page | Session 4 | ✅ | +| 2 | Library filter on Activity & UserDetail | Session 5 | ✅ | +| 2 | Items-per-page on Activity | Session 5 | ✅ | +| 2 | LibraryDetail ResponsiveTable migration | Session 5 | ✅ | +| 3 | Dashboard recharts charts | Session 5 | ✅ | +| 4 | Code-splitting with React.lazy | Session 4 | ✅ | +| 2 | Preview.tsx ResponsiveTable migration | Session 6 | ✅ | +| 2 | Expand-Row (Activity, UserDetail, LibraryDetail) | Session 6 | ✅ | +| 2 | ConfirmDialog accessibility (focus trap, aria-modal) | Session 6 | ✅ | +| - | BUG-011: position_ticks int32→BigInteger | Session 6 | ✅ | +| - | BUG-012: Radarr folder→file path (movie stats) | Session 6 | ✅ | +| - | BUG-013: User Last Seen/Watched/Client fallback | Session 6 | ✅ | +| Sec | CORS Lockdown, Secret Key Enforcement, API Key Masking | Session 7 | ✅ | +| Sec | Security Headers Middleware (CSP, X-Frame-Options) | Session 7 | ✅ | +| Sec | Account Lockout + Refresh Token Rotation | Session 7 | ✅ | +| Sec | WebSocket Auth Token + Frontend Auth Rotation | Session 7 | ✅ | +| Sec | Trusted Proxy Config + Rate Limit Improvements | Session 7 | ✅ | +| Sec | Staging Path Validation + Rule Import Size Limit | Session 7 | ✅ | +| Sec | Audit Log Data Retention Job | Session 7 | ✅ | +| Sec | datetime.utcnow() → timezone-aware | Session 7 | ✅ | +| Sec | httpOnly Cookie Auth Migration (ADR-001) | Session 8 | ✅ | +| Sec | CSRF Double-Submit Cookie Middleware | Session 8 | ✅ | +| Sec | Security Event Logging (structured JSON) | Session 8 | ✅ | +| Sec | SSRF-safe URL Validation | Session 8 | ✅ | +| Sec | SQL Injection Protection (escape_like) | Session 8 | ✅ | +| Sec | Content-Security-Policy Header | Session 8 | ✅ | +| Sec | WebSocket Connection Limits per IP | Session 8 | ✅ | +| Sec | Admin-Only Routes (all sensitive endpoints) | Session 8 | ✅ | +| Sec | Outbound URL Validation (SSRF) on all endpoints | Session 8 | ✅ | +| 
Sec | Body Size Limit + Rate Limiting on all Routes | Session 8 | ✅ | +| Sec | Refresh Token Cleanup Job | Session 8 | ✅ | +| Sec | Password Complexity + Input Validation | Session 8 | ✅ | +| CI | GitHub Actions: tests.yml + security-scan.yml | Session 8 | ✅ | +| CI | Pytest Setup + Smoke Test | Session 8 | ✅ | +| CI | Dependabot Configuration | Session 8 | ✅ | +| 3 | Genre Distribution API (`/activity/genre-stats`) | Session 9 | ✅ | +| 4 | Watch Patterns Heatmap API (`/activity/watch-heatmap`) | Session 9 | ✅ | +| 3 | Genre RadarChart on Activity page | Session 9 | ✅ | +| 4 | Watch Heatmap (7×24 CSS Grid) on Activity page | Session 9 | ✅ | +| 3 | Genre Charts on LibraryDetail (Radar + Bar) | Session 9 | ✅ | +| 2 | Favorite Genres BarChart on UserDetail | Session 9 | ✅ | +| 3 | Genre Distribution BarChart on Dashboard | Session 9 | ✅ | +| 2 | User Timeline API (`/users/{id}/timeline`) | Session 10 | ✅ | +| 2 | Image Proxy API (`/media/{id}/image`) | Session 10 | ✅ | +| 2 | User Timeline Tab (Calendar Heatmap + Sessions) | Session 10 | ✅ | +| 2 | LibraryDetail Grid View with Poster Images | Session 10 | ✅ | +| 2 | UserDetail Activity Type Filter + Search | Session 10 | ✅ | +| 4 | Concurrent Streams API (`/activity/concurrent-streams`) | Session 10 | ✅ | +| 4 | Duration Stats API (`/activity/duration-stats`) | Session 10 | ✅ | +| 4 | Completion Rates API (`/activity/completion-rates`) | Session 10 | ✅ | +| 4 | Binge Stats API (`/activity/binge-stats`) | Session 10 | ✅ | +| 4 | Content Reach API (`/media/content-reach`) | Session 10 | ✅ | +| 4 | Analytics Page (5-tab: Concurrent, Duration, Completion, Binge, Reach) | Session 10 | ✅ | +| 5 | Per-User Rule Conditions (no_user_watched_days, exclude_if_user_favorited) | Session 11 | ✅ | +| 5 | Active Session Protection (exclude_active_sessions) | Session 11 | ✅ | +| 5 | Min Unique Viewers Rule Condition | Session 11 | ✅ | +| 5 | Cleanup Suggestions API (`/media/cleanup-suggestions`) | Session 11 | ✅ | +| 5 | Cleanup 
Suggestions Page (CleanupSuggestions.tsx) | Session 11 | ✅ | + +--- + +*This document will be updated as features are implemented and new ideas emerge.* From 64f211daa2ad54108b86a0c48fd197e856e07a4b Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:14:40 +0100 Subject: [PATCH 098/122] feat: Add comprehensive DEVELOPMENT_STATUS document for MediaCurator - Introduced a detailed development status and handoff document to track ongoing progress and provide context for future development. - Included sections on project overview, tech stack, architecture, implementation status, known bugs, UX/UI issues, code quality problems, and next steps. - Document serves as a reference for developers and contributors to understand the current state and future direction of the project. --- docs/DEVELOPMENT_STATUS.md | 586 +++++++++++++++++++++++++++++++++++++ 1 file changed, 586 insertions(+) create mode 100644 docs/DEVELOPMENT_STATUS.md diff --git a/docs/DEVELOPMENT_STATUS.md b/docs/DEVELOPMENT_STATUS.md new file mode 100644 index 0000000..786a4fd --- /dev/null +++ b/docs/DEVELOPMENT_STATUS.md @@ -0,0 +1,586 @@ +# MediaCurator - Development Status & Handoff Document + +> **Zweck**: Dieses Dokument dient als fortlaufender Stand für die Weiterentwicklung. Es kann in jedem neuen Chat/auf jedem Rechner als Kontext übergeben werden, damit der Assistent sofort weiß, wo es weitergeht. + +**Letzte Aktualisierung**: 26. Februar 2026 (Session 11) +**Branch**: `develop` +**Letzter Commit**: `55e4bf7` (Session 9) +**Version**: `vdev.0.0.231` +**Repo**: `https://github.com/Serph91P/MediaCurator.git` + +--- + +## Inhaltsverzeichnis + +1. [Projekt-Übersicht](#projekt-übersicht) +2. [Tech-Stack](#tech-stack) +3. [Architektur-Überblick](#architektur-überblick) +4. [Implementierungsstatus nach Phasen](#implementierungsstatus-nach-phasen) +5. [Bekannte Bugs (priorisiert)](#bekannte-bugs-priorisiert) +6. [UX/UI-Probleme](#uxui-probleme) +7. 
[Code-Qualitätsprobleme](#code-qualitätsprobleme)
+8. [Was gut funktioniert](#was-gut-funktioniert)
+9. [Nächste Schritte (priorisiert)](#nächste-schritte-priorisiert)
+10. [Dateistruktur-Referenz](#dateistruktur-referenz)
+11. [Entwicklungsumgebung starten](#entwicklungsumgebung-starten)
+12. [Änderungshistorie dieses Dokuments](#änderungshistorie-dieses-dokuments)
+
+---
+
+## Projekt-Übersicht
+
+MediaCurator ist ein Self-Hosted Media Management Tool, das mit Emby/Jellyfin (Media Server) und Sonarr/Radarr (Download Manager) zusammenarbeitet. Es bietet:
+
+- **Automatische Cleanup-Regeln**: Ungesehene/alte/schlecht bewertete Medien erkennen und löschen
+- **Staging-System**: Soft-Delete mit Grace Period – Dateien werden in eine Staging-Library verschoben, bevor sie permanent gelöscht werden
+- **User-Tracking**: Watch-History pro User synced von Emby
+- **Activity-Monitoring**: Aktive Sessions, Playback-History
+- **Notifications**: Apprise-basiert (Discord, Slack, Webhook, etc.)
+- **Dashboard**: Statistiken, Most Viewed/Popular, Library-Übersicht
+
+---
+
+## Tech-Stack
+
+### Backend
+| Komponente | Technologie | Version |
+|-----------|------------|---------|
+| Framework | FastAPI | aktuell |
+| ORM | SQLAlchemy (async) | aktuell |
+| DB | SQLite (default) / PostgreSQL | |
+| Scheduler | APScheduler | |
+| HTTP Client | httpx (async) | |
+| Auth | JWT (httpOnly cookies + CSRF) | |
+
+### Frontend
+| Komponente | Technologie | Version |
+|-----------|------------|---------|
+| Framework | React | 19.2 |
+| Language | TypeScript | 5.3 |
+| Bundler | Vite | 7.3 |
+| Styling | Tailwind CSS | 4.x (CSS-first @theme) |
+| Server State | TanStack React Query | 5.x |
+| Client State | Zustand (persist) | 5 |
+| Forms | react-hook-form | 7.49 |
+| Charts | recharts | 3.7 (**in Benutzung**: Activity + Dashboard Charts) |
+| HTTP | Axios (httpOnly cookie auth, CSRF) | 1.6 |
+| Toasts | react-hot-toast | 2.4 |
+
+---
+
+## Architektur-Überblick
+
+### Backend-Struktur
+``` +backend/app/ +├── main.py # FastAPI App, CORS, Middleware-Stack, Router-Mounting, WebSocket Endpoint +├── scheduler.py # APScheduler Jobs (Sync, Cleanup, Staging, Token-Cleanup) + WebSocket-Broadcast +├── core/ +│ ├── config.py # Pydantic Settings +│ ├── csrf.py # ★ NEU: CSRF Double-Submit Cookie Middleware +│ ├── database.py # SQLAlchemy async engine/session +│ ├── migrations.py # DB-Migrationen (Alembic-like, manuell) +│ ├── rate_limit.py # Rate Limiting (enhanced mit Security-Event-Logging) +│ ├── security.py # JWT (httpOnly Cookies), Password Hashing, WebSocket Token +│ ├── security_events.py # ★ NEU: Strukturiertes Security-Event-Logging +│ ├── security_headers.py # ★ NEU: Security Headers Middleware (CSP, X-Frame-Options, etc.) +│ ├── url_validation.py # ★ NEU: SSRF-Safe URL Validation +│ └── websocket.py # ConnectionManager für Real-Time Job-Broadcasting (+ IP-Limits) +├── models/database.py # ALLE SQLAlchemy Models (656 Zeilen) +├── schemas/__init__.py # ALLE Pydantic Schemas +├── services/ +│ ├── base.py # BaseServiceClient (abstrakt, httpx) +│ ├── emby.py # EmbyClient + EmbyService (Caching, Sync) +│ ├── radarr.py # RadarrClient (API v3) +│ ├── sonarr.py # SonarrClient (API v3) +│ ├── sync.py # Haupt-Sync-Logik (Emby→DB, Sonarr→DB, Radarr→DB) +│ ├── cleanup_engine.py # Rule-Evaluation + Execution +│ ├── staging.py # Soft-Delete-System +│ ├── notifications.py # Multi-Channel mit Templates +│ ├── audit.py # Audit-Logging +│ └── version.py # Git/GitHub Version-Check +├── tests/ +│ ├── __init__.py # ★ NEU: Test-Setup +│ └── test_smoke.py # ★ NEU: Smoke-Test (Settings-Loading) +└── api/routes/ + ├── activity.py # GET /activity/, /stats, /active, /genre-stats, /watch-heatmap, /concurrent-streams, /duration-stats, /completion-rates, /binge-stats + ├── audit.py # GET/DELETE /audit/logs, /recent, etc. 
+ ├── auth.py # POST /auth/login, /register, /refresh, /logout (httpOnly Cookies, CSRF) + ├── jobs.py # GET/POST /jobs/, trigger, interval (Admin-Only) + ├── libraries.py # GET /libraries/, /stats, /{id}/details, /media, /activity + ├── media.py # GET /media/stats, /dashboard-stats, /watch-stats, /audit-log, /{id}/image, /content-reach + ├── notifications.py # CRUD /notifications/, test, preview-template (Admin-Only, URL Validation) + ├── rules.py # CRUD /rules/, templates, export/import, bulk (Admin-Only, File Size Limit) + ├── services.py # CRUD /services/, test, sync (Admin-Only, URL Validation) + ├── staging.py # GET/POST /staging/, restore, delete, settings (Admin-Only, Path Validation) + ├── setup.py # GET /setup/status, POST /test-connection, /add-service, /complete, /skip + ├── system.py # GET /system/health, /stats, /settings, cleanup/preview (Admin-Only) + └── users.py # GET /users/, /{id}, /{id}/activity, /{id}/timeline, PATCH hide +``` + +### Frontend-Struktur +``` +frontend/src/ +├── App.tsx # Routes (15 protected + Login/Register/Setup) + SetupGate +├── main.tsx # React Entry, QueryClient, Toaster +├── index.css # Tailwind @theme (dark-* + primary-* Palette) +├── components/ +│ ├── Layout.tsx # Sidebar, Header, Version-Check, Theme-Toggle, ★ Job-Badge +│ ├── ConfirmDialog.tsx # Modal (danger/warning/info) +│ ├── ResponsiveTable.tsx # Table → Cards auf Mobile +│ └── Skeleton.tsx # Loading-Skeletons +├── hooks/ +│ ├── useDebounce.ts # Generischer Debounce-Hook +│ └── useJobWebSocket.ts # ★ NEU: Globaler WebSocket-Hook (Auto-Reconnect, Toasts) +├── lib/ +│ ├── api.ts # Axios mit httpOnly Cookie Auth + CSRF Token Interceptor +│ └── utils.ts # formatBytes, formatRelativeTime, formatDate, etc. 
+├── stores/ +│ ├── auth.ts # Zustand: Login/Logout/Sessions (cookie-basiert, kein localStorage-Token) +│ ├── jobs.ts # Zustand: WebSocket Job-State (runningJobs, recentCompletions) +│ └── theme.ts # Zustand: light/dark/system +├── types/index.ts # TypeScript Interfaces +└── pages/ + ├── Dashboard.tsx # Stats, Most Viewed/Popular, Libraries, Disk + ├── Libraries.tsx # Library-Grid mit Stats, Sync + ├── LibraryDetail.tsx # 3 Tabs: Overview, Media (Table+Grid), Activity + ├── Users.tsx # User-Tabelle mit Stats + ├── UserDetail.tsx # 3 Tabs: Overview, Activity (Type/Search Filter), Timeline (Heatmap) + ├── Analytics.tsx # 5 Tabs: Concurrent Streams, Duration, Completion, Binge Watch, Content Reach + ├── Activity.tsx # Active Sessions + Activity-Log + ├── History.tsx # Cleanup-Audit-Log + ├── Jobs.tsx # ★ REWRITE: Live-Progress-Bars, WebSocket-Status, Running-Panel + ├── SetupWizard.tsx # ★ NEU: Geführter 5-Step Setup-Wizard + ├── Rules.tsx # CRUD + Templates + Export/Import + ├── Services.tsx # Service-Connections CRUD + ├── Settings.tsx # System-Settings, Password, Cache + ├── Staging.tsx # Staged Items + Library-Settings + ├── Notifications.tsx # Channels CRUD + Template-Editor + ├── Preview.tsx # Cleanup Dry-Run Preview + ├── Login.tsx # Auth + └── Register.tsx # Erstbenutzer-Setup +``` + +### Datenbank-Models (Wichtigste) +| Model | Tabelle | Beschreibung | +|-------|---------|-------------| +| User | users | App-Benutzer (Admin-Login) | +| RefreshToken | refresh_tokens | JWT Refresh Tokens mit Device-Info | +| ServiceConnection | service_connections | Sonarr/Radarr/Emby/Jellyfin Verbindungen | +| Library | libraries | Synchronisierte Bibliotheken mit Staging-Config | +| MediaItem | media_items | Alle Medien (Filme, Serien, Episoden) mit Watch-Status | +| CleanupRule | cleanup_rules | Regelwerk mit JSON-Conditions | +| MediaServerUser | media_server_users | Emby/Jellyfin-Benutzer | +| UserWatchHistory | user_watch_history | Watch-History pro User pro Item | +| 
PlaybackActivity | playback_activities | Playback-Sessions mit Client/Device/Transcode | +| CleanupLog | cleanup_logs | Cleanup-Audit-Trail | +| NotificationChannel | notification_channels | Notification-Konfigurationen | +| JobExecutionLog | job_execution_logs | Scheduler-Job-History | +| ImportStats | import_stats | Sync-Statistiken | +| AuditLog | audit_logs | Admin-Action-Audit | +| SystemSettings | system_settings | Key-Value Settings | + +--- + +## Implementierungsstatus nach Phasen + +> Referenz: `PLANNED_FEATURES.md` + +### Phase 1 – Foundation ✅ ERLEDIGT +- [x] MediaServerUser model +- [x] UserWatchHistory model +- [x] PlaybackActivity model +- [x] Basic user stats on Dashboard +- [x] Library stats API + +### Phase 2 – Views & Navigation ✅ ERLEDIGT +| Feature | Backend | Frontend | Qualität | Anmerkungen | +|---------|---------|----------|----------|-------------| +| Library Detail – Overview Tab | ✅ API liefert 24h/7d/30d Stats | ✅ Dargestellt + Genre-Charts | ✅ | Genre RadarChart + BarChart hinzugefügt (Session 9) | +| Library Detail – Media Tab | ✅ Sortierung, Suche, Pagination | ✅ ResponsiveTable + Grid View | ✅ | Grid View mit Poster-Bildern hinzugefügt (Session 10) | +| Library Detail – Activity Tab | ✅ Pagination | ✅ ResponsiveTable | ✅ | Migriert auf ResponsiveTable (Session 5) | +| Users Page | ✅ Pagination, Search | ✅ ResponsiveTable | ✅ | Gut implementiert | +| User Detail – Overview | ✅ Time-based Stats | ✅ + Favorite Genres Chart | ✅ | Favorite Genres BarChart hinzugefügt (Session 9) | +| User Detail – Activity | ✅ Filters + Library-Filter | ✅ Tabelle + Library/Type-Filter + Suche | ✅ | Type-Filter + Suchfeld hinzugefügt (Session 10) | +| User Detail – Timeline Tab | ✅ `/users/{id}/timeline` | ✅ Calendar Heatmap + Sessions | ✅ | 90-Tage-Heatmap + Session-Gruppierung (Session 10) | +| Image Proxy | ✅ `/media/{id}/image` | ✅ Im Grid View | ✅ | Poster-Proxy von Emby/Jellyfin mit Cache (Session 10) | +| Global Activity Log | ✅ Stats + Active 
Sessions | ✅ | ✅ | Library-Filter + Items-per-Page hinzugefügt (Session 5) | +| Activity Stats API | ✅ plays by day/hour/weekday | ✅ Charts | ✅ | recharts Charts auf Activity + Dashboard (Session 4+5) | +| Active Sessions | ✅ 30s Sync | ✅ 30s Refresh | ✅ | Gut implementiert | + +### Phase 3 – Statistics & Charts ✅ ERLEDIGT (Session 4+9) +| Feature | Backend-API | Frontend | Anmerkungen | +|---------|------------|----------|-------------| +| Daily Play Count Chart (Area) | ✅ `/activity/stats` liefert `plays_by_day` | ✅ Activity.tsx | recharts AreaChart mit Gradient | +| Play Count by Day of Week (Bar) | ✅ `/activity/stats` liefert `plays_by_day_of_week` | ✅ Activity.tsx | recharts BarChart | +| Play Count by Hour (Bar) | ✅ `/activity/stats` liefert `plays_by_hour` | ✅ Activity.tsx | recharts BarChart mit AM/PM Labels | +| Genre Distribution (Radar/Spider) | ✅ `/activity/genre-stats` | ✅ Activity + Dashboard + LibraryDetail | RadarChart (Activity), BarChart (Dashboard, Library, User) (Session 9) | +| Watch Patterns Heatmap (7×24) | ✅ `/activity/watch-heatmap` | ✅ Activity.tsx | CSS Grid Heatmap, Day×Hour (Session 9) | + +### ★ NEU: WebSocket Real-Time System ✅ ERLEDIGT (Session 2) +| Feature | Backend | Frontend | Qualität | Anmerkungen | +|---------|---------|----------|----------|-------------| +| ConnectionManager | ✅ `core/websocket.py` | — | ✅ | Broadcast, job_started/progress/completed Events | +| WebSocket Endpoint | ✅ `/api/ws/jobs` in main.py | — | ✅ | Ping/Pong Support | +| Progress Callbacks | ✅ sync.py (alle 3 Services) | — | ✅ | Sonarr: je 5 Serien, Radarr: je 10 Filme, Emby: 6 Phasen | +| Scheduler Integration | ✅ Alle 5 Job-Funktionen | — | ✅ | started/progress/completed Events | +| Global WebSocket Hook | — | ✅ `useJobWebSocket.ts` | ✅ | Auto-Reconnect (exp. 
Backoff), 30s Ping | +| Toast Notifications | — | ✅ In Layout.tsx (global) | ✅ | Started/Completed/Failed Toasts auf jeder Seite | +| Zustand Job Store | — | ✅ `stores/jobs.ts` | ✅ | runningJobs Map, wsStatus, recentCompletions | +| Jobs Page Rewrite | — | ✅ `Jobs.tsx` (~650 Zeilen) | ✅ | Live Progress-Bars, Running-Panel, ElapsedTime, WS-Indikator | +| Layout Job Badge | — | ✅ `Layout.tsx` | ✅ | Animierter blauer Badge mit laufenden Jobs Count | + +### ★ NEU: Setup Wizard ✅ ERLEDIGT (Session 2) +| Feature | Backend | Frontend | Qualität | Anmerkungen | +|---------|---------|----------|----------|-------------| +| Setup Status API | ✅ `GET /setup/status` | — | ✅ | Prüft Services + SystemSettings, kein Auth nötig | +| Test Connection | ✅ `POST /setup/test-connection` | — | ✅ | Temporäre Verbindung testen ohne Speichern | +| Add Service | ✅ `POST /setup/add-service` | — | ✅ | Im Wizard-Kontext | +| Complete/Skip | ✅ `POST /setup/complete`, `/skip` | — | ✅ | Validierung: min. 1 Arr + 1 Media Server | +| Wizard UI | — | ✅ `SetupWizard.tsx` (~550 Zeilen) | ✅ | 5-Step: Welcome→Arr→MediaServer→Sync→Complete | +| Setup Gate | — | ✅ `App.tsx` SetupGate | ✅ | Redirect nach /setup wenn nicht abgeschlossen | +| Service Order | — | ✅ | ✅ | Erzwingt Sonarr/Radarr vor Emby/Jellyfin | +| Initial Sync | — | ✅ | ✅ | Sequentieller Sync aller Services mit Status-Anzeige | +| Skip Option | — | ✅ | ✅ | Setup überspringen möglich | + +### ★ NEU: Security Hardening ✅ ERLEDIGT (Session 7+8) +| Feature | Backend | Frontend | Qualität | Anmerkungen | +|---------|---------|----------|----------|-------------| +| httpOnly Cookie Auth | ✅ `security.py` Set-Cookie | ✅ `api.ts` credentials:include | ✅ | JWT aus localStorage entfernt, XSS-sicher (ADR-001) | +| CSRF Protection | ✅ `csrf.py` Double-Submit Cookie | ✅ `api.ts` X-CSRF-Token Header | ✅ | Automatisch bei state-changing Requests | +| Security Headers | ✅ `security_headers.py` Middleware | — | ✅ | CSP, X-Frame-Options, 
X-Content-Type-Options, Referrer-Policy | +| Security Event Logging | ✅ `security_events.py` | — | ✅ | Strukturiertes JSON-Logging für Auth, Rate-Limit, CSRF Events | +| SSRF URL Validation | ✅ `url_validation.py` | — | ✅ | Blockiert private IPs, non-HTTP schemes, Credentials in URLs | +| Account Lockout | ✅ User Model + Auth | — | ✅ | Nach N fehlgeschlagenen Logins temporäre Sperre | +| Refresh Token Rotation | ✅ `security.py` + `auth.py` | ✅ `auth.ts` | ✅ | Altes Token wird bei Refresh revoked | +| WebSocket Auth Token | ✅ Short-lived WS Token | ✅ `useJobWebSocket.ts` | ✅ | Kurzlebiger Token für WS-Verbindung | +| CORS Lockdown | ✅ `main.py` | — | ✅ | Wildcard-Warnung in Production, strikte Origin-Prüfung | +| Admin-Only Routes | ✅ Alle sensitiven Routes | — | ✅ | Rules, Jobs, Staging, Services, Notifications nur für Admin | +| Input Sanitization | ✅ `escape_like()` in Queries | — | ✅ | SQL-Injection-Schutz bei LIKE-Queries | +| Outbound URL Validation | ✅ Services, Notifications, Setup | — | ✅ | Discord/Slack/Webhook URLs gegen SSRF validiert | +| Staging Path Validation | ✅ `staging.py` | — | ✅ | Pfade gegen erlaubte Verzeichnisse validiert | +| Body Size Limit | ✅ `main.py` Middleware | — | ✅ | Request-Body-Größe begrenzt | +| WS Connection Limits | ✅ `websocket.py` per-IP Limit | — | ✅ | Max Connections pro IP gegen Abuse | +| Sensitive Config Masking | ✅ API Keys, Notification Secrets | — | ✅ | Secrets werden in API-Responses maskiert | +| Password Complexity | ✅ `schemas/__init__.py` | — | ✅ | Mindestanforderungen für Passwörter | +| Secret Key Enforcement | ✅ `config.py` + docker-compose | — | ✅ | Schwache/Default Secret Keys werden abgelehnt | +| Audit Log Retention | ✅ `scheduler.py` Cleanup-Job | — | ✅ | Automatische Bereinigung alter Audit-Logs | +| Refresh Token Cleanup | ✅ `scheduler.py` Cleanup-Job | — | ✅ | Expired/revoked Tokens werden automatisch gelöscht | +| Trusted Proxy Config | ✅ `config.py` + Rate Limiter | — | ✅ | Forwarded Headers nur von 
trusted Proxies | +| CI/CD Workflows | ✅ tests.yml + security-scan.yml | — | ✅ | GitHub Actions für Tests + Security-Scanning (SAST/DAST) | +| Smoke Tests | ✅ `tests/test_smoke.py` + pytest | — | ✅ | Initiales Test-Setup mit pytest-asyncio | +| Dependabot | ✅ `.github/dependabot.yml` | — | ✅ | Automatische Dependency-Updates auf develop | + +### Phase 4 – Advanced Analytics ✅ ERLEDIGT (Session 9+10) +- [x] Watch Patterns Heatmap (7x24 Grid) – Activity.tsx (Session 9) +- [x] User Activity Timeline / Calendar Heatmap – UserDetail.tsx (Session 10) +- [x] Concurrent Streams Analysis – Analytics.tsx (Session 10) +- [x] Watch Duration Stats – Analytics.tsx (Session 10) +- [x] Completion Rate Analytics – Analytics.tsx (Session 10) +- [x] Binge-Watch Detection – Analytics.tsx (Session 10) +- [x] Shared vs. Solo Content Analysis – Analytics.tsx (Session 10) + +### Phase 5 – Smart Cleanup Integration ✅ ERLEDIGT +- [x] Per-User Cleanup Rules: `no_user_watched_days` – checks UserWatchHistory per user (Session 11) +- [x] User-Specific Exclusions: `exclude_if_user_favorited` – protects favorites of specific users (Session 11) +- [x] Enhanced Currently Watching: `exclude_active_sessions` – checks PlaybackActivity.is_active (Session 11) +- [x] Min Unique Viewers: `min_unique_viewers` – only delete if fewer than X viewers (Session 11) +- [x] Analytics-based Cleanup Suggestions: `/media/cleanup-suggestions` endpoint (Session 11) +- [x] Cleanup Suggestions UI: CleanupSuggestions.tsx with category filters + scoring (Session 11) + +--- + +## Bekannte Bugs (priorisiert) + +### 🔴 CRITICAL + +#### BUG-012: Dashboard “Most Viewed Movies” zeigt keine Daten +- **Datei**: `backend/app/services/sync.py` (Zeile 277) +- **Problem**: Radarr-Sync speichert den **Ordner-Pfad** (`movie.get("path")` → `/movies/Movie Title/`), aber Emby liefert den **Datei-Pfad** (`/movies/Movie Title/Movie.mkv`). Path-Matching in `track_watch_data` schlägt für ALLE Filme fehl → `watch_count` wird nie aktualisiert. 
+- **Auswirkung**: Keine Film-Watch-Statistiken, "No movie plays yet" auf Dashboard, `UserWatchHistory` für Filme leer +- **Fix**: `movie.get("movieFile", {}).get("path") or movie.get("path")` – bevorzugt den Datei-Pfad aus `movieFile` +- **Status**: ✅ ERLEDIGT (Session 6) +- **Hinweis**: Bestehende Filme werden beim nächsten Radarr-Sync automatisch korrigiert (Update-Loop setzt `path` neu) + +#### BUG-013: Users-Seite – Last Seen/Last Watched/Last Client fehlerhaft +- **Dateien**: `backend/app/services/sync.py`, `backend/app/api/routes/users.py` +- **Problem (3 Ursachen)**: + 1. `last_activity_at` hängt von `LastPlayedDate` aus Emby ab, das oft fehlt oder nicht geparst werden kann. Fehler wurden mit `except: pass` verschluckt. + 2. `UserWatchHistory`-Fallback filtert auf `last_played_at IS NOT NULL`, was ebenfalls von `LastPlayedDate` abhängt → kein Fallback. + 3. `UserWatchHistory` speichert keine Client/Device-Info → Fallback gibt immer `null`. +- **Auswirkung**: "Last Seen: Never" bei allen Usern, "Last Watched: Never" / "Last Client: N/A" bei Usern ohne aktive PlaybackActivity +- **Fix**: + - Fallback: `last_activity_at = datetime.now(UTC)` wenn User Plays hat aber kein `LastPlayedDate` + - `UserWatchHistory`-Query: `is_played == True` statt `last_played_at IS NOT NULL`, sortiert nach `coalesce(last_played_at, min)` + `play_count` + - Stilles `except: pass` → `logger.warning()` für `LastPlayedDate`-Parsing-Fehler +- **Status**: ✅ ERLEDIGT (Session 6) + +#### BUG-011: PlaybackActivity – position_ticks/runtime_ticks Integer Overflow +- **Datei**: `backend/app/models/database.py`, `backend/app/services/sync.py` +- **Problem**: `position_ticks` und `runtime_ticks` waren `Integer` (int32, max ~2.1B), aber Emby liefert Tick-Werte >int32 (z.B. `70223183889`). PostgreSQL wirft `value out of int32 range`. 
+- **Auswirkung**: Emby-Sync bricht bei Active Sessions ab, `services.py` commit schlägt fehl (PendingRollbackError) +- **Fix**: Spalten auf `BigInteger` geändert, Migration für PostgreSQL hinzugefügt (ALTER COLUMN TYPE BIGINT), Fehlerbehandlung in `_sync_active_sessions` und `services.py` mit `db.rollback()` versehen +- **Status**: ✅ ERLEDIGT (Session 6) + +#### BUG-001: LibraryDetail.tsx – Doppeltes `/api/` Prefix +- **Datei**: `frontend/src/pages/LibraryDetail.tsx` +- **Problem**: Die Seite benutzt Pfade wie `/api/libraries/${id}/details`, aber die Axios-BaseURL ist bereits `/api`. Das erzeugt Requests an `/api/api/libraries/...`. +- **Auswirkung**: **Seite ist wahrscheinlich komplett kaputt** (404 auf alle Requests) +- **Fix**: Alle Fetch-Calls auf relative Pfade ohne `/api/` Prefix umstellen, oder besser: auf React Query + `api.get()` migrieren (wie alle anderen Seiten) +- **Status**: ✅ ERLEDIGT (Session 3) – Komplett rewritten: API-Pfade korrigiert, auf 3× `useQuery` migriert, lokale Utils entfernt → importiert aus `lib/utils.ts`, Light/Dark-Mode Klassen + +### 🟡 HIGH + +#### BUG-002: Light-Mode kaputt auf Login/Register +- **Dateien**: `frontend/src/pages/Login.tsx`, `frontend/src/pages/Register.tsx` +- **Problem**: Hart-kodierte Dark-Klassen (`bg-dark-800`, `text-white`, `text-dark-200`) ohne `dark:` Prefix-Varianten +- **Auswirkung**: Im Light-Mode erscheint ein dunkler Kasten auf hellem Hintergrund. Text teilweise unsichtbar. +- **Fix**: Alle Farbklassen auf `bg-white dark:bg-dark-800`, `text-gray-900 dark:text-white`, etc. umstellen +- **Status**: ✅ ERLEDIGT (Session 3) – Alle Farbklassen mit `dark:` Varianten versehen + +#### BUG-003: ConfirmDialog – Light-Mode kaputt +- **Datei**: `frontend/src/components/ConfirmDialog.tsx` +- **Problem**: Hart-kodiert `bg-dark-800`. Cancel-Button im Light-Mode unsichtbar. 
+- **Fix**: `dark:` Varianten hinzufügen +- **Status**: ✅ ERLEDIGT (Session 3) + +#### BUG-004: Skeleton – Light-Mode kaputt +- **Datei**: `frontend/src/components/Skeleton.tsx` +- **Problem**: Hart-kodiert `bg-dark-700`, `bg-dark-800`. Ladeanimationen werden zu schwarzen Rechtecken. +- **Fix**: `bg-gray-200 dark:bg-dark-700` etc. +- **Status**: ✅ ERLEDIGT (Session 3) + +### 🟢 MEDIUM + +#### BUG-005: Toast-Notifications – Light-Mode +- **Datei**: `frontend/src/main.tsx` +- **Problem**: Toaster-Farben hart-kodiert auf dunkel (`#1e293b`) +- **Fix**: CSS-Variablen oder Theme-aware Konfiguration +- **Status**: ✅ ERLEDIGT (Session 3) – Hardcoded Hex durch Tailwind CSS `className` ersetzt + +#### BUG-006: Sprachmix Deutsch/Englisch +- **Dateien**: + - `frontend/src/components/Layout.tsx`: Update-Banner mit `"Update verfügbar!"`, `"Changelog ansehen"`, `"Aktuelle Version"` + - `frontend/src/lib/utils.ts`: `formatDate`/`formatDateTime` benutzen `'de-DE'` Locale +- **Fix**: Alles auf Englisch (oder ein i18n-System), Locale konfigurierbar machen +- **Status**: ✅ ERLEDIGT (Session 3+4) – `utils.ts` Locale `de-DE` → `en-US` geändert (Session 3). Layout.tsx deutsche Strings → Englisch (Session 4). + +#### BUG-007: LibraryDetail.tsx – Kein React Query +- **Datei**: `frontend/src/pages/LibraryDetail.tsx` +- **Problem**: Einzige Seite die manuell `useState` + `useEffect` benutzt statt React Query. Kein Caching, kein Retry, inkonsistent. +- **Fix**: Auf `useQuery`/`useMutation` migrieren wie alle anderen Seiten +- **Status**: ✅ ERLEDIGT (Session 3) – 3× `useQuery` mit `enabled` Flags, zusammen mit BUG-001 behoben + +#### BUG-008: Auth-Store – fetchUser nie aufgerufen +- **Datei**: `frontend/src/stores/auth.ts` +- **Problem**: `isAuthenticated` initialisiert sich aus `!!getToken()`, aber `user` kommt nur aus persistiertem Store. `fetchUser` wird beim App-Start nie aufgerufen → evtl. veraltete User-Daten. +- **Fix**: `fetchUser()` bei App-Init aufrufen (z.B. 
in `App.tsx` oder `ProtectedRoute`) +- **Status**: ✅ ERLEDIGT (Session 3) – `useEffect` in `ProtectedRoute` ruft `fetchUser()` bei Mount auf + +### 🔵 LOW + +#### BUG-009: Code-Duplizierung – formatBytes +- **Dateien**: `LibraryDetail.tsx`, `Preview.tsx` haben eigene `formatBytes` statt `utils.ts` zu importieren +- **Fix**: Löschen und aus `lib/utils.ts` importieren +- **Status**: ✅ ERLEDIGT (Session 3) – Lokale Funktionen entfernt, importiert aus `lib/utils.ts`. `formatDuration` ebenfalls nach utils.ts verschoben. + +#### BUG-010: Users.tsx – Eigener Debounce statt Hook +- **Datei**: `frontend/src/pages/Users.tsx` +- **Problem**: Manueller `setTimeout`-Debounce statt `useDebounce` Hook +- **Fix**: `useDebounce` aus `hooks/useDebounce.ts` verwenden +- **Status**: ✅ ERLEDIGT (Session 3) + +--- + +## UX/UI-Probleme + +### Mobile-Responsiveness – Tabellen laufen über +| Seite | Nutzt ResponsiveTable? | Status | +|-------|----------------------|--------| +| History.tsx | ✅ Ja | ✅ Gut | +| Users.tsx | ✅ Ja | ✅ Gut | +| LibraryDetail.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 5) | +| UserDetail.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 4) | +| Activity.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 4) | +| Preview.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 6, mit Expand-Row-Support) | +| Staging.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 4) | +| Jobs.tsx | ✅ ResponsiveTable | ✅ Executions-Tabelle migriert (Session 4) | + +### Fehlende UI-Features (geplant aber nicht implementiert) +- **Activity-Seite**: ~~IP-Adresse Spalte, Device-Spalte, Expand-Row, Library-Filter, Items-per-Page Selector~~ ✅ Alles implementiert (Session 4+5+6). 
~~Genre RadarChart, Watch Heatmap~~ ✅ (Session 9) +- **LibraryDetail**: ~~Genre-Distribution Charts~~ ✅ (Session 9), ~~Grid-View mit Poster-Bildern~~ ✅ (Session 10), ~~Expand-Row~~ ✅ (Session 6) +- **UserDetail**: ~~Favorite Genres Sektion~~ ✅ (Session 9), ~~Timeline-Tab~~ ✅ (Session 10), ~~Expand-Row~~ ✅ (Session 6) +- **Rules.tsx**: Modal ist sehr lang – kein Wizard/Accordion, keine Genre/Tag-Autocomplete +- **Settings.tsx**: Cron-Eingaben ohne Hilfe/Validierung +- **Dashboard**: ~~Keine Charts~~ recharts Charts implementiert (Session 5). ~~Genre Distribution~~ ✅ (Session 9) + +### Performance +- ~~**Kein Code-Splitting**~~: ✅ Alle Seiten (außer Login/Register) auf `React.lazy` + `Suspense` migriert (Session 4). +- **Jobs.tsx**: 5-Sekunden-Refetch auf 2 Queries = konstanter Netzwerk-Traffic +- **Staging.tsx**: Beide Queries auf 30s Refetch – könnte reduziert werden + +### Accessibility +- ~~Kein `aria-label` auf Mobile-Hamburger-Button in Layout~~ ✅ War bereits vorhanden +- ~~ConfirmDialog hat keinen Focus-Trap / `aria-modal`~~ ✅ Behoben (Session 6) – `role="dialog"`, `aria-modal`, `aria-labelledby`, Focus-Trap, Escape-Key, Click-Outside +- ~~Farbkontrast im Light-Mode gebrochen~~ ✅ Behoben (Session 3) + +--- + +## Code-Qualitätsprobleme + +### Frontend-Inkonsistenzen +| Problem | Betroffene Dateien | Beschreibung | Status | +|---------|--------------------|-------------|--------| +| Fetch-Pattern | LibraryDetail.tsx | Einzige Seite ohne React Query | ✅ Behoben (Session 3) | +| Utility-Duplizierung | LibraryDetail.tsx, Preview.tsx, Activity.tsx, UserDetail.tsx | Eigene formatBytes/formatDuration/formatWatchTime statt utils.ts | ✅ Behoben (Session 3+4) | +| Debounce-Pattern | Users.tsx, Activity.tsx | Manuell statt useDebounce Hook | ✅ Behoben (Session 3+4) | +| API-Prefix | LibraryDetail.tsx | Doppeltes /api/ | ✅ Behoben (Session 3) | +| Theme-Klassen | Login, Register, ConfirmDialog, Skeleton | Fehlende dark: Varianten | ✅ Behoben (Session 3) | + +### Backend – 
Keine bekannten kritischen Issues +Das Backend ist gut strukturiert mit: +- Sauberer Abstraktion (BaseServiceClient → Emby/Radarr/Sonarr) +- Vollständige API-Coverage für alle Features +- Rate Limiting auf allen Routes mit Security-Event-Logging +- Audit-Logging mit automatischer Retention +- Proper Error Handling +- Caching in EmbyClient (SimpleCache mit TTL) +- Umfassende Security-Härtung (httpOnly Cookies, CSRF, CSP, SSRF-Schutz, Account Lockout) + +--- + +## Was gut funktioniert + +### Backend ✅ +- **Sync-Engine**: Emby→DB Sync (Users, Watch History, Active Sessions, Libraries) robust implementiert +- **Cleanup-Engine**: Rule-Evaluation mit detailliertem Preview/Dry-Run, Grace Periods, Import Exclusions +- **Staging-System**: Soft-Delete mit Emby Staging-Library, Auto-Restore bei Watch, Per-Library Settings +- **Notifications**: Apprise-basiert mit Template-Engine (Mustache-like), Retry-Logik, Event-Type-Filtering +- **API-Design**: RESTful, gut paginiert, konsistente Error Responses + +### Frontend ✅ +- **Dashboard**: Gute Übersicht, Time-Range-Selector, Most Viewed/Popular/Active Users +- **Rules + Preview**: Templates, Export/Import, Bulk-Actions, Dry-Run mit detailliertem Reasoning +- **Staging UI**: Durchdachtes 2-Tab-Layout, Urgency-Farbcodierung, Global + Per-Library Settings +- **Notifications**: Multi-URL-Input, Template-Editor mit Variable-Suggestions, Live-Preview +- **Services & Jobs**: Übersichtlich, Test-Verbindung, manueller Sync-Trigger, editierbare Intervalle +- **Auth-Flow**: httpOnly Cookie Auth, CSRF Protection, Refresh-Token Rotation, Account Lockout +- **ResponsiveTable-Komponente**: Gut gebaut (Table→Cards auf Mobile), jetzt auf allen Seiten eingesetzt inkl. Expand-Row-Support + +--- + +## Nächste Schritte (priorisiert) + +### ~~Priorität 1: Kritische Bugs fixen~~ ✅ ERLEDIGT (Session 3) +1. ~~**BUG-001**: LibraryDetail.tsx `/api/`-Prefix fixen + auf React Query migrieren + Utils importieren~~ ✅ +2. 
~~**BUG-002/003/004**: Light-Mode fixen (Login, Register, ConfirmDialog, Skeleton)~~ ✅ + +### ~~Priorität 2: UX-Verbesserungen~~ ✅ ERLEDIGT (Session 3+4) +3. ~~**BUG-006**: Sprachmix Deutsch→Englisch bereinigen~~ ✅ utils.ts Locale (Session 3) + Layout.tsx deutsche Strings (Session 4) +4. ~~**Mobile Tables**: ResponsiveTable in Activity, UserDetail, Staging, Jobs, Preview einsetzen~~ ✅ (Session 4+6). Alle Seiten migriert. +5. ~~**BUG-005**: Toast-Theming fixen~~ ✅ + +### ~~Priorität 3: Phase 3 – Charts implementieren~~ ✅ ERLEDIGT (Session 4+9) +6. ~~**Daily Play Count Chart**: Area Chart mit recharts auf Activity-Seite~~ ✅ +7. ~~**Plays by Day of Week**: Bar Chart auf Activity-Seite~~ ✅ +8. ~~**Plays by Hour**: Bar Chart auf Activity-Seite~~ ✅ +9. ~~**Genre Distribution**: Backend-API `/activity/genre-stats` + RadarChart (Activity) + BarChart (Dashboard, LibraryDetail, UserDetail)~~ ✅ (Session 9) + +### ~~Priorität 4: Fehlende Phase 2 Features~~ ✅ ERLEDIGT +10. ~~**Activity-Seite erweitern**: IP, Device Spalten, Library-Filter, Items-per-Page, Expand-Row~~ ✅ (Session 4+5+6). +11. ~~**UserDetail erweitern**: Library-Filter auf Activity, Expand-Row, Favorite Genres~~ ✅ (Session 5+6+9). +12. ~~**LibraryDetail erweitern**: ResponsiveTable, Expand-Row, Genre-Charts~~ ✅ (Session 5+6+9). Grid-View offen. +13. ~~**Dashboard Charts**: Daily Plays, Day-of-Week, Hour-of-Day, Genre Distribution recharts Charts~~ ✅ (Session 5+9). + +### ~~Priorität 5: Code-Qualität~~ ✅ ERLEDIGT (Session 3+4) +13. ~~Code-Splitting mit React.lazy~~ ✅ (Session 4) – 13 Seiten lazy-loaded mit Suspense-Fallback +14. ~~BUG-008: fetchUser bei App-Init~~ ✅ +15. ~~BUG-009/010: Code-Duplizierung/Debounce aufräumen~~ ✅ + +### ~~Priorität 6: Security Hardening~~ ✅ ERLEDIGT (Session 7+8) +16. ~~httpOnly Cookie Migration (ADR-001)~~ ✅ +17. ~~CSRF Protection~~ ✅ +18. ~~Security Headers, SSRF Validation, Account Lockout~~ ✅ +19. ~~Admin-Only Routes, Input Sanitization, Sensitive Config Masking~~ ✅ +20. 
~~CI/CD: Tests + Security Scanning Workflows~~ ✅ +21. ~~Dependabot Setup~~ ✅ + +### ~~Priorität 7: Phase 4+5~~ ✅ ERLEDIGT (Session 9+10+11) +22. ~~Advanced Analytics~~ ✅ (Session 9+10): Heatmaps, User Timeline, Concurrent Streams, Duration Stats, Completion Rates, Binge Detection, Content Reach +23. ~~Smart Cleanup Rules (Per-User Conditions)~~ ✅ (Session 11): no_user_watched_days, exclude_if_user_favorited, exclude_active_sessions, min_unique_viewers, Cleanup Suggestions API + UI +24. ~~Genre Distribution Charts~~ ✅ (Session 9) +25. ~~User Detail: Favorite Genres~~ ✅ (Session 9), ~~Timeline-Tab~~ ✅ (Session 10) +26. ~~LibraryDetail: Genre-Charts~~ ✅ (Session 9), ~~Grid-View mit Poster-Bildern~~ ✅ (Session 10) +27. i18n / Lokalisierung + +--- + +## Dateistruktur-Referenz + +### Wichtige Konfigurationsdateien +| Datei | Beschreibung | +|-------|-------------| +| `docker-compose.dev.yml` | Dev-Umgebung (Hot-Reload) | +| `docker-compose.yml` | Production mit SQLite | +| `docker-compose.postgres.yml` | Production mit PostgreSQL | +| `backend/pyproject.toml` | Python Dependencies | +| `frontend/package.json` | Node Dependencies | +| `frontend/vite.config.ts` | Vite Config (Proxy → Backend) | +| `frontend/src/index.css` | Tailwind @theme Definition | +| `docs/adr/001-httponly-cookie-migration.md` | ADR: JWT localStorage → httpOnly Cookies | +| `.github/workflows/tests.yml` | CI: Backend + Frontend Tests | +| `.github/workflows/security-scan.yml` | CI: SAST/DAST Security Scanning | +| `.github/dependabot.yml` | Dependabot Config (target: develop) | + +### API-Endpunkte (Kurzreferenz) +| Prefix | Router | Beschreibung | +|--------|--------|-------------| +| `/api/auth` | auth.py | Login, Register, Sessions | +| `/api/services` | services.py | Service CRUD, Test, Sync | +| `/api/libraries` | libraries.py | Libraries, Stats, Detail, Media | +| `/api/media` | media.py | Dashboard-Stats, Watch-Stats, Image Proxy | +| `/api/rules` | rules.py | Rules CRUD, Templates, Export | +| 
`/api/activity` | activity.py | Activity Log, Stats, Active Sessions | +| `/api/users` | users.py | Media Server Users, Timeline | +| `/api/notifications` | notifications.py | Channels CRUD, Templates | +| `/api/staging` | staging.py | Staging System | +| `/api/jobs` | jobs.py | Scheduler Jobs | +| `/api/setup` | setup.py | ★ NEU: Setup Wizard Status, Test, Add, Complete, Skip | +| `/api/system` | system.py | Health, Settings, Cleanup, Version | +| `/api/ws/jobs` | main.py | ★ NEU: WebSocket für Real-Time Job-Progress | +| `/api/audit` | audit.py | Audit Logs (Admin) | + +--- + +## Entwicklungsumgebung starten + +```bash +# Repository klonen +git clone https://github.com/Serph91P/MediaCleanup.git +cd MediaCleanup +git checkout develop + +# Dev-Umgebung starten (Hot-Reload für Frontend + Backend) +docker compose -f docker-compose.dev.yml up --build + +# Zugriff: +# Frontend: http://localhost:5173 +# Backend API Docs: http://localhost:8080/docs +# Erster Login erstellt Admin-Account +``` + +--- + +## Änderungshistorie dieses Dokuments + +| Datum | Änderung | +|-------|----------| +| 22.02.2026 | Vollständige Neuaufsetzung: Kompletter Code-Review (Backend + Frontend), Bug-Katalog mit 10 Einträgen, UX-Analyse, Priorisierte Roadmap, Architektur-Dokumentation | +| 22.02.2026 (2) | **Session 2**: WebSocket Real-Time System (ConnectionManager, Progress-Callbacks für Sonarr/Radarr/Emby, Scheduler-Integration), Global Toast-Notifications via WebSocket, Jobs-Page komplett neu geschrieben (Live-Progress-Bars, Running-Panel, WS-Status-Indikator), Layout.tsx Job-Badge, Setup-Wizard (Backend: /setup/status, /test-connection, /add-service, /complete, /skip; Frontend: 5-Step geführter Wizard mit Welcome→Arr→MediaServer→Sync→Complete), App.tsx SetupGate Redirect-Logik | +| 22.02.2026 (3) | **Session 3 – Bugfixes**: Alle 10 Bugs (BUG-001 bis BUG-010) behoben. LibraryDetail.tsx komplett rewritten (API-Pfade, React Query, shared Utils, Light/Dark-Mode). 
Light-Mode gefixt in Login, Register, ConfirmDialog, Skeleton, Toaster. Locale `de-DE` → `en-US`. fetchUser() bei App-Init. Code-Duplizierung (formatBytes/formatDuration) aufgeräumt. useDebounce Hook in Users.tsx. Branch: `fix/bugfixes-and-ux-improvements`, Commit: `f90a5f5` (10 Dateien, 295 Insertions, 342 Deletions) | +| 22.02.2026 (4) | **Session 4 – UX & Charts**: Layout.tsx deutsche Strings → Englisch (BUG-006 abgeschlossen). ResponsiveTable Light-Mode-Fix (fehlende `dark:` Varianten). Activity.tsx: `useDebounce` statt setTimeout, shared Utils (`formatDurationLong`, `formatWatchTime`), ResponsiveTable-Migration, 3× recharts Charts (Daily Plays Area, Day-of-Week Bar, Hour-of-Day Bar). UserDetail.tsx: shared Utils + ResponsiveTable. Staging.tsx: ResponsiveTable. Jobs.tsx: Executions-Tabelle → ResponsiveTable. App.tsx: React.lazy Code-Splitting (13 Seiten). Branch: `feature/ux-improvements-and-charts` | +| 22.02.2026 (5) | **Session 5 – Phase 2 Enhancements & Docs**: LibraryDetail.tsx Media+Activity Tabs → ResponsiveTable. Activity.tsx: Library-Filter Dropdown + Items-per-Page Selector (10/25/50/100). UserDetail.tsx: Library-Filter auf Activity-Tab. Dashboard.tsx: 3× recharts Charts (Daily Plays Area, Day-of-Week Bar, Hour-of-Day Bar) mit Dashboard-eigenem statsDays-Selector. PLANNED_FEATURES.md: Phase 2/3 Status aktualisiert, Implementation History Tabelle. Branch: `feature/phase2-enhancements-and-docs` | +| 22.02.2026 (6) | **Session 6 – Bugfix & Expand-Rows**: BUG-011 behoben: PlaybackActivity `position_ticks`/`runtime_ticks` Integer→BigInteger (PostgreSQL int32 overflow bei Emby-Ticks >2.1B). DB-Migration hinzugefügt. Fehlerbehandlung in `_sync_active_sessions` und `services.py` mit `db.rollback()`. ResponsiveTable: Expand-Row-Support (`onRowClick`, `isExpanded`, `expandedContent`). Preview.tsx: Beide Tabellen (Series+Movies) auf ResponsiveTable migriert (letzte Seite). 
Expand-Rows auf Activity, UserDetail, LibraryDetail (IP, Device, Play Method, Progress-Bar). ConfirmDialog: `aria-modal`, Focus-Trap, Escape-Key, Click-Outside. Library Activity API: `ip_address`, `transcode_video`, `transcode_audio` hinzugefügt. BUG-012: Radarr-Pfad Ordner→Datei-Pfad (Movie-Watch-Statistiken). BUG-013: User Last Seen/Watched/Client Fallback-Logik. Branch: `feature/phase2-enhancements-and-docs` | +| 24.02.2026 (7) | **Session 7 – Security Hardening I** (PR #19 `feature/security-hardening`): CORS Lockdown (Wildcard-Warnung in Production). API-Key/Notification-Secret Masking. Password Complexity Enforcement. Security Headers Middleware (X-Frame-Options, X-Content-Type-Options, Referrer-Policy, Permissions-Policy). WebSocket Auth (kurzlebiger Token). Refresh Token Rotation (altes Token revoked). Account Lockout (DB-Migration für `failed_login_attempts`, `locked_until`). Trusted Proxy Config. Rate-Limit Improvements. System Settings Allowlist. Staging Path Validation. Rule Import File Size Limit. `datetime.utcnow()` → `datetime.now(timezone.utc)`. Audit Log Data Retention Job. SECRET_KEY Enforcement in docker-compose. Frontend WebSocket Auth + Refresh Token Rotation Support. Branch: `feature/security-hardening`, Commit: `3058956` (PR #19) | +| 24.02.2026 (8) | **Session 8 – Security Hardening II + httpOnly Cookies** (PRs #20, #21, #22): httpOnly Cookie Auth Migration (ADR-001) – JWT aus localStorage entfernt, Set-Cookie im Backend, `credentials: 'include'` im Frontend, Cookie-Clearing bei Logout. CSRF Double-Submit Cookie Middleware (`csrf.py`). Security Event Logging (`security_events.py` – strukturiertes JSON für Auth/Rate-Limit/CSRF Events). SSRF-Safe URL Validation (`url_validation.py`). `escape_like()` für SQL-Injection-Schutz. Content-Security-Policy Header. WebSocket Connection Limits per IP. Body Size Limit Middleware. Admin-Only auf Rules, Jobs, Staging, Services, Notifications, System-Settings Routes. 
Outbound URL Validation auf alle Service-Connection/Notification/Setup Endpoints. Enhanced Rate Limiting mit Security-Event-Logging auf allen API-Routes. Refresh Token Cleanup Scheduler-Job. CI/CD: `tests.yml` (Backend+Frontend Tests), `security-scan.yml` (SAST/DAST). Pytest Setup mit Smoke Test. Dependabot Config. npm Dependency Bump. Branch: `feature/security-hardening2`, Commits: `148d0f6` (PR #20), `f0eec84` (PR #21), `657ad70` (PR #22) |
| 26.02.2026 (10) | **Session 10 – Phase 2 Completion + Phase 4 Advanced Analytics**: Phase 2: User Timeline Tab (Backend + Frontend Calendar Heatmap + Session-Gruppierung), Image Proxy (`GET /media/{id}/image`), LibraryDetail Grid View (Table/Grid Toggle mit Poster-Bildern), UserDetail Activity Type-Filter + Suche. Phase 4: Concurrent Streams Analysis, Watch Duration Stats, Completion Rate Analytics, Binge-Watch Detection, Shared vs. Solo Content Analysis. Neue Analytics-Seite mit 5 Tabs. Branches: `feature/phase2-completion`, `feature/phase4-advanced-analytics` |
| 26.02.2026 (11) | **Session 11 – Phase 5 Smart Cleanup Integration**: Per-User Rule Conditions (`no_user_watched_days`, `exclude_if_user_favorited`, `exclude_active_sessions`, `min_unique_viewers`) in RuleConditions Schema + CleanupEngine. Analytics-based Cleanup Suggestions API (`GET /media/cleanup-suggestions`). CleanupSuggestions.tsx Frontend-Seite mit Kategorie-Filter (Unwatched, Abandoned, Low Engagement, Stale, Storage Hog) und Score-basiertem Ranking. Rules.tsx: Smart Cleanup Per-User Section.
Branch: `feature/phase5-smart-cleanup` || 30.12.2024 | Initiale Version: Session-Zusammenfassung (Rules Export, Sidebar, Theme Toggle, Staging UI) | From 7bffd743b5e4b1e64446a12b66ed43122818857b Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:14:45 +0100 Subject: [PATCH 099/122] feat: Remove DEVELOPMENT_STATUS document as it is no longer needed --- DEVELOPMENT_STATUS.md | 587 ------------------------------------------ 1 file changed, 587 deletions(-) delete mode 100644 DEVELOPMENT_STATUS.md diff --git a/DEVELOPMENT_STATUS.md b/DEVELOPMENT_STATUS.md deleted file mode 100644 index b98a981..0000000 --- a/DEVELOPMENT_STATUS.md +++ /dev/null @@ -1,587 +0,0 @@ -# MediaCurator - Development Status & Handoff Document - -> **Zweck**: Dieses Dokument dient als fortlaufender Stand für die Weiterentwicklung. Es kann in jedem neuen Chat/auf jedem Rechner als Kontext übergeben werden, damit der Assistent sofort weiß, wo es weitergeht. - -**Letzte Aktualisierung**: 26. Februar 2026 (Session 11) -**Branch**: `develop` -**Letzter Commit**: `55e4bf7` (Session 9) -**Version**: `vdev.0.0.231` -**Repo**: `https://github.com/Serph91P/MediaCurator.git` - ---- - -## Inhaltsverzeichnis - -1. [Projekt-Übersicht](#projekt-übersicht) -2. [Tech-Stack](#tech-stack) -3. [Architektur-Überblick](#architektur-überblick) -4. [Implementierungsstatus nach Phasen](#implementierungsstatus-nach-phasen) -5. [Bekannte Bugs (priorisiert)](#bekannte-bugs-priorisiert) -6. [UX/UI-Probleme](#uxui-probleme) -7. [Code-Qualitätsprobleme](#code-qualitätsprobleme) -8. [Was gut funktioniert](#was-gut-funktioniert) -9. [Nächste Schritte (priorisiert)](#nächste-schritte-priorisiert) -10. [Dateistruktur-Referenz](#dateistruktur-referenz) -11. [Entwicklungsumgebung starten](#entwicklungsumgebung-starten) -12. 
[Änderungshistorie dieses Dokuments](#änderungshistorie-dieses-dokuments) - ---- - -## Projekt-Übersicht - -MediaCurator ist ein Self-Hosted Media Management Tool das mit Emby/Jellyfin (Media Server) und Sonarr/Radarr (Download Manager) zusammenarbeitet. Es bietet: - -- **Automatische Cleanup-Regeln**: Ungesehene/alte/schlecht bewertete Medien erkennen und löschen -- **Staging-System**: Soft-Delete mit Grace Period – Dateien werden in eine Staging-Library verschoben, bevor sie permanent gelöscht werden -- **User-Tracking**: Watch-History pro User synced von Emby -- **Activity-Monitoring**: Aktive Sessions, Playback-History -- **Notifications**: Apprise-basiert (Discord, Slack, Webhook, etc.) -- **Dashboard**: Statistiken, Most Viewed/Popular, Library-Übersicht - ---- - -## Tech-Stack - -### Backend -| Komponente | Technologie | Version | -|-----------|------------|---------| -| Framework | FastAPI | aktuell | -| ORM | SQLAlchemy (async) | aktuell | -| DB | SQLite (default) / PostgreSQL | | -| Scheduler | APScheduler | | -| HTTP Client | httpx (async) | | -| Auth | JWT (httpOnly cookies + CSRF) | | - -### Frontend -| Komponente | Technologie | Version | -|-----------|------------|---------| -| Framework | React | 19.2 | -| Language | TypeScript | 5.3 | -| Bundler | Vite | 7.3 | -| Styling | Tailwind CSS | 4.x (CSS-first @theme) | -| Server State | TanStack React Query | 5.x | -| Client State | Zustand (persist) | 5 | -| Forms | react-hook-form | 7.49 | -| Charts | recharts | 3.7 (**in Benutzung**: Activity + Dashboard Charts) | -| HTTP | Axios (httpOnly cookie auth, CSRF) | 1.6 | -| Toasts | react-hot-toast | 2.4 | - ---- - -## Architektur-Überblick - -### Backend-Struktur -``` -backend/app/ -├── main.py # FastAPI App, CORS, Middleware-Stack, Router-Mounting, WebSocket Endpoint -├── scheduler.py # APScheduler Jobs (Sync, Cleanup, Staging, Token-Cleanup) + WebSocket-Broadcast -├── core/ -│ ├── config.py # Pydantic Settings -│ ├── csrf.py # ★ NEU: CSRF Double-Submit 
Cookie Middleware -│ ├── database.py # SQLAlchemy async engine/session -│ ├── migrations.py # DB-Migrationen (Alembic-like, manuell) -│ ├── rate_limit.py # Rate Limiting (enhanced mit Security-Event-Logging) -│ ├── security.py # JWT (httpOnly Cookies), Password Hashing, WebSocket Token -│ ├── security_events.py # ★ NEU: Strukturiertes Security-Event-Logging -│ ├── security_headers.py # ★ NEU: Security Headers Middleware (CSP, X-Frame-Options, etc.) -│ ├── url_validation.py # ★ NEU: SSRF-Safe URL Validation -│ └── websocket.py # ConnectionManager für Real-Time Job-Broadcasting (+ IP-Limits) -├── models/database.py # ALLE SQLAlchemy Models (656 Zeilen) -├── schemas/__init__.py # ALLE Pydantic Schemas -├── services/ -│ ├── base.py # BaseServiceClient (abstrakt, httpx) -│ ├── emby.py # EmbyClient + EmbyService (Caching, Sync) -│ ├── radarr.py # RadarrClient (API v3) -│ ├── sonarr.py # SonarrClient (API v3) -│ ├── sync.py # Haupt-Sync-Logik (Emby→DB, Sonarr→DB, Radarr→DB) -│ ├── cleanup_engine.py # Rule-Evaluation + Execution -│ ├── staging.py # Soft-Delete-System -│ ├── notifications.py # Multi-Channel mit Templates -│ ├── audit.py # Audit-Logging -│ └── version.py # Git/GitHub Version-Check -├── tests/ -│ ├── __init__.py # ★ NEU: Test-Setup -│ └── test_smoke.py # ★ NEU: Smoke-Test (Settings-Loading) -└── api/routes/ - ├── activity.py # GET /activity/, /stats, /active, /genre-stats, /watch-heatmap, /concurrent-streams, /duration-stats, /completion-rates, /binge-stats - ├── audit.py # GET/DELETE /audit/logs, /recent, etc. 
- ├── auth.py # POST /auth/login, /register, /refresh, /logout (httpOnly Cookies, CSRF) - ├── jobs.py # GET/POST /jobs/, trigger, interval (Admin-Only) - ├── libraries.py # GET /libraries/, /stats, /{id}/details, /media, /activity - ├── media.py # GET /media/stats, /dashboard-stats, /watch-stats, /audit-log, /{id}/image, /content-reach - ├── notifications.py # CRUD /notifications/, test, preview-template (Admin-Only, URL Validation) - ├── rules.py # CRUD /rules/, templates, export/import, bulk (Admin-Only, File Size Limit) - ├── services.py # CRUD /services/, test, sync (Admin-Only, URL Validation) - ├── staging.py # GET/POST /staging/, restore, delete, settings (Admin-Only, Path Validation) - ├── setup.py # GET /setup/status, POST /test-connection, /add-service, /complete, /skip - ├── system.py # GET /system/health, /stats, /settings, cleanup/preview (Admin-Only) - └── users.py # GET /users/, /{id}, /{id}/activity, /{id}/timeline, PATCH hide -``` - -### Frontend-Struktur -``` -frontend/src/ -├── App.tsx # Routes (15 protected + Login/Register/Setup) + SetupGate -├── main.tsx # React Entry, QueryClient, Toaster -├── index.css # Tailwind @theme (dark-* + primary-* Palette) -├── components/ -│ ├── Layout.tsx # Sidebar, Header, Version-Check, Theme-Toggle, ★ Job-Badge -│ ├── ConfirmDialog.tsx # Modal (danger/warning/info) -│ ├── ResponsiveTable.tsx # Table → Cards auf Mobile -│ └── Skeleton.tsx # Loading-Skeletons -├── hooks/ -│ ├── useDebounce.ts # Generischer Debounce-Hook -│ └── useJobWebSocket.ts # ★ NEU: Globaler WebSocket-Hook (Auto-Reconnect, Toasts) -├── lib/ -│ ├── api.ts # Axios mit httpOnly Cookie Auth + CSRF Token Interceptor -│ └── utils.ts # formatBytes, formatRelativeTime, formatDate, etc. 
-├── stores/ -│ ├── auth.ts # Zustand: Login/Logout/Sessions (cookie-basiert, kein localStorage-Token) -│ ├── jobs.ts # Zustand: WebSocket Job-State (runningJobs, recentCompletions) -│ └── theme.ts # Zustand: light/dark/system -├── types/index.ts # TypeScript Interfaces -└── pages/ - ├── Dashboard.tsx # Stats, Most Viewed/Popular, Libraries, Disk - ├── Libraries.tsx # Library-Grid mit Stats, Sync - ├── LibraryDetail.tsx # 3 Tabs: Overview, Media (Table+Grid), Activity - ├── Users.tsx # User-Tabelle mit Stats - ├── UserDetail.tsx # 3 Tabs: Overview, Activity (Type/Search Filter), Timeline (Heatmap) - ├── Analytics.tsx # 5 Tabs: Concurrent Streams, Duration, Completion, Binge Watch, Content Reach - ├── Activity.tsx # Active Sessions + Activity-Log - ├── History.tsx # Cleanup-Audit-Log - ├── Jobs.tsx # ★ REWRITE: Live-Progress-Bars, WebSocket-Status, Running-Panel - ├── SetupWizard.tsx # ★ NEU: Geführter 5-Step Setup-Wizard - ├── Rules.tsx # CRUD + Templates + Export/Import - ├── Services.tsx # Service-Connections CRUD - ├── Settings.tsx # System-Settings, Password, Cache - ├── Staging.tsx # Staged Items + Library-Settings - ├── Notifications.tsx # Channels CRUD + Template-Editor - ├── Preview.tsx # Cleanup Dry-Run Preview - ├── Login.tsx # Auth - └── Register.tsx # Erstbenutzer-Setup -``` - -### Datenbank-Models (Wichtigste) -| Model | Tabelle | Beschreibung | -|-------|---------|-------------| -| User | users | App-Benutzer (Admin-Login) | -| RefreshToken | refresh_tokens | JWT Refresh Tokens mit Device-Info | -| ServiceConnection | service_connections | Sonarr/Radarr/Emby/Jellyfin Verbindungen | -| Library | libraries | Synchronisierte Bibliotheken mit Staging-Config | -| MediaItem | media_items | Alle Medien (Filme, Serien, Episoden) mit Watch-Status | -| CleanupRule | cleanup_rules | Regelwerk mit JSON-Conditions | -| MediaServerUser | media_server_users | Emby/Jellyfin-Benutzer | -| UserWatchHistory | user_watch_history | Watch-History pro User pro Item | -| 
PlaybackActivity | playback_activities | Playback-Sessions mit Client/Device/Transcode | -| CleanupLog | cleanup_logs | Cleanup-Audit-Trail | -| NotificationChannel | notification_channels | Notification-Konfigurationen | -| JobExecutionLog | job_execution_logs | Scheduler-Job-History | -| ImportStats | import_stats | Sync-Statistiken | -| AuditLog | audit_logs | Admin-Action-Audit | -| SystemSettings | system_settings | Key-Value Settings | - ---- - -## Implementierungsstatus nach Phasen - -> Referenz: `PLANNED_FEATURES.md` - -### Phase 1 – Foundation ✅ ERLEDIGT -- [x] MediaServerUser model -- [x] UserWatchHistory model -- [x] PlaybackActivity model -- [x] Basic user stats on Dashboard -- [x] Library stats API - -### Phase 2 – Views & Navigation ✅ ERLEDIGT -| Feature | Backend | Frontend | Qualität | Anmerkungen | -|---------|---------|----------|----------|-------------| -| Library Detail – Overview Tab | ✅ API liefert 24h/7d/30d Stats | ✅ Dargestellt + Genre-Charts | ✅ | Genre RadarChart + BarChart hinzugefügt (Session 9) | -| Library Detail – Media Tab | ✅ Sortierung, Suche, Pagination | ✅ ResponsiveTable + Grid View | ✅ | Grid View mit Poster-Bildern hinzugefügt (Session 10) | -| Library Detail – Activity Tab | ✅ Pagination | ✅ ResponsiveTable | ✅ | Migriert auf ResponsiveTable (Session 5) | -| Users Page | ✅ Pagination, Search | ✅ ResponsiveTable | ✅ | Gut implementiert | -| User Detail – Overview | ✅ Time-based Stats | ✅ + Favorite Genres Chart | ✅ | Favorite Genres BarChart hinzugefügt (Session 9) | -| User Detail – Activity | ✅ Filters + Library-Filter | ✅ Tabelle + Library/Type-Filter + Suche | ✅ | Type-Filter + Suchfeld hinzugefügt (Session 10) | -| User Detail – Timeline Tab | ✅ `/users/{id}/timeline` | ✅ Calendar Heatmap + Sessions | ✅ | 90-Tage-Heatmap + Session-Gruppierung (Session 10) | -| Image Proxy | ✅ `/media/{id}/image` | ✅ Im Grid View | ✅ | Poster-Proxy von Emby/Jellyfin mit Cache (Session 10) | -| Global Activity Log | ✅ Stats + Active 
Sessions | ✅ | ✅ | Library-Filter + Items-per-Page hinzugefügt (Session 5) | -| Activity Stats API | ✅ plays by day/hour/weekday | ✅ Charts | ✅ | recharts Charts auf Activity + Dashboard (Session 4+5) | -| Active Sessions | ✅ 30s Sync | ✅ 30s Refresh | ✅ | Gut implementiert | - -### Phase 3 – Statistics & Charts ✅ ERLEDIGT (Session 4+9) -| Feature | Backend-API | Frontend | Anmerkungen | -|---------|------------|----------|-------------| -| Daily Play Count Chart (Area) | ✅ `/activity/stats` liefert `plays_by_day` | ✅ Activity.tsx | recharts AreaChart mit Gradient | -| Play Count by Day of Week (Bar) | ✅ `/activity/stats` liefert `plays_by_day_of_week` | ✅ Activity.tsx | recharts BarChart | -| Play Count by Hour (Bar) | ✅ `/activity/stats` liefert `plays_by_hour` | ✅ Activity.tsx | recharts BarChart mit AM/PM Labels | -| Genre Distribution (Radar/Spider) | ✅ `/activity/genre-stats` | ✅ Activity + Dashboard + LibraryDetail | RadarChart (Activity), BarChart (Dashboard, Library, User) (Session 9) | -| Watch Patterns Heatmap (7×24) | ✅ `/activity/watch-heatmap` | ✅ Activity.tsx | CSS Grid Heatmap, Day×Hour (Session 9) | - -### ★ NEU: WebSocket Real-Time System ✅ ERLEDIGT (Session 2) -| Feature | Backend | Frontend | Qualität | Anmerkungen | -|---------|---------|----------|----------|-------------| -| ConnectionManager | ✅ `core/websocket.py` | — | ✅ | Broadcast, job_started/progress/completed Events | -| WebSocket Endpoint | ✅ `/api/ws/jobs` in main.py | — | ✅ | Ping/Pong Support | -| Progress Callbacks | ✅ sync.py (alle 3 Services) | — | ✅ | Sonarr: je 5 Serien, Radarr: je 10 Filme, Emby: 6 Phasen | -| Scheduler Integration | ✅ Alle 5 Job-Funktionen | — | ✅ | started/progress/completed Events | -| Global WebSocket Hook | — | ✅ `useJobWebSocket.ts` | ✅ | Auto-Reconnect (exp. 
Backoff), 30s Ping | -| Toast Notifications | — | ✅ In Layout.tsx (global) | ✅ | Started/Completed/Failed Toasts auf jeder Seite | -| Zustand Job Store | — | ✅ `stores/jobs.ts` | ✅ | runningJobs Map, wsStatus, recentCompletions | -| Jobs Page Rewrite | — | ✅ `Jobs.tsx` (~650 Zeilen) | ✅ | Live Progress-Bars, Running-Panel, ElapsedTime, WS-Indikator | -| Layout Job Badge | — | ✅ `Layout.tsx` | ✅ | Animierter blauer Badge mit laufenden Jobs Count | - -### ★ NEU: Setup Wizard ✅ ERLEDIGT (Session 2) -| Feature | Backend | Frontend | Qualität | Anmerkungen | -|---------|---------|----------|----------|-------------| -| Setup Status API | ✅ `GET /setup/status` | — | ✅ | Prüft Services + SystemSettings, kein Auth nötig | -| Test Connection | ✅ `POST /setup/test-connection` | — | ✅ | Temporäre Verbindung testen ohne Speichern | -| Add Service | ✅ `POST /setup/add-service` | — | ✅ | Im Wizard-Kontext | -| Complete/Skip | ✅ `POST /setup/complete`, `/skip` | — | ✅ | Validierung: min. 1 Arr + 1 Media Server | -| Wizard UI | — | ✅ `SetupWizard.tsx` (~550 Zeilen) | ✅ | 5-Step: Welcome→Arr→MediaServer→Sync→Complete | -| Setup Gate | — | ✅ `App.tsx` SetupGate | ✅ | Redirect nach /setup wenn nicht abgeschlossen | -| Service Order | — | ✅ | ✅ | Erzwingt Sonarr/Radarr vor Emby/Jellyfin | -| Initial Sync | — | ✅ | ✅ | Sequentieller Sync aller Services mit Status-Anzeige | -| Skip Option | — | ✅ | ✅ | Setup überspringen möglich | - -### ★ NEU: Security Hardening ✅ ERLEDIGT (Session 7+8) -| Feature | Backend | Frontend | Qualität | Anmerkungen | -|---------|---------|----------|----------|-------------| -| httpOnly Cookie Auth | ✅ `security.py` Set-Cookie | ✅ `api.ts` credentials:include | ✅ | JWT aus localStorage entfernt, XSS-sicher (ADR-001) | -| CSRF Protection | ✅ `csrf.py` Double-Submit Cookie | ✅ `api.ts` X-CSRF-Token Header | ✅ | Automatisch bei state-changing Requests | -| Security Headers | ✅ `security_headers.py` Middleware | — | ✅ | CSP, X-Frame-Options, 
X-Content-Type-Options, Referrer-Policy | -| Security Event Logging | ✅ `security_events.py` | — | ✅ | Strukturiertes JSON-Logging für Auth, Rate-Limit, CSRF Events | -| SSRF URL Validation | ✅ `url_validation.py` | — | ✅ | Blockiert private IPs, non-HTTP schemes, Credentials in URLs | -| Account Lockout | ✅ User Model + Auth | — | ✅ | Nach N fehlgeschlagenen Logins temporäre Sperre | -| Refresh Token Rotation | ✅ `security.py` + `auth.py` | ✅ `auth.ts` | ✅ | Altes Token wird bei Refresh revoked | -| WebSocket Auth Token | ✅ Short-lived WS Token | ✅ `useJobWebSocket.ts` | ✅ | Kurzlebiger Token für WS-Verbindung | -| CORS Lockdown | ✅ `main.py` | — | ✅ | Wildcard-Warnung in Production, strikte Origin-Prüfung | -| Admin-Only Routes | ✅ Alle sensitiven Routes | — | ✅ | Rules, Jobs, Staging, Services, Notifications nur für Admin | -| Input Sanitization | ✅ `escape_like()` in Queries | — | ✅ | SQL-Injection-Schutz bei LIKE-Queries | -| Outbound URL Validation | ✅ Services, Notifications, Setup | — | ✅ | Discord/Slack/Webhook URLs gegen SSRF validiert | -| Staging Path Validation | ✅ `staging.py` | — | ✅ | Pfade gegen erlaubte Verzeichnisse validiert | -| Body Size Limit | ✅ `main.py` Middleware | — | ✅ | Request-Body-Größe begrenzt | -| WS Connection Limits | ✅ `websocket.py` per-IP Limit | — | ✅ | Max Connections pro IP gegen Abuse | -| Sensitive Config Masking | ✅ API Keys, Notification Secrets | — | ✅ | Secrets werden in API-Responses maskiert | -| Password Complexity | ✅ `schemas/__init__.py` | — | ✅ | Mindestanforderungen für Passwörter | -| Secret Key Enforcement | ✅ `config.py` + docker-compose | — | ✅ | Schwache/Default Secret Keys werden abgelehnt | -| Audit Log Retention | ✅ `scheduler.py` Cleanup-Job | — | ✅ | Automatische Bereinigung alter Audit-Logs | -| Refresh Token Cleanup | ✅ `scheduler.py` Cleanup-Job | — | ✅ | Expired/revoked Tokens werden automatisch gelöscht | -| Trusted Proxy Config | ✅ `config.py` + Rate Limiter | — | ✅ | Forwarded Headers nur von 
trusted Proxies | -| CI/CD Workflows | ✅ tests.yml + security-scan.yml | — | ✅ | GitHub Actions für Tests + Security-Scanning (SAST/DAST) | -| Smoke Tests | ✅ `tests/test_smoke.py` + pytest | — | ✅ | Initiales Test-Setup mit pytest-asyncio | -| Dependabot | ✅ `.github/dependabot.yml` | — | ✅ | Automatische Dependency-Updates auf develop | - -### Phase 4 – Advanced Analytics ✅ ERLEDIGT (Session 9+10) -- [x] Watch Patterns Heatmap (7x24 Grid) – Activity.tsx (Session 9) -- [x] User Activity Timeline / Calendar Heatmap – UserDetail.tsx (Session 10) -- [x] Concurrent Streams Analysis – Analytics.tsx (Session 10) -- [x] Watch Duration Stats – Analytics.tsx (Session 10) -- [x] Completion Rate Analytics – Analytics.tsx (Session 10) -- [x] Binge-Watch Detection – Analytics.tsx (Session 10) -- [x] Shared vs. Solo Content Analysis – Analytics.tsx (Session 10) - -### Phase 5 – Smart Cleanup Integration ✅ ERLEDIGT -- [x] Per-User Cleanup Rules: `no_user_watched_days` – checks UserWatchHistory per user (Session 11) -- [x] User-Specific Exclusions: `exclude_if_user_favorited` – protects favorites of specific users (Session 11) -- [x] Enhanced Currently Watching: `exclude_active_sessions` – checks PlaybackActivity.is_active (Session 11) -- [x] Min Unique Viewers: `min_unique_viewers` – only delete if fewer than X viewers (Session 11) -- [x] Analytics-based Cleanup Suggestions: `/media/cleanup-suggestions` endpoint (Session 11) -- [x] Cleanup Suggestions UI: CleanupSuggestions.tsx with category filters + scoring (Session 11) - ---- - -## Bekannte Bugs (priorisiert) - -### 🔴 CRITICAL - -#### BUG-012: Dashboard “Most Viewed Movies” zeigt keine Daten -- **Datei**: `backend/app/services/sync.py` (Zeile 277) -- **Problem**: Radarr-Sync speichert den **Ordner-Pfad** (`movie.get("path")` → `/movies/Movie Title/`), aber Emby liefert den **Datei-Pfad** (`/movies/Movie Title/Movie.mkv`). Path-Matching in `track_watch_data` schlägt für ALLE Filme fehl → `watch_count` wird nie aktualisiert. 
-- **Auswirkung**: Keine Film-Watch-Statistiken, "No movie plays yet" auf Dashboard, `UserWatchHistory` für Filme leer -- **Fix**: `movie.get("movieFile", {}).get("path") or movie.get("path")` – bevorzugt den Datei-Pfad aus `movieFile` -- **Status**: ✅ ERLEDIGT (Session 6) -- **Hinweis**: Bestehende Filme werden beim nächsten Radarr-Sync automatisch korrigiert (Update-Loop setzt `path` neu) - -#### BUG-013: Users-Seite – Last Seen/Last Watched/Last Client fehlerhaft -- **Dateien**: `backend/app/services/sync.py`, `backend/app/api/routes/users.py` -- **Problem (3 Ursachen)**: - 1. `last_activity_at` hängt von `LastPlayedDate` aus Emby ab, das oft fehlt oder nicht geparst werden kann. Fehler wurden mit `except: pass` verschluckt. - 2. `UserWatchHistory`-Fallback filtert auf `last_played_at IS NOT NULL`, was ebenfalls von `LastPlayedDate` abhängt → kein Fallback. - 3. `UserWatchHistory` speichert keine Client/Device-Info → Fallback gibt immer `null`. -- **Auswirkung**: "Last Seen: Never" bei allen Usern, "Last Watched: Never" / "Last Client: N/A" bei Usern ohne aktive PlaybackActivity -- **Fix**: - - Fallback: `last_activity_at = datetime.now(UTC)` wenn User Plays hat aber kein `LastPlayedDate` - - `UserWatchHistory`-Query: `is_played == True` statt `last_played_at IS NOT NULL`, sortiert nach `coalesce(last_played_at, min)` + `play_count` - - Stilles `except: pass` → `logger.warning()` für `LastPlayedDate`-Parsing-Fehler -- **Status**: ✅ ERLEDIGT (Session 6) - -#### BUG-011: PlaybackActivity – position_ticks/runtime_ticks Integer Overflow -- **Datei**: `backend/app/models/database.py`, `backend/app/services/sync.py` -- **Problem**: `position_ticks` und `runtime_ticks` waren `Integer` (int32, max ~2.1B), aber Emby liefert Tick-Werte >int32 (z.B. `70223183889`). PostgreSQL wirft `value out of int32 range`. 
-- **Auswirkung**: Emby-Sync bricht bei Active Sessions ab, `services.py` commit schlägt fehl (PendingRollbackError) -- **Fix**: Spalten auf `BigInteger` geändert, Migration für PostgreSQL hinzugefügt (ALTER COLUMN TYPE BIGINT), Fehlerbehandlung in `_sync_active_sessions` und `services.py` mit `db.rollback()` versehen -- **Status**: ✅ ERLEDIGT (Session 6) - -#### BUG-001: LibraryDetail.tsx – Doppeltes `/api/` Prefix -- **Datei**: `frontend/src/pages/LibraryDetail.tsx` -- **Problem**: Die Seite benutzt Pfade wie `/api/libraries/${id}/details`, aber die Axios-BaseURL ist bereits `/api`. Das erzeugt Requests an `/api/api/libraries/...`. -- **Auswirkung**: **Seite ist wahrscheinlich komplett kaputt** (404 auf alle Requests) -- **Fix**: Alle Fetch-Calls auf relative Pfade ohne `/api/` Prefix umstellen, oder besser: auf React Query + `api.get()` migrieren (wie alle anderen Seiten) -- **Status**: ✅ ERLEDIGT (Session 3) – Komplett rewritten: API-Pfade korrigiert, auf 3× `useQuery` migriert, lokale Utils entfernt → importiert aus `lib/utils.ts`, Light/Dark-Mode Klassen - -### 🟡 HIGH - -#### BUG-002: Light-Mode kaputt auf Login/Register -- **Dateien**: `frontend/src/pages/Login.tsx`, `frontend/src/pages/Register.tsx` -- **Problem**: Hart-kodierte Dark-Klassen (`bg-dark-800`, `text-white`, `text-dark-200`) ohne `dark:` Prefix-Varianten -- **Auswirkung**: Im Light-Mode erscheint ein dunkler Kasten auf hellem Hintergrund. Text teilweise unsichtbar. -- **Fix**: Alle Farbklassen auf `bg-white dark:bg-dark-800`, `text-gray-900 dark:text-white`, etc. umstellen -- **Status**: ✅ ERLEDIGT (Session 3) – Alle Farbklassen mit `dark:` Varianten versehen - -#### BUG-003: ConfirmDialog – Light-Mode kaputt -- **Datei**: `frontend/src/components/ConfirmDialog.tsx` -- **Problem**: Hart-kodiert `bg-dark-800`. Cancel-Button im Light-Mode unsichtbar. 
-- **Fix**: `dark:` Varianten hinzufügen -- **Status**: ✅ ERLEDIGT (Session 3) - -#### BUG-004: Skeleton – Light-Mode kaputt -- **Datei**: `frontend/src/components/Skeleton.tsx` -- **Problem**: Hart-kodiert `bg-dark-700`, `bg-dark-800`. Ladeanimationen werden zu schwarzen Rechtecken. -- **Fix**: `bg-gray-200 dark:bg-dark-700` etc. -- **Status**: ✅ ERLEDIGT (Session 3) - -### 🟢 MEDIUM - -#### BUG-005: Toast-Notifications – Light-Mode -- **Datei**: `frontend/src/main.tsx` -- **Problem**: Toaster-Farben hart-kodiert auf dunkel (`#1e293b`) -- **Fix**: CSS-Variablen oder Theme-aware Konfiguration -- **Status**: ✅ ERLEDIGT (Session 3) – Hardcoded Hex durch Tailwind CSS `className` ersetzt - -#### BUG-006: Sprachmix Deutsch/Englisch -- **Dateien**: - - `frontend/src/components/Layout.tsx`: Update-Banner mit `"Update verfügbar!"`, `"Changelog ansehen"`, `"Aktuelle Version"` - - `frontend/src/lib/utils.ts`: `formatDate`/`formatDateTime` benutzen `'de-DE'` Locale -- **Fix**: Alles auf Englisch (oder ein i18n-System), Locale konfigurierbar machen -- **Status**: ✅ ERLEDIGT (Session 3+4) – `utils.ts` Locale `de-DE` → `en-US` geändert (Session 3). Layout.tsx deutsche Strings → Englisch (Session 4). - -#### BUG-007: LibraryDetail.tsx – Kein React Query -- **Datei**: `frontend/src/pages/LibraryDetail.tsx` -- **Problem**: Einzige Seite die manuell `useState` + `useEffect` benutzt statt React Query. Kein Caching, kein Retry, inkonsistent. -- **Fix**: Auf `useQuery`/`useMutation` migrieren wie alle anderen Seiten -- **Status**: ✅ ERLEDIGT (Session 3) – 3× `useQuery` mit `enabled` Flags, zusammen mit BUG-001 behoben - -#### BUG-008: Auth-Store – fetchUser nie aufgerufen -- **Datei**: `frontend/src/stores/auth.ts` -- **Problem**: `isAuthenticated` initialisiert sich aus `!!getToken()`, aber `user` kommt nur aus persistiertem Store. `fetchUser` wird beim App-Start nie aufgerufen → evtl. veraltete User-Daten. -- **Fix**: `fetchUser()` bei App-Init aufrufen (z.B. 
in `App.tsx` oder `ProtectedRoute`) -- **Status**: ✅ ERLEDIGT (Session 3) – `useEffect` in `ProtectedRoute` ruft `fetchUser()` bei Mount auf - -### 🔵 LOW - -#### BUG-009: Code-Duplizierung – formatBytes -- **Dateien**: `LibraryDetail.tsx`, `Preview.tsx` haben eigene `formatBytes` statt `utils.ts` zu importieren -- **Fix**: Löschen und aus `lib/utils.ts` importieren -- **Status**: ✅ ERLEDIGT (Session 3) – Lokale Funktionen entfernt, importiert aus `lib/utils.ts`. `formatDuration` ebenfalls nach utils.ts verschoben. - -#### BUG-010: Users.tsx – Eigener Debounce statt Hook -- **Datei**: `frontend/src/pages/Users.tsx` -- **Problem**: Manueller `setTimeout`-Debounce statt `useDebounce` Hook -- **Fix**: `useDebounce` aus `hooks/useDebounce.ts` verwenden -- **Status**: ✅ ERLEDIGT (Session 3) - ---- - -## UX/UI-Probleme - -### Mobile-Responsiveness – Tabellen laufen über -| Seite | Nutzt ResponsiveTable? | Status | -|-------|----------------------|--------| -| History.tsx | ✅ Ja | ✅ Gut | -| Users.tsx | ✅ Ja | ✅ Gut | -| LibraryDetail.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 5) | -| UserDetail.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 4) | -| Activity.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 4) | -| Preview.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 6, mit Expand-Row-Support) | -| Staging.tsx | ✅ ResponsiveTable | ✅ Migriert (Session 4) | -| Jobs.tsx | ✅ ResponsiveTable | ✅ Executions-Tabelle migriert (Session 4) | - -### Fehlende UI-Features (geplant aber nicht implementiert) -- **Activity-Seite**: ~~IP-Adresse Spalte, Device-Spalte, Expand-Row, Library-Filter, Items-per-Page Selector~~ ✅ Alles implementiert (Session 4+5+6). 
~~Genre RadarChart, Watch Heatmap~~ ✅ (Session 9) -- **LibraryDetail**: ~~Genre-Distribution Charts~~ ✅ (Session 9), ~~Grid-View mit Poster-Bildern~~ ✅ (Session 10), ~~Expand-Row~~ ✅ (Session 6) -- **UserDetail**: ~~Favorite Genres Sektion~~ ✅ (Session 9), ~~Timeline-Tab~~ ✅ (Session 10), ~~Expand-Row~~ ✅ (Session 6) -- **Rules.tsx**: Modal ist sehr lang – kein Wizard/Accordion, keine Genre/Tag-Autocomplete -- **Settings.tsx**: Cron-Eingaben ohne Hilfe/Validierung -- **Dashboard**: ~~Keine Charts~~ recharts Charts implementiert (Session 5). ~~Genre Distribution~~ ✅ (Session 9) - -### Performance -- ~~**Kein Code-Splitting**~~: ✅ Alle Seiten (außer Login/Register) auf `React.lazy` + `Suspense` migriert (Session 4). -- **Jobs.tsx**: 5-Sekunden-Refetch auf 2 Queries = konstanter Netzwerk-Traffic -- **Staging.tsx**: Beide Queries auf 30s Refetch – könnte reduziert werden - -### Accessibility -- ~~Kein `aria-label` auf Mobile-Hamburger-Button in Layout~~ ✅ War bereits vorhanden -- ~~ConfirmDialog hat keinen Focus-Trap / `aria-modal`~~ ✅ Behoben (Session 6) – `role="dialog"`, `aria-modal`, `aria-labelledby`, Focus-Trap, Escape-Key, Click-Outside -- ~~Farbkontrast im Light-Mode gebrochen~~ ✅ Behoben (Session 3) - ---- - -## Code-Qualitätsprobleme - -### Frontend-Inkonsistenzen -| Problem | Betroffene Dateien | Beschreibung | Status | -|---------|--------------------|-------------|--------| -| Fetch-Pattern | LibraryDetail.tsx | Einzige Seite ohne React Query | ✅ Behoben (Session 3) | -| Utility-Duplizierung | LibraryDetail.tsx, Preview.tsx, Activity.tsx, UserDetail.tsx | Eigene formatBytes/formatDuration/formatWatchTime statt utils.ts | ✅ Behoben (Session 3+4) | -| Debounce-Pattern | Users.tsx, Activity.tsx | Manuell statt useDebounce Hook | ✅ Behoben (Session 3+4) | -| API-Prefix | LibraryDetail.tsx | Doppeltes /api/ | ✅ Behoben (Session 3) | -| Theme-Klassen | Login, Register, ConfirmDialog, Skeleton | Fehlende dark: Varianten | ✅ Behoben (Session 3) | - -### Backend – 
Keine bekannten kritischen Issues -Das Backend ist gut strukturiert mit: -- Sauberer Abstraktion (BaseServiceClient → Emby/Radarr/Sonarr) -- Vollständige API-Coverage für alle Features -- Rate Limiting auf allen Routes mit Security-Event-Logging -- Audit-Logging mit automatischer Retention -- Proper Error Handling -- Caching in EmbyClient (SimpleCache mit TTL) -- Umfassende Security-Härtung (httpOnly Cookies, CSRF, CSP, SSRF-Schutz, Account Lockout) - ---- - -## Was gut funktioniert - -### Backend ✅ -- **Sync-Engine**: Emby→DB Sync (Users, Watch History, Active Sessions, Libraries) robust implementiert -- **Cleanup-Engine**: Rule-Evaluation mit detailliertem Preview/Dry-Run, Grace Periods, Import Exclusions -- **Staging-System**: Soft-Delete mit Emby Staging-Library, Auto-Restore bei Watch, Per-Library Settings -- **Notifications**: Apprise-basiert mit Template-Engine (Mustache-like), Retry-Logik, Event-Type-Filtering -- **API-Design**: RESTful, gut paginiert, konsistente Error Responses - -### Frontend ✅ -- **Dashboard**: Gute Übersicht, Time-Range-Selector, Most Viewed/Popular/Active Users -- **Rules + Preview**: Templates, Export/Import, Bulk-Actions, Dry-Run mit detailliertem Reasoning -- **Staging UI**: Durchdachtes 2-Tab-Layout, Urgency-Farbcodierung, Global + Per-Library Settings -- **Notifications**: Multi-URL-Input, Template-Editor mit Variable-Suggestions, Live-Preview -- **Services & Jobs**: Übersichtlich, Test-Verbindung, manueller Sync-Trigger, editierbare Intervalle -- **Auth-Flow**: httpOnly Cookie Auth, CSRF Protection, Refresh-Token Rotation, Account Lockout -- **ResponsiveTable-Komponente**: Gut gebaut (Table→Cards auf Mobile), jetzt auf allen Seiten eingesetzt inkl. Expand-Row-Support - ---- - -## Nächste Schritte (priorisiert) - -### ~~Priorität 1: Kritische Bugs fixen~~ ✅ ERLEDIGT (Session 3) -1. ~~**BUG-001**: LibraryDetail.tsx `/api/`-Prefix fixen + auf React Query migrieren + Utils importieren~~ ✅ -2. 
~~**BUG-002/003/004**: Light-Mode fixen (Login, Register, ConfirmDialog, Skeleton)~~ ✅ - -### ~~Priorität 2: UX-Verbesserungen~~ ✅ ERLEDIGT (Session 3+4) -3. ~~**BUG-006**: Sprachmix Deutsch→Englisch bereinigen~~ ✅ utils.ts Locale (Session 3) + Layout.tsx deutsche Strings (Session 4) -4. ~~**Mobile Tables**: ResponsiveTable in Activity, UserDetail, Staging, Jobs, Preview einsetzen~~ ✅ (Session 4+6). Alle Seiten migriert. -5. ~~**BUG-005**: Toast-Theming fixen~~ ✅ - -### ~~Priorität 3: Phase 3 – Charts implementieren~~ ✅ ERLEDIGT (Session 4+9) -6. ~~**Daily Play Count Chart**: Area Chart mit recharts auf Activity-Seite~~ ✅ -7. ~~**Plays by Day of Week**: Bar Chart auf Activity-Seite~~ ✅ -8. ~~**Plays by Hour**: Bar Chart auf Activity-Seite~~ ✅ -9. ~~**Genre Distribution**: Backend-API `/activity/genre-stats` + RadarChart (Activity) + BarChart (Dashboard, LibraryDetail, UserDetail)~~ ✅ (Session 9) - -### ~~Priorität 4: Fehlende Phase 2 Features~~ ✅ ERLEDIGT -10. ~~**Activity-Seite erweitern**: IP, Device Spalten, Library-Filter, Items-per-Page, Expand-Row~~ ✅ (Session 4+5+6). -11. ~~**UserDetail erweitern**: Library-Filter auf Activity, Expand-Row, Favorite Genres~~ ✅ (Session 5+6+9). -12. ~~**LibraryDetail erweitern**: ResponsiveTable, Expand-Row, Genre-Charts~~ ✅ (Session 5+6+9). Grid-View offen. -13. ~~**Dashboard Charts**: Daily Plays, Day-of-Week, Hour-of-Day, Genre Distribution recharts Charts~~ ✅ (Session 5+9). - -### ~~Priorität 5: Code-Qualität~~ ✅ ERLEDIGT (Session 3+4) -13. ~~Code-Splitting mit React.lazy~~ ✅ (Session 4) – 13 Seiten lazy-loaded mit Suspense-Fallback -14. ~~BUG-008: fetchUser bei App-Init~~ ✅ -15. ~~BUG-009/010: Code-Duplizierung/Debounce aufräumen~~ ✅ - -### ~~Priorität 6: Security Hardening~~ ✅ ERLEDIGT (Session 7+8) -16. ~~httpOnly Cookie Migration (ADR-001)~~ ✅ -17. ~~CSRF Protection~~ ✅ -18. ~~Security Headers, SSRF Validation, Account Lockout~~ ✅ -19. ~~Admin-Only Routes, Input Sanitization, Sensitive Config Masking~~ ✅ -20. 
~~CI/CD: Tests + Security Scanning Workflows~~ ✅ -21. ~~Dependabot Setup~~ ✅ - -### ~~Priorität 7: Phase 4+5~~ ✅ ERLEDIGT (Session 9+10+11) -22. ~~Advanced Analytics~~ ✅ (Session 9+10): Heatmaps, User Timeline, Concurrent Streams, Duration Stats, Completion Rates, Binge Detection, Content Reach -23. ~~Smart Cleanup Rules (Per-User Conditions)~~ ✅ (Session 11): no_user_watched_days, exclude_if_user_favorited, exclude_active_sessions, min_unique_viewers, Cleanup Suggestions API + UI -24. ~~Genre Distribution Charts~~ ✅ (Session 9) -25. ~~User Detail: Favorite Genres~~ ✅ (Session 9), ~~Timeline-Tab~~ ✅ (Session 10) -26. ~~LibraryDetail: Genre-Charts~~ ✅ (Session 9), ~~Grid-View mit Poster-Bildern~~ ✅ (Session 10) - ---- - -## Dateistruktur-Referenz - -### Wichtige Konfigurationsdateien -| Datei | Beschreibung | -|-------|-------------| -| `docker-compose.dev.yml` | Dev-Umgebung (Hot-Reload) | -| `docker-compose.yml` | Production mit SQLite | -| `docker-compose.postgres.yml` | Production mit PostgreSQL | -| `backend/pyproject.toml` | Python Dependencies | -| `frontend/package.json` | Node Dependencies | -| `frontend/vite.config.ts` | Vite Config (Proxy → Backend) | -| `frontend/src/index.css` | Tailwind @theme Definition | -| `docs/adr/001-httponly-cookie-migration.md` | ADR: JWT localStorage → httpOnly Cookies | -| `.github/workflows/tests.yml` | CI: Backend + Frontend Tests | -| `.github/workflows/security-scan.yml` | CI: SAST/DAST Security Scanning | -| `.github/dependabot.yml` | Dependabot Config (target: develop) | - -### API-Endpunkte (Kurzreferenz) -| Prefix | Router | Beschreibung | -|--------|--------|-------------| -| `/api/auth` | auth.py | Login, Register, Sessions | -| `/api/services` | services.py | Service CRUD, Test, Sync | -| `/api/libraries` | libraries.py | Libraries, Stats, Detail, Media | -| `/api/media` | media.py | Dashboard-Stats, Watch-Stats, Image Proxy | -| `/api/rules` | rules.py | Rules CRUD, Templates, Export | -| `/api/activity` | 
activity.py | Activity Log, Stats, Active Sessions | -| `/api/users` | users.py | Media Server Users, Timeline | -| `/api/notifications` | notifications.py | Channels CRUD, Templates | -| `/api/staging` | staging.py | Staging System | -| `/api/jobs` | jobs.py | Scheduler Jobs | -| `/api/setup` | setup.py | ★ NEU: Setup Wizard Status, Test, Add, Complete, Skip | -| `/api/system` | system.py | Health, Settings, Cleanup, Version | -| `/api/ws/jobs` | main.py | ★ NEU: WebSocket für Real-Time Job-Progress | -| `/api/audit` | audit.py | Audit Logs (Admin) | - ---- - -## Entwicklungsumgebung starten - -```bash -# Repository klonen -git clone https://github.com/Serph91P/MediaCleanup.git -cd MediaCleanup -git checkout develop - -# Dev-Umgebung starten (Hot-Reload für Frontend + Backend) -docker compose -f docker-compose.dev.yml up --build - -# Zugriff: -# Frontend: http://localhost:5173 -# Backend API Docs: http://localhost:8080/docs -# Erster Login erstellt Admin-Account -``` - ---- - -## Änderungshistorie dieses Dokuments - -| Datum | Änderung | -|-------|----------| -| 22.02.2026 | Vollständige Neuaufsetzung: Kompletter Code-Review (Backend + Frontend), Bug-Katalog mit 10 Einträgen, UX-Analyse, Priorisierte Roadmap, Architektur-Dokumentation | -| 22.02.2026 (2) | **Session 2**: WebSocket Real-Time System (ConnectionManager, Progress-Callbacks für Sonarr/Radarr/Emby, Scheduler-Integration), Global Toast-Notifications via WebSocket, Jobs-Page komplett neu geschrieben (Live-Progress-Bars, Running-Panel, WS-Status-Indikator), Layout.tsx Job-Badge, Setup-Wizard (Backend: /setup/status, /test-connection, /add-service, /complete, /skip; Frontend: 5-Step geführter Wizard mit Welcome→Arr→MediaServer→Sync→Complete), App.tsx SetupGate Redirect-Logik | -| 22.02.2026 (3) | **Session 3 – Bugfixes**: Alle 10 Bugs (BUG-001 bis BUG-010) behoben. LibraryDetail.tsx komplett rewritten (API-Pfade, React Query, shared Utils, Light/Dark-Mode). 
Light-Mode gefixt in Login, Register, ConfirmDialog, Skeleton, Toaster. Locale `de-DE` → `en-US`. fetchUser() bei App-Init. Code-Duplizierung (formatBytes/formatDuration) aufgeräumt. useDebounce Hook in Users.tsx. Branch: `fix/bugfixes-and-ux-improvements`, Commit: `f90a5f5` (10 Dateien, 295 Insertions, 342 Deletions) | -| 22.02.2026 (4) | **Session 4 – UX & Charts**: Layout.tsx deutsche Strings → Englisch (BUG-006 abgeschlossen). ResponsiveTable Light-Mode-Fix (fehlende `dark:` Varianten). Activity.tsx: `useDebounce` statt setTimeout, shared Utils (`formatDurationLong`, `formatWatchTime`), ResponsiveTable-Migration, 3× recharts Charts (Daily Plays Area, Day-of-Week Bar, Hour-of-Day Bar). UserDetail.tsx: shared Utils + ResponsiveTable. Staging.tsx: ResponsiveTable. Jobs.tsx: Executions-Tabelle → ResponsiveTable. App.tsx: React.lazy Code-Splitting (13 Seiten). Branch: `feature/ux-improvements-and-charts` | -| 22.02.2026 (5) | **Session 5 – Phase 2 Enhancements & Docs**: LibraryDetail.tsx Media+Activity Tabs → ResponsiveTable. Activity.tsx: Library-Filter Dropdown + Items-per-Page Selector (10/25/50/100). UserDetail.tsx: Library-Filter auf Activity-Tab. Dashboard.tsx: 3× recharts Charts (Daily Plays Area, Day-of-Week Bar, Hour-of-Day Bar) mit Dashboard-eigenem statsDays-Selector. PLANNED_FEATURES.md: Phase 2/3 Status aktualisiert, Implementation History Tabelle. Branch: `feature/phase2-enhancements-and-docs` | -| 22.02.2026 (6) | **Session 6 – Bugfix & Expand-Rows**: BUG-011 behoben: PlaybackActivity `position_ticks`/`runtime_ticks` Integer→BigInteger (PostgreSQL int32 overflow bei Emby-Ticks >2.1B). DB-Migration hinzugefügt. Fehlerbehandlung in `_sync_active_sessions` und `services.py` mit `db.rollback()`. ResponsiveTable: Expand-Row-Support (`onRowClick`, `isExpanded`, `expandedContent`). Preview.tsx: Beide Tabellen (Series+Movies) auf ResponsiveTable migriert (letzte Seite). 
Expand-Rows auf Activity, UserDetail, LibraryDetail (IP, Device, Play Method, Progress-Bar). ConfirmDialog: `aria-modal`, Focus-Trap, Escape-Key, Click-Outside. Library Activity API: `ip_address`, `transcode_video`, `transcode_audio` hinzugefügt. BUG-012: Radarr-Pfad Ordner→Datei-Pfad (Movie-Watch-Statistiken). BUG-013: User Last Seen/Watched/Client Fallback-Logik. Branch: `feature/phase2-enhancements-and-docs` | -| 24.02.2026 (7) | **Session 7 – Security Hardening I** (PR #19 `feature/security-hardening`): CORS Lockdown (Wildcard-Warnung in Production). API-Key/Notification-Secret Masking. Password Complexity Enforcement. Security Headers Middleware (X-Frame-Options, X-Content-Type-Options, Referrer-Policy, Permissions-Policy). WebSocket Auth (kurzlebiger Token). Refresh Token Rotation (altes Token revoked). Account Lockout (DB-Migration für `failed_login_attempts`, `locked_until`). Trusted Proxy Config. Rate-Limit Improvements. System Settings Allowlist. Staging Path Validation. Rule Import File Size Limit. `datetime.utcnow()` → `datetime.now(timezone.utc)`. Audit Log Data Retention Job. SECRET_KEY Enforcement in docker-compose. Frontend WebSocket Auth + Refresh Token Rotation Support. Branch: `feature/security-hardening`, Commit: `3058956` (PR #19) | -| 24.02.2026 (8) | **Session 8 – Security Hardening II + httpOnly Cookies** (PRs #20, #21, #22): httpOnly Cookie Auth Migration (ADR-001) – JWT aus localStorage entfernt, Set-Cookie im Backend, `credentials: 'include'` im Frontend, Cookie-Clearing bei Logout. CSRF Double-Submit Cookie Middleware (`csrf.py`). Security Event Logging (`security_events.py` – strukturiertes JSON für Auth/Rate-Limit/CSRF Events). SSRF-Safe URL Validation (`url_validation.py`). `escape_like()` für SQL-Injection-Schutz. Content-Security-Policy Header. WebSocket Connection Limits per IP. Body Size Limit Middleware. Admin-Only auf Rules, Jobs, Staging, Services, Notifications, System-Settings Routes. 
Outbound URL Validation auf alle Service-Connection/Notification/Setup Endpoints. Enhanced Rate Limiting mit Security-Event-Logging auf allen API-Routes. Refresh Token Cleanup Scheduler-Job. CI/CD: `tests.yml` (Backend+Frontend Tests), `security-scan.yml` (SAST/DAST). Pytest Setup mit Smoke Test. Dependabot Config. npm Dependency Bump. Branch: `feature/security-hardening2`, Commits: `148d0f6` (PR #20), `f0eec84` (PR #21), `657ad70` (PR #22) | -| 26.02.2026 (10) | **Session 10 – Phase 2 Completion + Phase 4 Advanced Analytics**: Phase 2: User Timeline Tab (Backend + Frontend Calendar Heatmap + Session-Gruppierung), Image Proxy (`GET /media/{id}/image`), LibraryDetail Grid View (Table/Grid Toggle mit Poster-Bildern), UserDetail Activity Type-Filter + Suche. Phase 4: Concurrent Streams Analysis, Watch Duration Stats, Completion Rate Analytics, Binge-Watch Detection, Shared vs. Solo Content Analysis. Neue Analytics-Seite mit 5 Tabs. Branches: `feature/phase2-completion`, `feature/phase4-advanced-analytics` | -| 26.02.2026 (11) | **Session 11 – Phase 5 Smart Cleanup Integration**: Per-User Rule Conditions (`no_user_watched_days`, `exclude_if_user_favorited`, `exclude_active_sessions`, `min_unique_viewers`) in RuleConditions Schema + CleanupEngine. Analytics-based Cleanup Suggestions API (`GET /media/cleanup-suggestions`). CleanupSuggestions.tsx Frontend-Seite mit Kategorie-Filter (Unwatched, Abandoned, Low Engagement, Stale, Storage Hog) und Score-basiertem Ranking. Rules.tsx: Smart Cleanup Per-User Section. 
Branch: `feature/phase5-smart-cleanup` | -| 30.12.2024 | Initiale Version: Session-Zusammenfassung (Rules Export, Sidebar, Theme Toggle, Staging UI) | From 14c0d339697d04705473e06619d1dee4e86ccdd6 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:14:52 +0100 Subject: [PATCH 100/122] feat: remove Planned Features document as it is no longer needed --- PLANNED_FEATURES.md | 432 -------------------------------------------- 1 file changed, 432 deletions(-) delete mode 100644 PLANNED_FEATURES.md diff --git a/PLANNED_FEATURES.md b/PLANNED_FEATURES.md deleted file mode 100644 index 0993673..0000000 --- a/PLANNED_FEATURES.md +++ /dev/null @@ -1,432 +0,0 @@ -# Planned Features - MediaCurator - -This document describes the planned statistics and analytics features for MediaCurator. These features will provide deep insights into media consumption patterns and enable smarter cleanup decisions. - ---- - -## Table of Contents - -1. [Library Detail View](#library-detail-view) -2. [Users Page](#users-page) -3. [User Detail View](#user-detail-view) -4. [Global Activity Log](#global-activity-log) -5. [Statistics Dashboard](#statistics-dashboard) -6. [Advanced Analytics](#advanced-analytics) -7. [Smart Cleanup Rules](#smart-cleanup-rules) - ---- - -## Library Detail View - -Click on any library to see detailed statistics and content. 
- -### Overview Tab -- **Time-based Stats**: Play counts and duration for Last 24 Hours, Last 7 Days, Last 30 Days, and All Time -- **Genre Distribution**: Radar charts showing which genres are watched most (by duration and by play count) -- **Recently Added**: Carousel/grid of recently added content with poster images -- **Last Watched**: Recently watched content in this library with user attribution - -### Media Tab -- **Content Browser**: Grid view of all media items with poster images ✅ Grid View with Poster Proxy (Session 10) -- **File Size Display**: Show file size on each item (e.g., "16.24 GB") ✅ -- **Sorting Options**: Sort by title, date added, size, play count ✅ -- **Filtering**: Filter by genre, year, rating, watched status - -### Activity Tab -- **Library Activity Log**: All playback sessions for this library -- **Columns**: User, IP Address, Title, Client, Transcode status, Device, Date, Total Playback -- **Filtering**: Filter by type (movie/episode), search by title -- **Pagination**: Handle large activity logs efficiently - ---- - -## Users Page - -Overview of all Emby/Jellyfin users synced to MediaCurator. - -### User List -| Column | Description | -|--------|-------------| -| User | Username with avatar | -| Tracked | Whether user is being tracked (checkmark) | -| Last Watched | Title of the last watched content | -| Last Client | Client app and device used (e.g., "Emby for iOS - iPhone") | -| Plays | Total play count across all time | -| Watch Time | Total watch duration (e.g., "9 Hours 33 Minutes") | -| Last Seen | When the user was last active (e.g., "8 Days 20 Hours 58 Minutes ago") | - -### Features -- **Search**: Search users by name -- **Pagination**: Handle many users -- **Click to Detail**: Click on any user to see their detail view - ---- - -## User Detail View - -Detailed statistics and activity for a single user. 
- -### Overview Tab -- **User Stats Box**: Play counts and watch time for Last 24 Hours, Last 7 Days, Last 30 Days, All Time -- **Last Watched**: Grid of recently watched content by this user -- **Favorite Genres**: What genres this user prefers (derived from watch history) ✅ BarChart (Session 9) - -### Activity Tab -- **Item Activity**: Complete playback history for this user -- **Columns**: IP Address, Title, Client, Transcode, Device, Date, Total Playback -- **Library Filter**: Filter activity by library ✅ -- **Type Filter**: Filter by movie/series ✅ (Session 10) -- **Search**: Search within activity ✅ (Session 10) - -### Timeline Tab ✅ Implemented (Session 10) -- **Visual Timeline**: Chronological view of user's watch activity ✅ Calendar Heatmap (90 days) -- **Library Filter**: Show only specific libraries ✅ -- **Date Range**: Filter by date range -- **Session Grouping**: Group consecutive plays into sessions ✅ 30-min gap detection - ---- - -## Global Activity Log - -Server-wide activity feed showing all playback across all users. - -### Activity Table -| Column | Description | -|--------|-------------| -| User | Which user is watching | -| IP Address | Client IP address | -| Title | Movie title or "Series : S01E05 - Episode Title" | -| Client | App name (e.g., "Emby for Samsung", "Emby Web") | -| Transcode | Direct Stream, Transcode (Video), Transcode (Audio), or both | -| Device | Device name (e.g., "Samsung Smart TV", "Firefox", "iPhone") | -| Date | Timestamp of playback start | -| Total Playback | Duration of the playback session | - -### Filters -- **Libraries**: Filter by specific library -- **Type**: All, Movies, Series, Music, etc. 
-- **Items per page**: 10, 25, 50, 100 -- **Search**: Search by title, user, or device - -### Expand Row ✅ Implemented (Session 6) -- Click to expand a row for more details: - - ~~Full file path~~ - - ~~Bitrate information~~ - - ~~Resolution~~ - - ~~Audio/video codecs~~ - - ~~Subtitle information~~ - - IP Address, Device, Play Method (color-coded), Progress Bar - - Implemented on: Activity, UserDetail, LibraryDetail, Preview (season breakdown) - ---- - -## Statistics Dashboard - -Visual charts and graphs for understanding viewing patterns. - -### Daily Play Count Chart -- **Stacked Area Chart**: Shows daily play counts over configurable time range -- **Per-Library Breakdown**: Different color for each library -- **Toggle**: Switch between Count (plays) and Duration (watch time) -- **Time Range**: Last 7, 14, 20, 30, 60, 90 days - -### Play Count By Day of Week -- **Bar Chart**: Which days of the week are most active -- **Per-Library Breakdown**: Stacked by library -- **Shows**: Sunday through Saturday activity patterns - -### Play Count By Hour -- **Bar Chart**: Which hours of the day are most active -- **24-Hour View**: 00:00 to 23:00 -- **Per-Library Breakdown**: See what's watched when -- **Peak Identification**: Easily spot prime-time hours - -### Genre Distribution ✅ Implemented (Session 9) -- **Radar/Spider Chart**: Visual representation of genre preferences ✅ RadarChart on Activity page -- **Two Views**: By Duration (total watch time) and By Play Count ✅ Radar (plays) + Bar (watch time hours) -- **Library-Specific**: Can be global or per-library ✅ Filterable by library_id and user_id - ---- - -## Advanced Analytics - -Deeper insights derived from watch data. - -### User Activity Timeline ✅ Implemented (Session 10) -**Purpose**: Visualize when each user watches content over time. 
- -- Calendar heatmap showing activity intensity ✅ 90-day GitHub-style heatmap -- Hour-by-day grid showing viewing patterns -- Identify each user's "watch schedule" ✅ Session grouping with 30-min gap - -### Watch Patterns / Heatmap ✅ Implemented (Session 9) -**Purpose**: Understand peak viewing times across all users. - -- 7x24 grid (days x hours) with color intensity ✅ CSS Grid on Activity page -- Identify server load patterns ✅ -- Plan maintenance windows during low-activity periods ✅ -- See if weekends differ from weekdays ✅ - -### Concurrent Streams Analysis ✅ Implemented (Session 10) -**Purpose**: Track how many simultaneous streams occur. - -- Historical peak concurrent users ✅ Daily peak chart + overall peak -- Time of day with most concurrent streams ✅ Hourly average bar chart -- Useful for: - - Server capacity planning ✅ - - Transcode load understanding ✅ - - License/limit management ✅ - -### Watch Duration Stats ✅ Implemented (Session 10) -**Purpose**: Understand typical viewing sessions. - -- Average session length (overall and per library) ✅ -- Distribution chart of session lengths ✅ Bucketed bar chart -- Compare movies vs. series session lengths ✅ By-type cards -- Identify "quick check" vs. "full watch" patterns ✅ - -### Completion Rate Analytics ✅ Implemented (Session 10) -**Purpose**: Track how often content is finished. - -- Percentage of movies watched to completion (>90%) ✅ Pie chart -- Series episode completion rates ✅ By-type stacked bars -- Identify content that users abandon early ✅ Most abandoned list -- Useful for cleanup decisions: "Content rarely finished" ✅ - -### Binge-Watch Detection ✅ Implemented (Session 10) -**Purpose**: Identify when users watch multiple episodes consecutively. 
- -- Detect 3+ episodes of same series in one session ✅ 4hr gap detection -- Track which series trigger binge behavior ✅ Top binged series -- User-specific binge patterns ✅ Top bingers leaderboard -- Useful for: - - Understanding engagement ✅ - - Cleanup rules: "Series being binged should not be deleted" ✅ - -### Shared vs. Solo Content ✅ Implemented (Session 10) -**Purpose**: Identify content watched by multiple users vs. single users. - -- **Shared Content**: Watched by 2+ unique users ✅ -- **Solo Content**: Only watched by 1 user ✅ -- **Unwatched**: Not watched by anyone ✅ -- Useful for cleanup: ✅ - - Keep shared content longer ✅ - - Solo content can be cleaned up sooner ✅ - - Unwatched content is priority for cleanup ✅ - ---- - -## Smart Cleanup Rules - -New rule conditions enabled by user tracking. - -### Cleanup Rules per User -**Condition**: "Delete only if NO user has watched in X days" - -Instead of relying on global "last watched" from Emby, check each user's watch history individually. 
- -**Use Cases**: -- Family server: Keep content if any family member watched recently -- Multi-user: Don't delete shared favorites -- Fair cleanup: Account for all users, not just the last one - -**Example Rule**: -``` -Delete movie if: - - No user has watched in 90 days - - AND file size > 10 GB - - AND rating < 7.0 -``` - -### User-Specific Exclusions -**Condition**: "Never delete if User X has it as favorite" - -**Use Cases**: -- Protect content for specific users (e.g., kids' content) -- VIP users whose favorites are always kept -- Admin override for important content - -### Currently Watching Detection (Enhanced) -**Condition**: "Never delete if any user is currently in the middle of watching" - -**Improvements**: -- Track per-user watch progress -- Detect active binge sessions -- Protect entire series if someone is actively watching - ---- - -## Implementation Priority - -### Phase 1 - Foundation ✅ -- [x] MediaServerUser model -- [x] UserWatchHistory model -- [x] Basic user stats on Dashboard -- [x] Library stats API -- [x] PlaybackActivity model - -### Phase 2 - Views & Navigation ✅ -- [x] Library Detail View (Overview, Media, Activity tabs) -- [x] Users Page (list with stats) -- [x] User Detail View (Overview, Activity, Timeline tabs) -- [x] Global Activity Log -- [x] Activity stats API (plays by day/hour/week) -- [x] Active sessions tracking -- [x] Library filter on Activity page -- [x] Library filter on User Detail activity tab -- [x] Items-per-page selector on Activity page -- [x] ResponsiveTable on all pages (History, Users, Activity, UserDetail, Staging, Jobs, LibraryDetail, Preview) -- [x] Expand-Row on Activity, UserDetail, LibraryDetail (IP, Device, Play Method, Progress) -- [x] ConfirmDialog accessibility (aria-modal, focus trap, escape key) -- [x] User Timeline Tab (calendar heatmap + session grouping) (Session 10) -- [x] Image Proxy endpoint (poster images from Emby/Jellyfin) (Session 10) -- [x] LibraryDetail Grid View with poster images 
(Session 10) -- [x] UserDetail Activity type filter + search (Session 10) - -### Phase 3 - Statistics & Charts ✅ Complete (Session 4+9) -- [x] Daily Play Count Chart (Activity page + Dashboard) -- [x] Play Count by Day of Week Chart (Activity page + Dashboard) -- [x] Play Count by Hour Chart (Activity page + Dashboard) -- [x] Genre Distribution Charts – Backend `/activity/genre-stats` + RadarChart (Activity) + BarChart (Dashboard, LibraryDetail, UserDetail) (Session 9) -- [x] Watch Patterns Heatmap – Backend `/activity/watch-heatmap` + CSS Grid 7×24 (Activity) (Session 9) - -### Phase 4 - Advanced Analytics ✅ Complete (Session 9+10) -- [x] Concurrent Streams Analysis – Backend `/activity/concurrent-streams` + Frontend Analytics page (Session 10) -- [x] Watch Duration Stats – Backend `/activity/duration-stats` + Frontend Analytics page (Session 10) -- [x] Completion Rate Analytics – Backend `/activity/completion-rates` + Frontend Analytics page (Session 10) -- [x] Binge-Watch Detection – Backend `/activity/binge-stats` + Frontend Analytics page (Session 10) -- [x] Shared vs. 
Solo Content – Backend `/media/content-reach` + Frontend Analytics page (Session 10) - -### Phase 5 - Smart Cleanup Integration ✅ Complete (Session 11) -- [x] Cleanup Rules per User condition – `no_user_watched_days` in RuleConditions (Session 11) -- [x] User-Specific Exclusions – `exclude_if_user_favorited` + `min_unique_viewers` in RuleConditions (Session 11) -- [x] Enhanced Currently Watching detection – `exclude_active_sessions` checks PlaybackActivity.is_active (Session 11) -- [x] Analytics-based cleanup suggestions – `/media/cleanup-suggestions` endpoint + CleanupSuggestions page (Session 11) - -### Security Hardening ✅ Complete (Session 7+8) -- [x] httpOnly Cookie Auth (JWT aus localStorage entfernt, ADR-001) -- [x] CSRF Double-Submit Cookie Protection -- [x] Security Headers Middleware (CSP, X-Frame-Options, X-Content-Type-Options) -- [x] Structured Security Event Logging -- [x] SSRF-safe URL Validation (outbound requests) -- [x] Account Lockout Mechanism -- [x] Refresh Token Rotation -- [x] WebSocket Authentication (short-lived token) -- [x] CORS Lockdown (wildcard warning in production) -- [x] Admin-Only Routes (Rules, Jobs, Staging, Services, Notifications, System) -- [x] Input Sanitization (escape_like for SQL queries) -- [x] Outbound URL Validation (Services, Notifications, Setup) -- [x] Staging Path Validation -- [x] Request Body Size Limit -- [x] WebSocket Connection Limits per IP -- [x] Sensitive Config Masking (API keys, secrets) -- [x] Password Complexity Enforcement -- [x] Secret Key Enforcement -- [x] Audit Log Data Retention Job -- [x] Refresh Token Cleanup Job -- [x] Trusted Proxy Configuration -- [x] CI/CD: Tests + Security Scanning Workflows -- [x] Pytest Setup with Smoke Tests -- [x] Dependabot Configuration -- [x] datetime.utcnow() → timezone-aware datetime.now(timezone.utc) - ---- - -## Data Requirements - -To enable these features, the following data needs to be tracked: - -### From Emby/Jellyfin Sync -- User list with IDs and names -- 
Per-user watch history (what, when, how long) -- Playback sessions (client, device, IP, transcode info) -- Current playback position (for in-progress detection) - -### Derived/Calculated -- Total watch time per user -- Total watch time per library -- Genre preferences per user -- Session detection (grouping consecutive plays) -- Concurrent stream peaks - -### Storage Considerations -- ~~Activity logs can grow large - implement retention policies~~ ✅ Implemented (Session 7: Audit Log Data Retention Job) -- Consider aggregating old data (daily summaries instead of individual events) -- Provide cleanup options for activity history - ---- - ---- - -## Implementation History - -| Phase | Feature | Session | Status | -|-------|---------|---------|--------| -| 1 | All Foundation models & APIs | Session 1 | ✅ | -| 2 | Library Detail, Users, Activity pages | Session 1-2 | ✅ | -| 2 | WebSocket Real-Time System | Session 2 | ✅ | -| 2 | Setup Wizard | Session 2 | ✅ | -| 2 | Bug fixes (Light-mode, API paths, etc.) 
| Session 3 | ✅ | -| 2 | ResponsiveTable migrations | Session 4 | ✅ | -| 3 | recharts charts on Activity page | Session 4 | ✅ | -| 2 | Library filter on Activity & UserDetail | Session 5 | ✅ | -| 2 | Items-per-page on Activity | Session 5 | ✅ | -| 2 | LibraryDetail ResponsiveTable migration | Session 5 | ✅ | -| 3 | Dashboard recharts charts | Session 5 | ✅ | -| 4 | Code-splitting with React.lazy | Session 4 | ✅ | -| 2 | Preview.tsx ResponsiveTable migration | Session 6 | ✅ | -| 2 | Expand-Row (Activity, UserDetail, LibraryDetail) | Session 6 | ✅ | -| 2 | ConfirmDialog accessibility (focus trap, aria-modal) | Session 6 | ✅ | -| - | BUG-011: position_ticks int32→BigInteger | Session 6 | ✅ | -| - | BUG-012: Radarr folder→file path (movie stats) | Session 6 | ✅ | -| - | BUG-013: User Last Seen/Watched/Client fallback | Session 6 | ✅ | -| Sec | CORS Lockdown, Secret Key Enforcement, API Key Masking | Session 7 | ✅ | -| Sec | Security Headers Middleware (CSP, X-Frame-Options) | Session 7 | ✅ | -| Sec | Account Lockout + Refresh Token Rotation | Session 7 | ✅ | -| Sec | WebSocket Auth Token + Frontend Auth Rotation | Session 7 | ✅ | -| Sec | Trusted Proxy Config + Rate Limit Improvements | Session 7 | ✅ | -| Sec | Staging Path Validation + Rule Import Size Limit | Session 7 | ✅ | -| Sec | Audit Log Data Retention Job | Session 7 | ✅ | -| Sec | datetime.utcnow() → timezone-aware | Session 7 | ✅ | -| Sec | httpOnly Cookie Auth Migration (ADR-001) | Session 8 | ✅ | -| Sec | CSRF Double-Submit Cookie Middleware | Session 8 | ✅ | -| Sec | Security Event Logging (structured JSON) | Session 8 | ✅ | -| Sec | SSRF-safe URL Validation | Session 8 | ✅ | -| Sec | SQL Injection Protection (escape_like) | Session 8 | ✅ | -| Sec | Content-Security-Policy Header | Session 8 | ✅ | -| Sec | WebSocket Connection Limits per IP | Session 8 | ✅ | -| Sec | Admin-Only Routes (all sensitive endpoints) | Session 8 | ✅ | -| Sec | Outbound URL Validation (SSRF) on all endpoints | Session 8 | ✅ | -| 
Sec | Body Size Limit + Rate Limiting on all Routes | Session 8 | ✅ | -| Sec | Refresh Token Cleanup Job | Session 8 | ✅ | -| Sec | Password Complexity + Input Validation | Session 8 | ✅ | -| CI | GitHub Actions: tests.yml + security-scan.yml | Session 8 | ✅ | -| CI | Pytest Setup + Smoke Test | Session 8 | ✅ | -| CI | Dependabot Configuration | Session 8 | ✅ | -| 3 | Genre Distribution API (`/activity/genre-stats`) | Session 9 | ✅ | -| 4 | Watch Patterns Heatmap API (`/activity/watch-heatmap`) | Session 9 | ✅ | -| 3 | Genre RadarChart on Activity page | Session 9 | ✅ | -| 4 | Watch Heatmap (7×24 CSS Grid) on Activity page | Session 9 | ✅ | -| 3 | Genre Charts on LibraryDetail (Radar + Bar) | Session 9 | ✅ | -| 2 | Favorite Genres BarChart on UserDetail | Session 9 | ✅ | -| 3 | Genre Distribution BarChart on Dashboard | Session 9 | ✅ | -| 2 | User Timeline API (`/users/{id}/timeline`) | Session 10 | ✅ | -| 2 | Image Proxy API (`/media/{id}/image`) | Session 10 | ✅ | -| 2 | User Timeline Tab (Calendar Heatmap + Sessions) | Session 10 | ✅ | -| 2 | LibraryDetail Grid View with Poster Images | Session 10 | ✅ | -| 2 | UserDetail Activity Type Filter + Search | Session 10 | ✅ | -| 4 | Concurrent Streams API (`/activity/concurrent-streams`) | Session 10 | ✅ | -| 4 | Duration Stats API (`/activity/duration-stats`) | Session 10 | ✅ | -| 4 | Completion Rates API (`/activity/completion-rates`) | Session 10 | ✅ | -| 4 | Binge Stats API (`/activity/binge-stats`) | Session 10 | ✅ | -| 4 | Content Reach API (`/media/content-reach`) | Session 10 | ✅ | -| 4 | Analytics Page (5-tab: Concurrent, Duration, Completion, Binge, Reach) | Session 10 | ✅ | -| 5 | Per-User Rule Conditions (no_user_watched_days, exclude_if_user_favorited) | Session 11 | ✅ | -| 5 | Active Session Protection (exclude_active_sessions) | Session 11 | ✅ | -| 5 | Min Unique Viewers Rule Condition | Session 11 | ✅ | -| 5 | Cleanup Suggestions API (`/media/cleanup-suggestions`) | Session 11 | ✅ | -| 5 | Cleanup 
Suggestions Page (CleanupSuggestions.tsx) | Session 11 | ✅ | - ---- - -*This document will be updated as features are implemented and new ideas emerge.* From 16c5b64a9610a25096f4b2390e3ece85a4ecb47d Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:34:32 +0100 Subject: [PATCH 101/122] feat: add endpoints for flagging and staging cleanup suggestions --- backend/app/api/routes/media.py | 264 +++++++++++++++++++++++++++++++- 1 file changed, 263 insertions(+), 1 deletion(-) diff --git a/backend/app/api/routes/media.py b/backend/app/api/routes/media.py index cf7d734..f2c54bb 100644 --- a/backend/app/api/routes/media.py +++ b/backend/app/api/routes/media.py @@ -9,13 +9,15 @@ from typing import Dict, Any, Optional, List from datetime import datetime, timedelta, timezone import httpx +from pydantic import BaseModel, Field, field_validator from loguru import logger from ...core.database import get_db from ...core.rate_limit import limiter, RateLimits -from ...models import MediaItem, ServiceConnection, ServiceType, MediaType, ImportStats, CleanupLog, MediaServerUser, UserWatchHistory, Library +from ...models import MediaItem, ServiceConnection, ServiceType, MediaType, ImportStats, CleanupLog, MediaServerUser, UserWatchHistory, Library, AuditActionType from ...schemas import CleanupLogResponse from ..deps import get_current_user +from ...services.audit import AuditService router = APIRouter(prefix="/media", tags=["Media"]) @@ -1194,3 +1196,263 @@ async def get_media_image( logger.warning(f"Failed to fetch image for media {media_id}: {e}") raise HTTPException(status_code=502, detail="Could not reach media server") + +# --- Actionable Cleanup Suggestions --- + +# Hard limits to prevent abuse +_MAX_SUGGESTION_BATCH_SIZE = 50 +_MAX_GRACE_PERIOD_DAYS = 365 +_MIN_GRACE_PERIOD_DAYS = 1 + + +class FlagSuggestionsRequest(BaseModel): + """Request schema for flagging cleanup suggestions.""" + media_ids: List[int] = Field( + ..., + min_length=1, + 
max_length=_MAX_SUGGESTION_BATCH_SIZE, + description="List of media item IDs to flag for cleanup" + ) + grace_period_days: int = Field( + default=7, + ge=_MIN_GRACE_PERIOD_DAYS, + le=_MAX_GRACE_PERIOD_DAYS, + description="Grace period in days before scheduled cleanup" + ) + + @field_validator('media_ids') + @classmethod + def validate_unique_ids(cls, v: List[int]) -> List[int]: + if len(v) != len(set(v)): + raise ValueError("Duplicate media IDs are not allowed") + return v + + +class StageSuggestionsRequest(BaseModel): + """Request schema for staging cleanup suggestions.""" + media_ids: List[int] = Field( + ..., + min_length=1, + max_length=_MAX_SUGGESTION_BATCH_SIZE, + description="List of media item IDs to move to staging" + ) + + @field_validator('media_ids') + @classmethod + def validate_unique_ids(cls, v: List[int]) -> List[int]: + if len(v) != len(set(v)): + raise ValueError("Duplicate media IDs are not allowed") + return v + + +class SuggestionActionResponse(BaseModel): + """Response schema for cleanup suggestion actions.""" + success: bool + message: str + flagged: int = 0 + staged: int = 0 + skipped: int = 0 + failed: int = 0 + total_size_bytes: float = 0 + errors: List[str] = [] + + +@router.post("/cleanup-suggestions/flag", response_model=SuggestionActionResponse) +@limiter.limit(RateLimits.CLEANUP_OPERATION) +async def flag_cleanup_suggestions( + request: Request, + body: FlagSuggestionsRequest, + db: AsyncSession = Depends(get_db), + current_user=Depends(get_current_user), +): + """ + Flag suggested media items for cleanup with a grace period. + + Sets flagged_for_cleanup = true and schedules cleanup after the grace period. + Items flagged manually (not by a rule) have flagged_by_rule_id = null so they + can be distinguished from rule-driven flags. 
+ """ + now = datetime.now(timezone.utc) + scheduled_at = now + timedelta(days=body.grace_period_days) + + # Fetch all requested items in one query — avoid N+1 + result = await db.execute( + select(MediaItem).where(MediaItem.id.in_(body.media_ids)) + ) + items_by_id = {item.id: item for item in result.scalars().all()} + + flagged_count = 0 + skipped_count = 0 + failed_count = 0 + total_size = 0.0 + errors: List[str] = [] + flagged_titles: List[str] = [] + + for media_id in body.media_ids: + item = items_by_id.get(media_id) + if not item: + failed_count += 1 + errors.append(f"Media item {media_id} not found") + continue + + # Skip items already flagged or staged — don't silently overwrite rule flags + if item.flagged_for_cleanup: + skipped_count += 1 + errors.append(f"{item.title}: already flagged for cleanup") + continue + if item.is_staged: + skipped_count += 1 + errors.append(f"{item.title}: already staged") + continue + + item.flagged_for_cleanup = True + item.flagged_at = now + item.flagged_by_rule_id = None # Manual flag, not rule-driven + item.scheduled_cleanup_at = scheduled_at + flagged_count += 1 + total_size += item.size_bytes or 0 + flagged_titles.append(item.title) + + if flagged_count > 0: + await db.commit() + + # Audit log the bulk action + await AuditService.log( + db=db, + action=AuditActionType.SUGGESTION_FLAGGED, + request=request, + user=current_user, + resource_type="media", + details={ + "media_ids": [mid for mid in body.media_ids if mid in items_by_id and items_by_id[mid].flagged_for_cleanup], + "titles": flagged_titles, + "grace_period_days": body.grace_period_days, + "scheduled_cleanup_at": scheduled_at.isoformat(), + "count": flagged_count, + "total_size_bytes": total_size, + }, + ) + + # Create cleanup log entries for each flagged item + for media_id in body.media_ids: + item = items_by_id.get(media_id) + if item and item.flagged_for_cleanup and item.flagged_at == now: + log_entry = CleanupLog( + media_item_id=item.id, + rule_id=None, + 
action="flagged_from_suggestion", + status="success", + details={ + "grace_period_days": body.grace_period_days, + "scheduled_cleanup_at": scheduled_at.isoformat(), + "flagged_by_user": current_user.username, + }, + media_title=item.title, + media_path=item.path, + media_size_bytes=item.size_bytes, + ) + db.add(log_entry) + await db.commit() + + logger.info( + f"User {current_user.username} flagged {flagged_count} items from suggestions " + f"(skipped={skipped_count}, failed={failed_count}, grace={body.grace_period_days}d)" + ) + + return SuggestionActionResponse( + success=failed_count == 0 and flagged_count > 0, + message=f"Flagged {flagged_count} item(s) for cleanup (grace period: {body.grace_period_days} days)", + flagged=flagged_count, + skipped=skipped_count, + failed=failed_count, + total_size_bytes=total_size, + errors=errors, + ) + + +@router.post("/cleanup-suggestions/stage", response_model=SuggestionActionResponse) +@limiter.limit(RateLimits.CLEANUP_OPERATION) +async def stage_cleanup_suggestions( + request: Request, + body: StageSuggestionsRequest, + db: AsyncSession = Depends(get_db), + current_user=Depends(get_current_user), +): + """ + Move suggested media items directly to staging. + + Bypasses rules entirely — immediately relocates files to the staging + directory and starts the grace period countdown. Requires staging to be + enabled for each item's library (or globally). 
+ """ + from ...services.staging import StagingService + from ...services.emby import EmbyService + + staging_service = StagingService(db) + emby_service = EmbyService(db) + + # Fetch all requested items in one query + result = await db.execute( + select(MediaItem).where(MediaItem.id.in_(body.media_ids)) + ) + items_by_id = {item.id: item for item in result.scalars().all()} + + staged_count = 0 + skipped_count = 0 + failed_count = 0 + total_size = 0.0 + errors: List[str] = [] + staged_titles: List[str] = [] + + for media_id in body.media_ids: + item = items_by_id.get(media_id) + if not item: + failed_count += 1 + errors.append(f"Media item {media_id} not found") + continue + + # Skip already-staged items + if item.is_staged: + skipped_count += 1 + errors.append(f"{item.title}: already staged") + continue + + stage_result = await staging_service.move_to_staging(item, emby_service) + if stage_result["success"]: + staged_count += 1 + total_size += item.size_bytes or 0 + staged_titles.append(item.title) + else: + failed_count += 1 + errors.append(f"{item.title}: {stage_result.get('error', 'Unknown error')}") + + if staged_count > 0: + # Audit log the bulk action + await AuditService.log( + db=db, + action=AuditActionType.SUGGESTION_STAGED, + request=request, + user=current_user, + resource_type="media", + details={ + "media_ids": [mid for mid in body.media_ids if mid in items_by_id and getattr(items_by_id[mid], 'is_staged', False)], + "titles": staged_titles, + "count": staged_count, + "total_size_bytes": total_size, + }, + ) + + logger.info( + f"User {current_user.username} staged {staged_count} items from suggestions " + f"(skipped={skipped_count}, failed={failed_count})" + ) + + return SuggestionActionResponse( + success=failed_count == 0 and staged_count > 0, + message=f"Staged {staged_count} item(s) for cleanup", + staged=staged_count, + skipped=skipped_count, + failed=failed_count, + total_size_bytes=total_size, + errors=errors, + ) From 
69d5a525f5f68250389290dfdb82a0cbdf50994a Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:34:37 +0100 Subject: [PATCH 102/122] feat: add suggestion_flagged and suggestion_staged actions to AuditActionType --- backend/app/models/database.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/backend/app/models/database.py b/backend/app/models/database.py index c1f993f..925f065 100644 --- a/backend/app/models/database.py +++ b/backend/app/models/database.py @@ -115,6 +115,8 @@ class AuditActionType(str, Enum): # Additional actions MEDIA_FLAGGED = "media_flagged" + SUGGESTION_FLAGGED = "suggestion_flagged" + SUGGESTION_STAGED = "suggestion_staged" SYNC_COMPLETED = "sync_completed" ERROR = "error" TEST = "test" From b8d95d6286fad0b6914c5a4ae828d192cfac8f41 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:34:42 +0100 Subject: [PATCH 103/122] feat: implement flagging and staging actions for cleanup suggestions with confirmation dialogs --- frontend/src/pages/CleanupSuggestions.tsx | 420 +++++++++++++++++----- 1 file changed, 340 insertions(+), 80 deletions(-) diff --git a/frontend/src/pages/CleanupSuggestions.tsx b/frontend/src/pages/CleanupSuggestions.tsx index d5ca0e0..86a5506 100644 --- a/frontend/src/pages/CleanupSuggestions.tsx +++ b/frontend/src/pages/CleanupSuggestions.tsx @@ -1,8 +1,7 @@ -import { useState } from 'react' -import { useQuery } from '@tanstack/react-query' +import { useState, useMemo, useCallback } from 'react' +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query' import { LightBulbIcon, - TrashIcon, FilmIcon, TvIcon, ArrowPathIcon, @@ -11,9 +10,13 @@ import { UsersIcon, ClockIcon, ServerIcon, + FlagIcon, + ArchiveBoxIcon, + CheckIcon, } from '@heroicons/react/24/outline' import api from '../lib/api' import { formatBytes } from '../lib/utils' +import ConfirmDialog from '../components/ConfirmDialog' interface CleanupSuggestion { item_id: number @@ -47,6 +50,25 @@ interface 
SuggestionsResponse { summary: SuggestionSummary } +interface SuggestionActionResponse { + success: boolean + message: string + flagged: number + staged: number + skipped: number + failed: number + total_size_bytes: number + errors: string[] +} + +interface StagingSettings { + enabled: boolean + staging_path: string + grace_period_days: number + library_name: string + auto_restore_on_watch: boolean +} + const categoryInfo: Record = { unwatched: { label: 'Unwatched', @@ -80,9 +102,23 @@ const categoryInfo: Record(null) + const [selectedIds, setSelectedIds] = useState>(new Set()) + const [gracePeriodDays, setGracePeriodDays] = useState(7) + + // Confirmation dialog state + const [confirmAction, setConfirmAction] = useState<'flag' | 'stage' | null>(null) const { data, isLoading, isFetching } = useQuery({ queryKey: ['cleanup-suggestions', days], @@ -92,23 +128,118 @@ export default function CleanupSuggestions() { }, }) + // Fetch staging settings to know if staging is enabled + const { data: stagingSettings } = useQuery({ + queryKey: ['staging-settings'], + queryFn: async () => { + const res = await api.get('/staging/settings') + return res.data + }, + }) + + const stagingEnabled = stagingSettings?.enabled ?? false + const suggestions = data?.suggestions || [] const summary = data?.summary // Filter by category - const filtered = filterCategory - ? 
suggestions.filter(s => { - const reasonText = s.reasons.join(' ') - switch (filterCategory) { - case 'unwatched': return reasonText.includes('Never watched') - case 'abandoned': return reasonText.includes('Abandoned') - case 'low_engagement': return reasonText.includes('Low engagement') - case 'stale': return reasonText.includes('not revisited') - case 'storage_hog': return reasonText.includes('Large file') - default: return true - } + const filtered = useMemo(() => { + if (!filterCategory) return suggestions + return suggestions.filter(s => { + const reasonText = s.reasons.join(' ') + switch (filterCategory) { + case 'unwatched': return reasonText.includes('Never watched') + case 'abandoned': return reasonText.includes('Abandoned') + case 'low_engagement': return reasonText.includes('Low engagement') + case 'stale': return reasonText.includes('not revisited') + case 'storage_hog': return reasonText.includes('Large file') + default: return true + } + }) + }, [suggestions, filterCategory]) + + // Selection helpers + const toggleSelect = useCallback((id: number) => { + setSelectedIds(prev => { + const next = new Set(prev) + if (next.has(id)) { + next.delete(id) + } else { + next.add(id) + } + return next + }) + }, []) + + const selectAllFiltered = useCallback(() => { + setSelectedIds(prev => { + const allSelected = filtered.every(s => prev.has(s.item_id)) + if (allSelected) { + // Deselect all filtered + const next = new Set(prev) + filtered.forEach(s => next.delete(s.item_id)) + return next + } + // Select all filtered + const next = new Set(prev) + filtered.forEach(s => next.add(s.item_id)) + return next + }) + }, [filtered]) + + const selectedSuggestions = useMemo( + () => suggestions.filter(s => selectedIds.has(s.item_id)), + [suggestions, selectedIds] + ) + + const selectedTotalSize = useMemo( + () => selectedSuggestions.reduce((sum, s) => sum + s.size_bytes, 0), + [selectedSuggestions] + ) + + const allFilteredSelected = filtered.length > 0 && filtered.every(s 
=> selectedIds.has(s.item_id)) + + // Flag mutation + const flagMutation = useMutation({ + mutationFn: async () => { + const ids = [...selectedIds] + const res = await api.post('/media/cleanup-suggestions/flag', { + media_ids: ids, + grace_period_days: gracePeriodDays, }) - : suggestions + return res.data + }, + onSuccess: () => { + setSelectedIds(new Set()) + setConfirmAction(null) + queryClient.invalidateQueries({ queryKey: ['cleanup-suggestions'] }) + }, + onError: () => { + setConfirmAction(null) + }, + }) + + // Stage mutation + const stageMutation = useMutation({ + mutationFn: async () => { + const ids = [...selectedIds] + const res = await api.post('/media/cleanup-suggestions/stage', { + media_ids: ids, + }) + return res.data + }, + onSuccess: () => { + setSelectedIds(new Set()) + setConfirmAction(null) + queryClient.invalidateQueries({ queryKey: ['cleanup-suggestions'] }) + queryClient.invalidateQueries({ queryKey: ['staging'] }) + }, + onError: () => { + setConfirmAction(null) + }, + }) + + const isActioning = flagMutation.isPending || stageMutation.isPending const dayOptions = [ { value: 30, label: '30 days' }, @@ -201,6 +332,60 @@ export default function CleanupSuggestions() {
)} + {/* Action Bar — visible when items are selected */} + {selectedIds.size > 0 && ( +
+
+
+ +
+
+ {selectedIds.size} item{selectedIds.size !== 1 ? 's' : ''} selected + ({formatBytes(selectedTotalSize)}) +
+ +
+
+ {/* Grace period selector for flagging */} +
+ + +
+ + {stagingEnabled && ( + + )} +
+
+ )} + {/* Loading */} {(isLoading || isFetching) && (
@@ -218,79 +403,129 @@ export default function CleanupSuggestions() {
)} + {/* Select All bar */} + {!isLoading && filtered.length > 0 && ( +
+ +
+ )} + {/* Suggestions List */} {!isLoading && filtered.length > 0 && (
- {filtered.map((suggestion) => ( -
-
-
-
- {suggestion.media_type === 'movie' ? ( - - ) : ( - - )} -
-
-

- {suggestion.title} -

-
- {formatBytes(suggestion.size_bytes)} - - {suggestion.watch_count} plays - - {suggestion.unique_viewers} viewer{suggestion.unique_viewers !== 1 ? 's' : ''} - {suggestion.avg_progress > 0 && ( - <> - - Avg progress: {suggestion.avg_progress}% - - )} - {suggestion.last_watched_at && ( - <> - - Last watched: {new Date(suggestion.last_watched_at).toLocaleDateString()} - - )} - {suggestion.added_at && ( - <> - - Added: {new Date(suggestion.added_at).toLocaleDateString()} - + {filtered.map((suggestion) => { + const isSelected = selectedIds.has(suggestion.item_id) + return ( +
toggleSelect(suggestion.item_id)} + > +
+
+
+ toggleSelect(suggestion.item_id)} + onClick={(e) => e.stopPropagation()} + className="w-4 h-4 rounded border-dark-500 bg-dark-700 text-primary-500 focus:ring-primary-500 focus:ring-offset-0" + /> + {suggestion.media_type === 'movie' ? ( + + ) : ( + )}
-
- {suggestion.reasons.map((reason, i) => ( - - {reason} - - ))} +
+

+ {suggestion.title} +

+
+ {formatBytes(suggestion.size_bytes)} + + {suggestion.watch_count} plays + + {suggestion.unique_viewers} viewer{suggestion.unique_viewers !== 1 ? 's' : ''} + {suggestion.avg_progress > 0 && ( + <> + + Avg progress: {suggestion.avg_progress}% + + )} + {suggestion.last_watched_at && ( + <> + + Last watched: {new Date(suggestion.last_watched_at).toLocaleDateString()} + + )} + {suggestion.added_at && ( + <> + + Added: {new Date(suggestion.added_at).toLocaleDateString()} + + )} +
+
+ {suggestion.reasons.map((reason, i) => ( + + {reason} + + ))} +
-
-
-
-
= 40 ? 'text-red-400' : - suggestion.score >= 25 ? 'text-yellow-400' : - 'text-dark-300' - }`}> - Score: {suggestion.score} +
+
+
= 40 ? 'text-red-400' : + suggestion.score >= 25 ? 'text-yellow-400' : + 'text-dark-300' + }`}> + Score: {suggestion.score} +
+
cleanup priority
-
cleanup priority
-
- ))} + ) + })} +
+ )} + + {/* Mutation result feedback */} + {(flagMutation.isSuccess || stageMutation.isSuccess) && ( +
+ {flagMutation.data?.message || stageMutation.data?.message} + {((flagMutation.data?.errors?.length ?? 0) > 0 || (stageMutation.data?.errors?.length ?? 0) > 0) && ( +
    + {(flagMutation.data?.errors || stageMutation.data?.errors || []).map((err, i) => ( +
  • {err}
  • + ))} +
+ )} +
+ )} + + {(flagMutation.isError || stageMutation.isError) && ( +
+ Failed to perform action. Please try again.
)} @@ -304,11 +539,36 @@ export default function CleanupSuggestions() {
  • Stale — Fully watched content that hasn't been revisited
  • Storage Hog — Files larger than 5 GB with minimal play counts
  • -

    - Suggestions are ranked by a composite score. Higher scores indicate stronger cleanup candidates. - Use these insights to create or refine your cleanup rules. +

    + Select items and use the action bar to flag them for cleanup (with a grace period) or move them directly to staging. + Flagged items will be processed by the scheduled cleanup job after the grace period expires. + Staged items are moved to a separate directory and can still be restored from the Staging page.

    + + {/* Confirmation Dialog — Flag */} + flagMutation.mutate()} + onCancel={() => setConfirmAction(null)} + variant="warning" + /> + + {/* Confirmation Dialog — Stage */} + stageMutation.mutate()} + onCancel={() => setConfirmAction(null)} + variant="danger" + />
    ) } From 40974b580aea2f7f07001cb1314822b711b025c3 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:49:58 +0100 Subject: [PATCH 104/122] feat: update .env.example with structured sections and additional configuration options --- .env.example | 74 ++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 66 insertions(+), 8 deletions(-) diff --git a/.env.example b/.env.example index be5c2e0..8c6f61b 100644 --- a/.env.example +++ b/.env.example @@ -1,19 +1,67 @@ -# Timezone +# ═══════════════════════════════════════════════════════════════════ +# GENERAL +# ═══════════════════════════════════════════════════════════════════ + +# Timezone (used for scheduled jobs and log timestamps) TZ=Europe/Berlin -# Security - IMPORTANT: Generate a secure random key for production -# You can generate one with: openssl rand -hex 32 +# Optional: Debug mode — disables Secure cookie flag, relaxes secret key +# validation, and shows more verbose logs. Never enable in production! +# DEBUG=false + +# ═══════════════════════════════════════════════════════════════════ +# SECURITY & AUTHENTICATION +# ═══════════════════════════════════════════════════════════════════ +# MediaCurator uses HttpOnly cookie-based auth. Tokens are never +# exposed to JavaScript — the browser handles everything automatically. + +# REQUIRED: A strong random key used to sign JWT tokens. +# Generate one with: openssl rand -hex 32 SECRET_KEY=change-me-to-a-secure-random-key -# Media path on your host system -MEDIA_PATH=/path/to/your/media +# Access token lifetime in minutes (default: 15) +# Short-lived JWT stored in an HttpOnly cookie, scoped to /api. +# ACCESS_TOKEN_EXPIRE_MINUTES=15 + +# Refresh token lifetime in days (default: 30) +# Opaque token stored in an HttpOnly cookie, scoped to /api/auth. +# Rotated on every refresh (old token is revoked). 
+# REFRESH_TOKEN_EXPIRE_DAYS=30 + +# ═══════════════════════════════════════════════════════════════════ +# CORS & REVERSE PROXY +# ═══════════════════════════════════════════════════════════════════ +# These settings are critical when running behind a reverse proxy +# (Nginx, Traefik, Caddy, etc.). + +# Comma-separated list of allowed origins for CORS. +# Must match the URL users access in their browser. +# Default: http://localhost:5173,http://localhost:8080 +# Example: https://media.example.com +# CORS_ORIGINS=http://localhost:5173,http://localhost:8080 + +# Comma-separated list of trusted reverse proxy IPs. +# When set, X-Forwarded-For / X-Real-IP headers are only trusted +# from these IPs. Leave empty to always use the direct connection IP. +# Example: 172.18.0.1,10.0.0.1 +# TRUSTED_PROXIES= + +# ═══════════════════════════════════════════════════════════════════ +# ADMIN USER +# ═══════════════════════════════════════════════════════════════════ +# Optional: Pre-create an admin user on first startup. +# If not set, the first user to register via the web UI becomes admin. -# Optional: Pre-create admin user # ADMIN_USER=admin # ADMIN_PASSWORD=your-secure-password -# Optional: Debug mode -# DEBUG=false +# ═══════════════════════════════════════════════════════════════════ +# MEDIA +# ═══════════════════════════════════════════════════════════════════ + +# Path to your media directory on the host system. +# Must match what Emby/Jellyfin sees (see docker-compose.yml for details). 
+MEDIA_PATH=/path/to/your/media # ═══════════════════════════════════════════════════════════════════ # DATABASE CONFIGURATION @@ -31,3 +79,13 @@ MEDIA_PATH=/path/to/your/media # POSTGRES_USER=mediacurator # POSTGRES_PASSWORD=your-secure-password # POSTGRES_DB=mediacurator + +# ═══════════════════════════════════════════════════════════════════ +# MAINTENANCE & SCHEDULER +# ═══════════════════════════════════════════════════════════════════ + +# How many days to keep audit log entries before automatic cleanup (default: 90) +# AUDIT_RETENTION_DAYS=90 + +# Interval in minutes between automated cleanup checks (default: 60) +# CLEANUP_CHECK_INTERVAL_MINUTES=60 From 41c62ad77c76d564b0448204bc4d5757a0dbb352 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:50:25 +0100 Subject: [PATCH 105/122] feat: add CORS and proxy configuration options to Docker Compose files --- docker-compose.dev.yml | 4 ++++ docker-compose.postgres.yml | 17 +++++++++++++++++ docker-compose.yml | 16 ++++++++++++++++ 3 files changed, 37 insertions(+) diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 840705a..b085882 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -29,6 +29,10 @@ services: # Development settings - DEBUG=true + # CORS & proxy (defaults work for local dev: localhost:5173 + localhost:8080) + # - CORS_ORIGINS=http://localhost:5173,http://localhost:8080 + # - TRUSTED_PROXIES= + # Optional: Pre-create admin user for dev # - INITIAL_ADMIN_USER=admin # - INITIAL_ADMIN_PASSWORD=admin123 diff --git a/docker-compose.postgres.yml b/docker-compose.postgres.yml index be3ba8e..e325e53 100644 --- a/docker-compose.postgres.yml +++ b/docker-compose.postgres.yml @@ -22,6 +22,23 @@ services: - POSTGRES_USER=${POSTGRES_USER:-mediacurator} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-mediacurator} - POSTGRES_DB=${POSTGRES_DB:-mediacurator} + + # ── Cookie Auth & Reverse Proxy ────────────────────────────────── + # CORS: Comma-separated origins matching the 
URL users see in the browser. + # - CORS_ORIGINS=https://media.example.com + + # Trusted proxy IPs for correct client IP resolution. + # - TRUSTED_PROXIES=172.18.0.1 + + # Optional: Token lifetimes + # - ACCESS_TOKEN_EXPIRE_MINUTES=15 + # - REFRESH_TOKEN_EXPIRE_DAYS=30 + + # Optional: Audit log retention in days + # - AUDIT_RETENTION_DAYS=90 + + # Optional: Debug mode (not recommended for production) + # - DEBUG=false volumes: - mediacurator_config:/app/config diff --git a/docker-compose.yml b/docker-compose.yml index 1742220..48f985e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -30,6 +30,22 @@ services: # Database - stored in /app/config (separate from media paths) - DATABASE_URL=sqlite+aiosqlite:////app/config/mediacurator.db + # ── Cookie Auth & Reverse Proxy ────────────────────────────────── + # CORS: Comma-separated origins matching the URL users see in the browser. + # Must be set when behind a reverse proxy with a custom domain! + # - CORS_ORIGINS=https://media.example.com + + # Trusted proxy IPs — only these may set X-Forwarded-For / X-Real-IP. + # Required for correct client IP resolution (rate limiting, audit logs). 
+ # - TRUSTED_PROXIES=172.18.0.1 + + # Optional: Token lifetimes (sensible defaults, rarely need changing) + # - ACCESS_TOKEN_EXPIRE_MINUTES=15 + # - REFRESH_TOKEN_EXPIRE_DAYS=30 + + # Optional: Audit log retention in days + # - AUDIT_RETENTION_DAYS=90 + # Optional: Pre-create admin user (useful for automated deployments) # If not set, first registered user via web UI becomes admin automatically # - INITIAL_ADMIN_USER=${ADMIN_USER:-} From dd55256136dd78ba8796a2df79b740caa7e0d43d Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:50:29 +0100 Subject: [PATCH 106/122] feat: remove docker-compose configuration file --- docker-compose.running.yml | 120 ------------------------------------- 1 file changed, 120 deletions(-) delete mode 100644 docker-compose.running.yml diff --git a/docker-compose.running.yml b/docker-compose.running.yml deleted file mode 100644 index 5b8e24b..0000000 --- a/docker-compose.running.yml +++ /dev/null @@ -1,120 +0,0 @@ -# MediaCurator Production Deployment -# Komodo Stack Configuration for existing media network - -services: - mediacurator: - # renovate: datasource=docker depName=ghcr.io/serph91p/mediacurator - image: ghcr.io/serph91p/mediacurator:dev.0.0.133 - container_name: mediacurator - restart: unless-stopped - - labels: - - "komodo.stack=mediacurator" - - "komodo.enabled=true" - - # Run as non-root user (appuser with UID 1000) - # Note: Named volumes are managed by Docker, no manual permission fixes needed - user: "1000:1000" - - environment: - # Timezone - - TZ=${TZ:-Europe/Berlin} - - # Security - IMPORTANT: Change this in production! - - SECRET_KEY=${SECRET_KEY:?ERROR: SECRET_KEY must be set. 
Generate one with: openssl rand -hex 32} - - # Database - - POSTGRES_HOST=postgres - - POSTGRES_PORT=5432 - - POSTGRES_USER=${POSTGRES_USER:-mediacurator} - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-mediacurator} - - POSTGRES_DB=${POSTGRES_DB:-mediacurator} - - # Optional: Pre-create admin user (useful for automated deployments) - # If not set, first registered user via web UI becomes admin automatically - # - INITIAL_ADMIN_USER=${ADMIN_USER:-} - # - INITIAL_ADMIN_PASSWORD=${ADMIN_PASSWORD:-} - - # Optional: Debug mode (not recommended for production) - # - DEBUG=false - - volumes: - # Application data (database, config) - using named volume - - mediacurator_data:/app/config - - # Application logs - using named volume - - mediacurator_logs:/app/logs - - # Media mount - REQUIRED: Point to your actual media location - # Mount as read-only (:ro) for safety - - ${MEDIA_PATH:-/media}:/data - - # Optional: Additional media paths - # - /mnt/movies:/media/movies:ro - # - /mnt/tv:/media/tv:ro - - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8080/api/health"] - interval: 30s - timeout: 10s - retries: 3 - start_period: 30s - - depends_on: - postgres: - condition: service_healthy - - # Resource limits (optional but recommended) - deploy: - resources: - limits: - cpus: '2.0' - memory: 2G - reservations: - cpus: '0.5' - memory: 512M - - networks: - - media - - mediacurator - - postgres: - image: postgres:18.1-alpine - container_name: mediacurator_postgres - restart: unless-stopped - - environment: - - POSTGRES_USER=${POSTGRES_USER:-mediacurator} - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-mediacurator} - - POSTGRES_DB=${POSTGRES_DB:-mediacurator} - - volumes: - - postgres_data:/var/lib/postgresql/data - - labels: - - "komodo.stack=mediacurator" - - "komodo.enabled=true" - - healthcheck: - test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-mediacurator} -d ${POSTGRES_DB:-mediacurator}"] - interval: 10s - timeout: 5s - retries: 5 - start_period: 10s - - networks: - 
- mediacurator - -volumes: - mediacurator_data: - driver: local - mediacurator_logs: - driver: local - postgres_data: - driver: local - -networks: - media: - external: true - name: media - mediacurator: \ No newline at end of file From e7dc9c77700b9eb89eabd9197eaeb055ba462c26 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:53:28 +0100 Subject: [PATCH 107/122] fix: ensure SECRET_KEY environment variable is properly quoted in Docker Compose --- docker-compose.postgres.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.postgres.yml b/docker-compose.postgres.yml index e325e53..5d5a59b 100644 --- a/docker-compose.postgres.yml +++ b/docker-compose.postgres.yml @@ -14,7 +14,7 @@ services: environment: - TZ=${TZ:-Europe/Berlin} - - SECRET_KEY=${SECRET_KEY:?ERROR: SECRET_KEY must be set. Generate one with: openssl rand -hex 32} + - "SECRET_KEY=${SECRET_KEY:?ERROR: SECRET_KEY must be set. Generate one with: openssl rand -hex 32}" # PostgreSQL Database Configuration - POSTGRES_HOST=postgres From b466c1938588fe9963650fcacc11b31db0834050 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Tue, 24 Feb 2026 19:53:32 +0100 Subject: [PATCH 108/122] fix: properly quote SECRET_KEY environment variable in Docker Compose --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 48f985e..941a27e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -25,7 +25,7 @@ services: - TZ=${TZ:-Europe/Berlin} # Security - REQUIRED: Set a strong, unique secret key! - - SECRET_KEY=${SECRET_KEY:?ERROR: SECRET_KEY must be set. Generate one with: openssl rand -hex 32} + - "SECRET_KEY=${SECRET_KEY:?ERROR: SECRET_KEY must be set. 
Generate one with: openssl rand -hex 32}" # Database - stored in /app/config (separate from media paths) - DATABASE_URL=sqlite+aiosqlite:////app/config/mediacurator.db From d04131a91b2cd6c77ea420776533a6811dbd7879 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Thu, 26 Feb 2026 19:36:31 +0100 Subject: [PATCH 109/122] fix: add params support to BaseServiceClient.post() and delete() The post() and delete() methods did not accept a params keyword argument, causing staging to fail with 'unexpected keyword argument' when creating or deleting Emby libraries. --- backend/app/services/base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/backend/app/services/base.py b/backend/app/services/base.py index d036355..537c0b4 100644 --- a/backend/app/services/base.py +++ b/backend/app/services/base.py @@ -88,17 +88,17 @@ async def get(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> A """Make a GET request.""" return await self._request("GET", endpoint, params=params) - async def post(self, endpoint: str, json: Optional[Dict[str, Any]] = None) -> Any: + async def post(self, endpoint: str, params: Optional[Dict[str, Any]] = None, json: Optional[Dict[str, Any]] = None) -> Any: """Make a POST request.""" - return await self._request("POST", endpoint, json=json) + return await self._request("POST", endpoint, params=params, json=json) async def put(self, endpoint: str, json: Optional[Dict[str, Any]] = None) -> Any: """Make a PUT request.""" return await self._request("PUT", endpoint, json=json) - async def delete(self, endpoint: str) -> Any: + async def delete(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> Any: """Make a DELETE request.""" - return await self._request("DELETE", endpoint) + return await self._request("DELETE", endpoint, params=params) @abstractmethod async def test_connection(self) -> Dict[str, Any]: From d745413e2c1b80fe6db9a4b7211438f3acdfb918 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Thu, 26 Feb 2026 
19:39:34 +0100 Subject: [PATCH 110/122] fix: use params kwarg instead of manual query strings in service clients - RadarrClient.delete_movie(): use params= instead of manual query string - SonarrClient.delete_series(): use params= instead of manual query string - BaseServiceClient.put(): add params support for consistency with other methods --- backend/app/services/base.py | 4 ++-- backend/app/services/radarr.py | 6 ++++-- backend/app/services/sonarr.py | 4 +++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/backend/app/services/base.py b/backend/app/services/base.py index 537c0b4..825cda3 100644 --- a/backend/app/services/base.py +++ b/backend/app/services/base.py @@ -92,9 +92,9 @@ async def post(self, endpoint: str, params: Optional[Dict[str, Any]] = None, jso """Make a POST request.""" return await self._request("POST", endpoint, params=params, json=json) - async def put(self, endpoint: str, json: Optional[Dict[str, Any]] = None) -> Any: + async def put(self, endpoint: str, params: Optional[Dict[str, Any]] = None, json: Optional[Dict[str, Any]] = None) -> Any: """Make a PUT request.""" - return await self._request("PUT", endpoint, json=json) + return await self._request("PUT", endpoint, params=params, json=json) async def delete(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> Any: """Make a DELETE request.""" diff --git a/backend/app/services/radarr.py b/backend/app/services/radarr.py index 496b778..449fb08 100644 --- a/backend/app/services/radarr.py +++ b/backend/app/services/radarr.py @@ -41,8 +41,10 @@ async def get_movie_by_id(self, movie_id: int) -> Dict[str, Any]: async def delete_movie(self, movie_id: int, delete_files: bool = True, add_exclusion: bool = False) -> None: """Delete a movie.""" - params = f"deleteFiles={str(delete_files).lower()}&addImportExclusion={str(add_exclusion).lower()}" - await self.delete(f"/api/v3/movie/{movie_id}?{params}") + await self.delete(f"/api/v3/movie/{movie_id}", params={ + "deleteFiles": 
str(delete_files).lower(), + "addImportExclusion": str(add_exclusion).lower(), + }) logger.info(f"Deleted movie {movie_id} from Radarr") async def unmonitor_movie(self, movie_id: int) -> Dict[str, Any]: diff --git a/backend/app/services/sonarr.py b/backend/app/services/sonarr.py index b11fd8d..af5a836 100644 --- a/backend/app/services/sonarr.py +++ b/backend/app/services/sonarr.py @@ -54,7 +54,9 @@ async def delete_episode_file(self, episode_file_id: int) -> None: async def delete_series(self, series_id: int, delete_files: bool = True) -> None: """Delete a series.""" - await self.delete(f"/api/v3/series/{series_id}?deleteFiles={str(delete_files).lower()}") + await self.delete(f"/api/v3/series/{series_id}", params={ + "deleteFiles": str(delete_files).lower(), + }) logger.info(f"Deleted series {series_id} from Sonarr") async def unmonitor_series(self, series_id: int) -> Dict[str, Any]: From 292345b980df4ae6b62d286cf9a4abd40ecb67c8 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Thu, 26 Feb 2026 20:49:54 +0100 Subject: [PATCH 111/122] fix: invalidate library cache after create/delete to prevent duplicate staging libraries --- backend/app/services/emby.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/backend/app/services/emby.py b/backend/app/services/emby.py index b261ff1..1c63db6 100644 --- a/backend/app/services/emby.py +++ b/backend/app/services/emby.py @@ -61,6 +61,12 @@ def _make_cache_key(self, endpoint: str, params: Optional[Dict] = None) -> str: cache_data = f"{self.base_url}:{endpoint}:{json.dumps(params or {}, sort_keys=True)}" return hashlib.sha256(cache_data.encode()).hexdigest() + def invalidate_library_cache(self) -> None: + """Invalidate the cached library list.""" + cache_key = self._make_cache_key("/Library/VirtualFolders") + self.cache.remove(cache_key) + logger.debug("Invalidated library cache") + def _get_headers(self) -> Dict[str, str]: return { "X-Emby-Token": self.api_key, @@ -383,6 +389,9 @@ async def create_library( result = 
await self.post("/Library/VirtualFolders", params={"name": name}, json=data) logger.info(f"Created library '{name}' at paths: {paths}") + # Invalidate cache so the new library is found + self.invalidate_library_cache() + # Get library ID libraries = await self.get_libraries() library = next((lib for lib in libraries if lib.get("Name") == name), None) @@ -398,6 +407,7 @@ async def create_library( async def delete_library(self, name: str) -> None: """Delete a library by name.""" await self.delete("/Library/VirtualFolders", params={"name": name}) + self.invalidate_library_cache() logger.info(f"Deleted library '{name}'") async def get_library_by_name(self, name: str) -> Optional[Dict[str, Any]]: From fc16805b000459e9ef7a375755897afe673ed60b Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Fri, 27 Feb 2026 07:58:00 +0100 Subject: [PATCH 112/122] fix: use collectionType query parameter for Emby library creation - Move collectionType from JSON body to query parameter (Emby API requirement) - Pass library_type through ensure_staging_library chain - Remove redundant refresh call (handled by refreshLibrary param) - Log library type on creation for debugging --- backend/app/services/emby.py | 40 +++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/backend/app/services/emby.py b/backend/app/services/emby.py index 1c63db6..fe9d220 100644 --- a/backend/app/services/emby.py +++ b/backend/app/services/emby.py @@ -355,7 +355,7 @@ async def create_library( self, name: str, paths: List[str], - library_type: str = "mixed", + library_type: str = "movies", refresh_library: bool = True ) -> Dict[str, Any]: """ @@ -364,16 +364,21 @@ async def create_library( Args: name: Library name paths: List of folder paths for the library - library_type: Type of library (movies, tvshows, mixed, music, etc.) + library_type: Type of library (movies, tvshows, music, etc.) 
refresh_library: Whether to refresh library after creation Returns: Dict with library info including ItemId """ + # Emby requires collectionType as query parameter, not in body + params = { + "name": name, + "collectionType": library_type, + "refreshLibrary": str(refresh_library).lower() + } + + # LibraryOptions in body for additional settings data = { - "Name": name, - "Paths": paths, - "CollectionType": library_type if library_type != "mixed" else None, "LibraryOptions": { "EnablePhotos": False, "EnableRealtimeMonitor": True, @@ -386,8 +391,8 @@ async def create_library( } } - result = await self.post("/Library/VirtualFolders", params={"name": name}, json=data) - logger.info(f"Created library '{name}' at paths: {paths}") + result = await self.post("/Library/VirtualFolders", params=params, json=data) + logger.info(f"Created library '{name}' (type: {library_type}) at paths: {paths}") # Invalidate cache so the new library is found self.invalidate_library_cache() @@ -396,12 +401,6 @@ async def create_library( libraries = await self.get_libraries() library = next((lib for lib in libraries if lib.get("Name") == name), None) - if library and refresh_library: - # Trigger library scan - library_id = library.get("ItemId") - if library_id: - await self.refresh_library(library_id) - return library or {"Name": name, "Paths": paths} async def delete_library(self, name: str) -> None: @@ -423,7 +422,8 @@ async def refresh_library(self, library_id: str) -> None: async def ensure_staging_library( self, library_name: str, - staging_path: str + staging_path: str, + library_type: str = "movies" ) -> Optional[str]: """ Ensure staging library exists, create if not. 
@@ -431,6 +431,7 @@ async def ensure_staging_library( Args: library_name: Name for the staging library staging_path: Path to staging directory + library_type: Type of library - 'movies' or 'tvshows' Returns: Library ItemId or None if failed @@ -443,16 +444,16 @@ async def ensure_staging_library( logger.info(f"Staging library '{library_name}' already exists with ID {library_id}") return library_id - # Create library + # Create library with correct type for proper media recognition try: library = await self.create_library( name=library_name, paths=[staging_path], - library_type="mixed", # Support both movies and series + library_type=library_type, refresh_library=True ) library_id = library.get("ItemId") - logger.info(f"Created staging library '{library_name}' with ID {library_id}") + logger.info(f"Created staging library '{library_name}' (type: {library_type}) with ID {library_id}") return library_id except ServiceClientError as e: logger.error(f"Failed to create staging library: {e}") @@ -503,7 +504,8 @@ async def get_primary_emby_service(self) -> Optional[ServiceConnection]: async def ensure_staging_library( self, library_name: str, - staging_path: str + staging_path: str, + library_type: str = "movies" ) -> Optional[str]: """Ensure staging library exists on primary Emby service.""" service = await self.get_primary_emby_service() @@ -515,7 +517,7 @@ async def ensure_staging_library( if not client: return None - return await client.ensure_staging_library(library_name, staging_path) + return await client.ensure_staging_library(library_name, staging_path, library_type) async def refresh_staging_library(self, library_id: str) -> bool: """Refresh staging library on primary Emby service.""" From 865baadd267cd171e53f211ad7a3c6790704af05 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Fri, 27 Feb 2026 08:12:07 +0100 Subject: [PATCH 113/122] fix: inherit library type from source library for staging - Determine library_type (movies/tvshows) from source library's media_type - 
Pass correct library_type to ensure_staging_library - Emby will now correctly recognize content in staging libraries --- backend/app/services/staging.py | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/backend/app/services/staging.py b/backend/app/services/staging.py index 3db8fd1..49164ba 100644 --- a/backend/app/services/staging.py +++ b/backend/app/services/staging.py @@ -220,11 +220,33 @@ async def move_to_staging( # If Emby service provided, update libraries if emby_service: - # Remove from original library (trigger Emby scan) - # Add to staging library (will be created if not exists) + # Determine library type from source library for correct Emby recognition + # Get the source library to copy its type + library = None + if media_item.library_id: + result = await self.db.execute( + select(Library).where(Library.id == media_item.library_id) + ) + library = result.scalar_one_or_none() + + # Map MediaCurator media types to Emby collection types + if library and library.media_type: + if library.media_type.value in ("series", "episode", "season"): + library_type = "tvshows" + else: + library_type = "movies" + else: + # Fallback: determine from media item type + if media_item.media_type.value in ("series", "episode", "season"): + library_type = "tvshows" + else: + library_type = "movies" + + # Create staging library with correct type staging_library_id = await emby_service.ensure_staging_library( settings['library_name'], - settings['staging_path'] + settings['staging_path'], + library_type ) media_item.staged_library_id = staging_library_id From f83a935b5974e339289605e015510af3b0dff8ac Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Mar 2026 05:30:17 +0000 Subject: [PATCH 114/122] chore(deps): bump docker/metadata-action from 5 to 6 Bumps [docker/metadata-action](https://github.com/docker/metadata-action) from 5 to 6. 
- [Release notes](https://github.com/docker/metadata-action/releases) - [Commits](https://github.com/docker/metadata-action/compare/v5...v6) --- updated-dependencies: - dependency-name: docker/metadata-action dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3dce074..76b0d84 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -55,7 +55,7 @@ jobs: - name: Docker metadata id: docker_meta - uses: docker/metadata-action@v5 + uses: docker/metadata-action@v6 with: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} tags: | From c7bc34b3af0b7d3a64eb5df22aea236d604857b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Mar 2026 05:30:21 +0000 Subject: [PATCH 115/122] chore(deps): bump docker/build-push-action from 6 to 7 Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6 to 7. - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v6...v7) --- updated-dependencies: - dependency-name: docker/build-push-action dependency-version: '7' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3dce074..49e34d7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -97,7 +97,7 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push Docker image - uses: docker/build-push-action@v6 + uses: docker/build-push-action@v7 with: context: . 
platforms: linux/amd64,linux/arm64 From bcb03855a951ff542ba1636152297e060b18176e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Mar 2026 05:30:25 +0000 Subject: [PATCH 116/122] chore(deps): bump docker/setup-qemu-action from 3 to 4 Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3 to 4. - [Release notes](https://github.com/docker/setup-qemu-action/releases) - [Commits](https://github.com/docker/setup-qemu-action/compare/v3...v4) --- updated-dependencies: - dependency-name: docker/setup-qemu-action dependency-version: '4' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3dce074..f60fd41 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -84,7 +84,7 @@ jobs: uses: actions/checkout@v6 - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@v4 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 From f8af2da748437f2ebb00e288122043fd7faa829c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Mar 2026 05:32:27 +0000 Subject: [PATCH 117/122] chore(deps): bump the npm-minor group across 1 directory with 17 updates Bumps the npm-minor group with 15 updates in the /frontend directory: | Package | From | To | | --- | --- | --- | | [@tailwindcss/vite](https://github.com/tailwindlabs/tailwindcss/tree/HEAD/packages/@tailwindcss-vite) | `4.1.18` | `4.2.1` | | [@tanstack/react-query](https://github.com/TanStack/query/tree/HEAD/packages/react-query) | `5.90.20` | `5.90.21` | | [axios](https://github.com/axios/axios) | `1.13.3` | `1.13.6` | | [react](https://github.com/facebook/react/tree/HEAD/packages/react) | `19.2.3` 
| `19.2.4` | | [@types/react](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/react) | `19.2.9` | `19.2.14` | | [react-dom](https://github.com/facebook/react/tree/HEAD/packages/react-dom) | `19.2.3` | `19.2.4` | | [react-hook-form](https://github.com/react-hook-form/react-hook-form) | `7.71.1` | `7.71.2` | | [react-router-dom](https://github.com/remix-run/react-router/tree/HEAD/packages/react-router-dom) | `7.13.0` | `7.13.1` | | [recharts](https://github.com/recharts/recharts) | `3.7.0` | `3.8.0` | | [zustand](https://github.com/pmndrs/zustand) | `5.0.10` | `5.0.11` | | [@typescript-eslint/eslint-plugin](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin) | `8.53.1` | `8.56.1` | | [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/tree/HEAD/packages/plugin-react) | `5.1.2` | `5.1.4` | | [autoprefixer](https://github.com/postcss/autoprefixer) | `10.4.23` | `10.4.27` | | [eslint-plugin-react-refresh](https://github.com/ArnaudBarre/eslint-plugin-react-refresh) | `0.4.26` | `0.5.2` | | [postcss](https://github.com/postcss/postcss) | `8.5.6` | `8.5.8` | Updates `@tailwindcss/vite` from 4.1.18 to 4.2.1 - [Release notes](https://github.com/tailwindlabs/tailwindcss/releases) - [Changelog](https://github.com/tailwindlabs/tailwindcss/blob/main/CHANGELOG.md) - [Commits](https://github.com/tailwindlabs/tailwindcss/commits/v4.2.1/packages/@tailwindcss-vite) Updates `@tanstack/react-query` from 5.90.20 to 5.90.21 - [Release notes](https://github.com/TanStack/query/releases) - [Changelog](https://github.com/TanStack/query/blob/main/packages/react-query/CHANGELOG.md) - [Commits](https://github.com/TanStack/query/commits/@tanstack/react-query@5.90.21/packages/react-query) Updates `axios` from 1.13.3 to 1.13.6 - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/v1.x/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v1.13.3...v1.13.6) 
Updates `react` from 19.2.3 to 19.2.4 - [Release notes](https://github.com/facebook/react/releases) - [Changelog](https://github.com/facebook/react/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/react/commits/v19.2.4/packages/react) Updates `@types/react` from 19.2.9 to 19.2.14 - [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases) - [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/react) Updates `react-dom` from 19.2.3 to 19.2.4 - [Release notes](https://github.com/facebook/react/releases) - [Changelog](https://github.com/facebook/react/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/react/commits/v19.2.4/packages/react-dom) Updates `react-hook-form` from 7.71.1 to 7.71.2 - [Release notes](https://github.com/react-hook-form/react-hook-form/releases) - [Changelog](https://github.com/react-hook-form/react-hook-form/blob/master/CHANGELOG.md) - [Commits](https://github.com/react-hook-form/react-hook-form/compare/v7.71.1...v7.71.2) Updates `react-router-dom` from 7.13.0 to 7.13.1 - [Release notes](https://github.com/remix-run/react-router/releases) - [Changelog](https://github.com/remix-run/react-router/blob/main/packages/react-router-dom/CHANGELOG.md) - [Commits](https://github.com/remix-run/react-router/commits/react-router-dom@7.13.1/packages/react-router-dom) Updates `recharts` from 3.7.0 to 3.8.0 - [Release notes](https://github.com/recharts/recharts/releases) - [Changelog](https://github.com/recharts/recharts/blob/main/CHANGELOG.md) - [Commits](https://github.com/recharts/recharts/compare/v3.7.0...v3.8.0) Updates `zustand` from 5.0.10 to 5.0.11 - [Release notes](https://github.com/pmndrs/zustand/releases) - [Commits](https://github.com/pmndrs/zustand/compare/v5.0.10...v5.0.11) Updates `@types/react` from 19.2.9 to 19.2.14 - [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases) - 
[Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/react) Updates `@typescript-eslint/eslint-plugin` from 8.53.1 to 8.56.1 - [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases) - [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/eslint-plugin/CHANGELOG.md) - [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.56.1/packages/eslint-plugin) Updates `@typescript-eslint/parser` from 8.53.1 to 8.56.1 - [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases) - [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/parser/CHANGELOG.md) - [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.56.1/packages/parser) Updates `@vitejs/plugin-react` from 5.1.2 to 5.1.4 - [Release notes](https://github.com/vitejs/vite-plugin-react/releases) - [Changelog](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/CHANGELOG.md) - [Commits](https://github.com/vitejs/vite-plugin-react/commits/plugin-react@5.1.4/packages/plugin-react) Updates `autoprefixer` from 10.4.23 to 10.4.27 - [Release notes](https://github.com/postcss/autoprefixer/releases) - [Changelog](https://github.com/postcss/autoprefixer/blob/main/CHANGELOG.md) - [Commits](https://github.com/postcss/autoprefixer/compare/10.4.23...10.4.27) Updates `eslint-plugin-react-refresh` from 0.4.26 to 0.5.2 - [Release notes](https://github.com/ArnaudBarre/eslint-plugin-react-refresh/releases) - [Changelog](https://github.com/ArnaudBarre/eslint-plugin-react-refresh/blob/main/CHANGELOG.md) - [Commits](https://github.com/ArnaudBarre/eslint-plugin-react-refresh/compare/v0.4.26...v0.5.2) Updates `postcss` from 8.5.6 to 8.5.8 - [Release notes](https://github.com/postcss/postcss/releases) - [Changelog](https://github.com/postcss/postcss/blob/main/CHANGELOG.md) - [Commits](https://github.com/postcss/postcss/compare/8.5.6...8.5.8) 
Updates `tailwindcss` from 4.1.18 to 4.2.1 - [Release notes](https://github.com/tailwindlabs/tailwindcss/releases) - [Changelog](https://github.com/tailwindlabs/tailwindcss/blob/main/CHANGELOG.md) - [Commits](https://github.com/tailwindlabs/tailwindcss/commits/v4.2.1/packages/tailwindcss) --- updated-dependencies: - dependency-name: "@tailwindcss/vite" dependency-version: 4.2.1 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: npm-minor - dependency-name: "@tanstack/react-query" dependency-version: 5.90.21 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: axios dependency-version: 1.13.6 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: react dependency-version: 19.2.4 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: "@types/react" dependency-version: 19.2.14 dependency-type: direct:development update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: react-dom dependency-version: 19.2.4 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: react-hook-form dependency-version: 7.71.2 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: react-router-dom dependency-version: 7.13.1 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: recharts dependency-version: 3.8.0 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: npm-minor - dependency-name: zustand dependency-version: 5.0.11 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: "@types/react" 
dependency-version: 19.2.14 dependency-type: direct:development update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: "@typescript-eslint/eslint-plugin" dependency-version: 8.56.1 dependency-type: direct:development update-type: version-update:semver-minor dependency-group: npm-minor - dependency-name: "@typescript-eslint/parser" dependency-version: 8.56.1 dependency-type: direct:development update-type: version-update:semver-minor dependency-group: npm-minor - dependency-name: "@vitejs/plugin-react" dependency-version: 5.1.4 dependency-type: direct:development update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: autoprefixer dependency-version: 10.4.27 dependency-type: direct:development update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: eslint-plugin-react-refresh dependency-version: 0.5.2 dependency-type: direct:development update-type: version-update:semver-minor dependency-group: npm-minor - dependency-name: postcss dependency-version: 8.5.8 dependency-type: direct:development update-type: version-update:semver-patch dependency-group: npm-minor - dependency-name: tailwindcss dependency-version: 4.2.1 dependency-type: direct:development update-type: version-update:semver-minor dependency-group: npm-minor ... 
Signed-off-by: dependabot[bot] --- frontend/package-lock.json | 649 +++++++++++++++++++------------------ frontend/package.json | 30 +- 2 files changed, 348 insertions(+), 331 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 61029a4..341c946 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -10,39 +10,39 @@ "dependencies": { "@headlessui/react": "^2.2.9", "@heroicons/react": "^2.2.0", - "@tailwindcss/vite": "^4.1.18", - "@tanstack/react-query": "^5.90.20", - "axios": "^1.13.3", + "@tailwindcss/vite": "^4.2.1", + "@tanstack/react-query": "^5.90.21", + "axios": "^1.13.6", "clsx": "^2.1.0", "date-fns": "^4.1.0", - "react": "^19.2.3", - "react-dom": "^19.2.3", - "react-hook-form": "^7.49.3", + "react": "^19.2.4", + "react-dom": "^19.2.4", + "react-hook-form": "^7.71.2", "react-hot-toast": "^2.4.1", - "react-router-dom": "^7.13.0", - "recharts": "^3.7.0", - "zustand": "^5.0.10" + "react-router-dom": "^7.13.1", + "recharts": "^3.8.0", + "zustand": "^5.0.11" }, "devDependencies": { - "@types/react": "^19.2.9", + "@types/react": "^19.2.14", "@types/react-dom": "^19.2.3", - "@typescript-eslint/eslint-plugin": "^8.53.1", + "@typescript-eslint/eslint-plugin": "^8.56.1", "@typescript-eslint/parser": "^8.53.1", - "@vitejs/plugin-react": "^5.1.2", - "autoprefixer": "^10.4.23", + "@vitejs/plugin-react": "^5.1.4", + "autoprefixer": "^10.4.27", "eslint": "^9.39.2", "eslint-plugin-react-hooks": "^7.0.1", - "eslint-plugin-react-refresh": "^0.4.5", - "postcss": "^8.5.6", + "eslint-plugin-react-refresh": "^0.5.2", + "postcss": "^8.5.8", "tailwindcss": "^4.1.18", "typescript": "^5.3.3", "vite": "^7.3.1" } }, "node_modules/@babel/code-frame": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.28.6.tgz", - "integrity": "sha512-JYgintcMjRiCvS8mMECzaEn+m3PfoQiyqukOMCCVQtoJGYJw8j/8LBJEiqkHLkfwCcs74E3pbAUFNg7d9VNJ+Q==", + "version": "7.29.0", + "resolved": 
"https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", "dev": true, "license": "MIT", "dependencies": { @@ -65,21 +65,21 @@ } }, "node_modules/@babel/core": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.6.tgz", - "integrity": "sha512-H3mcG6ZDLTlYfaSNi0iOKkigqMFvkTKlGUYlD8GW7nNOYRrevuA46iTypPyv+06V3fEmvvazfntkBU34L0azAw==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.28.6", - "@babel/generator": "^7.28.6", + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", "@babel/helper-compilation-targets": "^7.28.6", "@babel/helper-module-transforms": "^7.28.6", "@babel/helpers": "^7.28.6", - "@babel/parser": "^7.28.6", + "@babel/parser": "^7.29.0", "@babel/template": "^7.28.6", - "@babel/traverse": "^7.28.6", - "@babel/types": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", @@ -106,14 +106,14 @@ } }, "node_modules/@babel/generator": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.6.tgz", - "integrity": "sha512-lOoVRwADj8hjf7al89tvQ2a1lf53Z+7tiXMgpZJL3maQPDxh0DgLMN62B2MKUOFcoodBHLMbDM6WAbKgNy5Suw==", + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/parser": "^7.28.6", - "@babel/types": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", 
"@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" @@ -246,13 +246,13 @@ } }, "node_modules/@babel/parser": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.6.tgz", - "integrity": "sha512-TeR9zWR18BvbfPmGbLampPMW+uW1NZnJlRuuHso8i87QZNq2JRF9i6RgxRqtEq+wQGsS19NNTWr2duhnE49mfQ==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.28.6" + "@babel/types": "^7.29.0" }, "bin": { "parser": "bin/babel-parser.js" @@ -309,18 +309,18 @@ } }, "node_modules/@babel/traverse": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.6.tgz", - "integrity": "sha512-fgWX62k02qtjqdSNTAGxmKYY/7FSL9WAS1o2Hu5+I5m9T0yxZzr4cnrfXQ/MX0rIifthCSs6FKTlzYbJcPtMNg==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.28.6", - "@babel/generator": "^7.28.6", + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.6", + "@babel/parser": "^7.29.0", "@babel/template": "^7.28.6", - "@babel/types": "^7.28.6", + "@babel/types": "^7.29.0", "debug": "^4.3.1" }, "engines": { @@ -328,9 +328,9 @@ } }, "node_modules/@babel/types": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.6.tgz", - "integrity": "sha512-0ZrskXVEHSWIqZM/sQZ4EV3jZJXRkio/WCxaqKZP1g//CEWEPSfeZFcms4XeKBCHU0ZKnIkdJeU/kF+eRp5lBg==", + "version": "7.29.0", + "resolved": 
"https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", "dev": true, "license": "MIT", "dependencies": { @@ -1259,9 +1259,9 @@ } }, "node_modules/@rolldown/pluginutils": { - "version": "1.0.0-beta.53", - "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.53.tgz", - "integrity": "sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==", + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.3.tgz", + "integrity": "sha512-eybk3TjzzzV97Dlj5c+XrBFW57eTNhzod66y9HrBlzJ6NsCrWCp/2kaPS3K9wJmurBC0Tdw4yPjXKZqlznim3Q==", "dev": true, "license": "MIT" }, @@ -1612,47 +1612,47 @@ } }, "node_modules/@tailwindcss/node": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.18.tgz", - "integrity": "sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.1.tgz", + "integrity": "sha512-jlx6sLk4EOwO6hHe1oCGm1Q4AN/s0rSrTTPBGPM0/RQ6Uylwq17FuU8IeJJKEjtc6K6O07zsvP+gDO6MMWo7pg==", "license": "MIT", "dependencies": { - "@jridgewell/remapping": "^2.3.4", - "enhanced-resolve": "^5.18.3", + "@jridgewell/remapping": "^2.3.5", + "enhanced-resolve": "^5.19.0", "jiti": "^2.6.1", - "lightningcss": "1.30.2", + "lightningcss": "1.31.1", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", - "tailwindcss": "4.1.18" + "tailwindcss": "4.2.1" } }, "node_modules/@tailwindcss/oxide": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.18.tgz", - "integrity": "sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A==", + "version": "4.2.1", + "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.2.1.tgz", + "integrity": "sha512-yv9jeEFWnjKCI6/T3Oq50yQEOqmpmpfzG1hcZsAOaXFQPfzWprWrlHSdGPEF3WQTi8zu8ohC9Mh9J470nT5pUw==", "license": "MIT", "engines": { - "node": ">= 10" + "node": ">= 20" }, "optionalDependencies": { - "@tailwindcss/oxide-android-arm64": "4.1.18", - "@tailwindcss/oxide-darwin-arm64": "4.1.18", - "@tailwindcss/oxide-darwin-x64": "4.1.18", - "@tailwindcss/oxide-freebsd-x64": "4.1.18", - "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.18", - "@tailwindcss/oxide-linux-arm64-gnu": "4.1.18", - "@tailwindcss/oxide-linux-arm64-musl": "4.1.18", - "@tailwindcss/oxide-linux-x64-gnu": "4.1.18", - "@tailwindcss/oxide-linux-x64-musl": "4.1.18", - "@tailwindcss/oxide-wasm32-wasi": "4.1.18", - "@tailwindcss/oxide-win32-arm64-msvc": "4.1.18", - "@tailwindcss/oxide-win32-x64-msvc": "4.1.18" + "@tailwindcss/oxide-android-arm64": "4.2.1", + "@tailwindcss/oxide-darwin-arm64": "4.2.1", + "@tailwindcss/oxide-darwin-x64": "4.2.1", + "@tailwindcss/oxide-freebsd-x64": "4.2.1", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.1", + "@tailwindcss/oxide-linux-arm64-gnu": "4.2.1", + "@tailwindcss/oxide-linux-arm64-musl": "4.2.1", + "@tailwindcss/oxide-linux-x64-gnu": "4.2.1", + "@tailwindcss/oxide-linux-x64-musl": "4.2.1", + "@tailwindcss/oxide-wasm32-wasi": "4.2.1", + "@tailwindcss/oxide-win32-arm64-msvc": "4.2.1", + "@tailwindcss/oxide-win32-x64-msvc": "4.2.1" } }, "node_modules/@tailwindcss/oxide-android-arm64": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.18.tgz", - "integrity": "sha512-dJHz7+Ugr9U/diKJA0W6N/6/cjI+ZTAoxPf9Iz9BFRF2GzEX8IvXxFIi/dZBloVJX/MZGvRuFA9rqwdiIEZQ0Q==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.2.1.tgz", + "integrity": "sha512-eZ7G1Zm5EC8OOKaesIKuw77jw++QJ2lL9N+dDpdQiAB/c/B2wDh0QPFHbkBVrXnwNugvrbJFk1gK2SsVjwWReg==", "cpu": [ "arm64" ], 
@@ -1662,13 +1662,13 @@ "android" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-darwin-arm64": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.18.tgz", - "integrity": "sha512-Gc2q4Qhs660bhjyBSKgq6BYvwDz4G+BuyJ5H1xfhmDR3D8HnHCmT/BSkvSL0vQLy/nkMLY20PQ2OoYMO15Jd0A==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.2.1.tgz", + "integrity": "sha512-q/LHkOstoJ7pI1J0q6djesLzRvQSIfEto148ppAd+BVQK0JYjQIFSK3JgYZJa+Yzi0DDa52ZsQx2rqytBnf8Hw==", "cpu": [ "arm64" ], @@ -1678,13 +1678,13 @@ "darwin" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-darwin-x64": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.18.tgz", - "integrity": "sha512-FL5oxr2xQsFrc3X9o1fjHKBYBMD1QZNyc1Xzw/h5Qu4XnEBi3dZn96HcHm41c/euGV+GRiXFfh2hUCyKi/e+yw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.2.1.tgz", + "integrity": "sha512-/f/ozlaXGY6QLbpvd/kFTro2l18f7dHKpB+ieXz+Cijl4Mt9AI2rTrpq7V+t04nK+j9XBQHnSMdeQRhbGyt6fw==", "cpu": [ "x64" ], @@ -1694,13 +1694,13 @@ "darwin" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-freebsd-x64": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.18.tgz", - "integrity": "sha512-Fj+RHgu5bDodmV1dM9yAxlfJwkkWvLiRjbhuO2LEtwtlYlBgiAT4x/j5wQr1tC3SANAgD+0YcmWVrj8R9trVMA==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.2.1.tgz", + "integrity": "sha512-5e/AkgYJT/cpbkys/OU2Ei2jdETCLlifwm7ogMC7/hksI2fC3iiq6OcXwjibcIjPung0kRtR3TxEITkqgn0TcA==", "cpu": [ "x64" ], @@ -1710,13 +1710,13 @@ "freebsd" ], "engines": { - "node": ">= 10" + 
"node": ">= 20" } }, "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.18.tgz", - "integrity": "sha512-Fp+Wzk/Ws4dZn+LV2Nqx3IilnhH51YZoRaYHQsVq3RQvEl+71VGKFpkfHrLM/Li+kt5c0DJe/bHXK1eHgDmdiA==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.2.1.tgz", + "integrity": "sha512-Uny1EcVTTmerCKt/1ZuKTkb0x8ZaiuYucg2/kImO5A5Y/kBz41/+j0gxUZl+hTF3xkWpDmHX+TaWhOtba2Fyuw==", "cpu": [ "arm" ], @@ -1726,13 +1726,13 @@ "linux" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.18.tgz", - "integrity": "sha512-S0n3jboLysNbh55Vrt7pk9wgpyTTPD0fdQeh7wQfMqLPM/Hrxi+dVsLsPrycQjGKEQk85Kgbx+6+QnYNiHalnw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.2.1.tgz", + "integrity": "sha512-CTrwomI+c7n6aSSQlsPL0roRiNMDQ/YzMD9EjcR+H4f0I1SQ8QqIuPnsVp7QgMkC1Qi8rtkekLkOFjo7OlEFRQ==", "cpu": [ "arm64" ], @@ -1742,13 +1742,13 @@ "linux" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-linux-arm64-musl": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.18.tgz", - "integrity": "sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.2.1.tgz", + "integrity": "sha512-WZA0CHRL/SP1TRbA5mp9htsppSEkWuQ4KsSUumYQnyl8ZdT39ntwqmz4IUHGN6p4XdSlYfJwM4rRzZLShHsGAQ==", "cpu": [ "arm64" ], @@ -1758,13 +1758,13 @@ "linux" ], "engines": { - "node": 
">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-linux-x64-gnu": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.18.tgz", - "integrity": "sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.2.1.tgz", + "integrity": "sha512-qMFzxI2YlBOLW5PhblzuSWlWfwLHaneBE0xHzLrBgNtqN6mWfs+qYbhryGSXQjFYB1Dzf5w+LN5qbUTPhW7Y5g==", "cpu": [ "x64" ], @@ -1774,13 +1774,13 @@ "linux" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-linux-x64-musl": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.18.tgz", - "integrity": "sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.2.1.tgz", + "integrity": "sha512-5r1X2FKnCMUPlXTWRYpHdPYUY6a1Ar/t7P24OuiEdEOmms5lyqjDRvVY1yy9Rmioh+AunQ0rWiOTPE8F9A3v5g==", "cpu": [ "x64" ], @@ -1790,13 +1790,13 @@ "linux" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-wasm32-wasi": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.18.tgz", - "integrity": "sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.2.1.tgz", + "integrity": "sha512-MGFB5cVPvshR85MTJkEvqDUnuNoysrsRxd6vnk1Lf2tbiqNlXpHYZqkqOQalydienEWOHHFyyuTSYRsLfxFJ2Q==", "bundleDependencies": [ "@napi-rs/wasm-runtime", "@emnapi/core", @@ -1811,19 +1811,19 @@ "license": "MIT", "optional": true, 
"dependencies": { - "@emnapi/core": "^1.7.1", - "@emnapi/runtime": "^1.7.1", + "@emnapi/core": "^1.8.1", + "@emnapi/runtime": "^1.8.1", "@emnapi/wasi-threads": "^1.1.0", - "@napi-rs/wasm-runtime": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.1.1", "@tybys/wasm-util": "^0.10.1", - "tslib": "^2.4.0" + "tslib": "^2.8.1" }, "engines": { "node": ">=14.0.0" } }, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/core": { - "version": "1.7.1", + "version": "1.8.1", "inBundle": true, "license": "MIT", "optional": true, @@ -1833,7 +1833,7 @@ } }, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/runtime": { - "version": "1.7.1", + "version": "1.8.1", "inBundle": true, "license": "MIT", "optional": true, @@ -1851,7 +1851,7 @@ } }, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { - "version": "1.1.0", + "version": "1.1.1", "inBundle": true, "license": "MIT", "optional": true, @@ -1859,6 +1859,10 @@ "@emnapi/core": "^1.7.1", "@emnapi/runtime": "^1.7.1", "@tybys/wasm-util": "^0.10.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Brooooooklyn" } }, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@tybys/wasm-util": { @@ -1877,9 +1881,9 @@ "optional": true }, "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.18.tgz", - "integrity": "sha512-HjSA7mr9HmC8fu6bdsZvZ+dhjyGCLdotjVOgLA2vEqxEBZaQo9YTX4kwgEvPCpRh8o4uWc4J/wEoFzhEmjvPbA==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.1.tgz", + "integrity": "sha512-YlUEHRHBGnCMh4Nj4GnqQyBtsshUPdiNroZj8VPkvTZSoHsilRCwXcVKnG9kyi0ZFAS/3u+qKHBdDc81SADTRA==", "cpu": [ "arm64" ], @@ -1889,13 +1893,13 @@ "win32" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-win32-x64-msvc": { - "version": 
"4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.18.tgz", - "integrity": "sha512-bJWbyYpUlqamC8dpR7pfjA0I7vdF6t5VpUGMWRkXVE3AXgIZjYUYAK7II1GNaxR8J1SSrSrppRar8G++JekE3Q==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.2.1.tgz", + "integrity": "sha512-rbO34G5sMWWyrN/idLeVxAZgAKWrn5LiR3/I90Q9MkA67s6T1oB0xtTe+0heoBvHSpbU9Mk7i6uwJnpo4u21XQ==", "cpu": [ "x64" ], @@ -1905,18 +1909,18 @@ "win32" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/vite": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.18.tgz", - "integrity": "sha512-jVA+/UpKL1vRLg6Hkao5jldawNmRo7mQYrZtNHMIVpLfLhDml5nMRUo/8MwoX2vNXvnaXNNMedrMfMugAVX1nA==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.2.1.tgz", + "integrity": "sha512-TBf2sJjYeb28jD2U/OhwdW0bbOsxkWPwQ7SrqGf9sVcoYwZj7rkXljroBO9wKBut9XnmQLXanuDUeqQK0lGg/w==", "license": "MIT", "dependencies": { - "@tailwindcss/node": "4.1.18", - "@tailwindcss/oxide": "4.1.18", - "tailwindcss": "4.1.18" + "@tailwindcss/node": "4.2.1", + "@tailwindcss/oxide": "4.2.1", + "tailwindcss": "4.2.1" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" @@ -1933,9 +1937,9 @@ } }, "node_modules/@tanstack/react-query": { - "version": "5.90.20", - "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.20.tgz", - "integrity": "sha512-vXBxa+qeyveVO7OA0jX1z+DeyCA4JKnThKv411jd5SORpBKgkcVnYKCiBgECvADvniBX7tobwBmg01qq9JmMJw==", + "version": "5.90.21", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.21.tgz", + "integrity": "sha512-0Lu6y5t+tvlTJMTO7oh5NSpJfpg/5D41LlThfepTixPYkJ0sE2Jj0m0f6yYqujBwIXlId87e234+MxG3D3g7kg==", "license": "MIT", "dependencies": { "@tanstack/query-core": "5.90.20" @@ -2097,9 +2101,9 @@ "license": "MIT" }, "node_modules/@types/react": 
{ - "version": "19.2.9", - "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.9.tgz", - "integrity": "sha512-Lpo8kgb/igvMIPeNV2rsYKTgaORYdO1XGVZ4Qz3akwOj0ySGYMPlQWa8BaLn0G63D1aSaAQ5ldR06wCpChQCjA==", + "version": "19.2.14", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz", + "integrity": "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==", "devOptional": true, "license": "MIT", "dependencies": { @@ -2123,17 +2127,17 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.1.tgz", - "integrity": "sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.1.tgz", + "integrity": "sha512-Jz9ZztpB37dNC+HU2HI28Bs9QXpzCz+y/twHOwhyrIRdbuVDxSytJNDl6z/aAKlaRIwC7y8wJdkBv7FxYGgi0A==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.12.2", - "@typescript-eslint/scope-manager": "8.53.1", - "@typescript-eslint/type-utils": "8.53.1", - "@typescript-eslint/utils": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1", + "@typescript-eslint/scope-manager": "8.56.1", + "@typescript-eslint/type-utils": "8.56.1", + "@typescript-eslint/utils": "8.56.1", + "@typescript-eslint/visitor-keys": "8.56.1", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" @@ -2146,22 +2150,22 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.53.1", - "eslint": "^8.57.0 || ^9.0.0", + "@typescript-eslint/parser": "^8.56.1", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/parser": { - "version": "8.53.1", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.53.1.tgz", - "integrity": "sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.1.tgz", + "integrity": "sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.53.1", - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/typescript-estree": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1", + "@typescript-eslint/scope-manager": "8.56.1", + "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/typescript-estree": "8.56.1", + "@typescript-eslint/visitor-keys": "8.56.1", "debug": "^4.4.3" }, "engines": { @@ -2172,19 +2176,19 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.53.1.tgz", - "integrity": "sha512-WYC4FB5Ra0xidsmlPb+1SsnaSKPmS3gsjIARwbEkHkoWloQmuzcfypljaJcR78uyLA1h8sHdWWPHSLDI+MtNog==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.1.tgz", + "integrity": "sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.53.1", - "@typescript-eslint/types": "^8.53.1", + "@typescript-eslint/tsconfig-utils": "^8.56.1", + "@typescript-eslint/types": "^8.56.1", "debug": "^4.4.3" }, "engines": { @@ -2199,14 +2203,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": 
"8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.53.1.tgz", - "integrity": "sha512-Lu23yw1uJMFY8cUeq7JlrizAgeQvWugNQzJp8C3x8Eo5Jw5Q2ykMdiiTB9vBVOOUBysMzmRRmUfwFrZuI2C4SQ==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.1.tgz", + "integrity": "sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1" + "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/visitor-keys": "8.56.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2217,9 +2221,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.1.tgz", - "integrity": "sha512-qfvLXS6F6b1y43pnf0pPbXJ+YoXIC7HKg0UGZ27uMIemKMKA6XH2DTxsEDdpdN29D+vHV07x/pnlPNVLhdhWiA==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.1.tgz", + "integrity": "sha512-qOtCYzKEeyr3aR9f28mPJqBty7+DBqsdd63eO0yyDwc6vgThj2UjWfJIcsFeSucYydqcuudMOprZ+x1SpF3ZuQ==", "dev": true, "license": "MIT", "engines": { @@ -2234,15 +2238,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.53.1.tgz", - "integrity": "sha512-MOrdtNvyhy0rHyv0ENzub1d4wQYKb2NmIqG7qEqPWFW7Mpy2jzFC3pQ2yKDvirZB7jypm5uGjF2Qqs6OIqu47w==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.1.tgz", + "integrity": "sha512-yB/7dxi7MgTtGhZdaHCemf7PuwrHMenHjmzgUW1aJpO+bBU43OycnM3Wn+DdvDO/8zzA9HlhaJ0AUGuvri4oGg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.1", - 
"@typescript-eslint/typescript-estree": "8.53.1", - "@typescript-eslint/utils": "8.53.1", + "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/typescript-estree": "8.56.1", + "@typescript-eslint/utils": "8.56.1", "debug": "^4.4.3", "ts-api-utils": "^2.4.0" }, @@ -2254,14 +2258,14 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/types": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.53.1.tgz", - "integrity": "sha512-jr/swrr2aRmUAUjW5/zQHbMaui//vQlsZcJKijZf3M26bnmLj8LyZUpj8/Rd6uzaek06OWsqdofN/Thenm5O8A==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.1.tgz", + "integrity": "sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==", "dev": true, "license": "MIT", "engines": { @@ -2273,18 +2277,18 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.1.tgz", - "integrity": "sha512-RGlVipGhQAG4GxV1s34O91cxQ/vWiHJTDHbXRr0li2q/BGg3RR/7NM8QDWgkEgrwQYCvmJV9ichIwyoKCQ+DTg==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.1.tgz", + "integrity": "sha512-qzUL1qgalIvKWAf9C1HpvBjif+Vm6rcT5wZd4VoMb9+Km3iS3Cv9DY6dMRMDtPnwRAFyAi7YXJpTIEXLvdfPxg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.53.1", - "@typescript-eslint/tsconfig-utils": "8.53.1", - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/visitor-keys": "8.53.1", + "@typescript-eslint/project-service": "8.56.1", + "@typescript-eslint/tsconfig-utils": "8.56.1", + "@typescript-eslint/types": "8.56.1", + 
"@typescript-eslint/visitor-keys": "8.56.1", "debug": "^4.4.3", - "minimatch": "^9.0.5", + "minimatch": "^10.2.2", "semver": "^7.7.3", "tinyglobby": "^0.2.15", "ts-api-utils": "^2.4.0" @@ -2301,16 +2305,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.53.1.tgz", - "integrity": "sha512-c4bMvGVWW4hv6JmDUEG7fSYlWOl3II2I4ylt0NM+seinYQlZMQIaKaXIIVJWt9Ofh6whrpM+EdDQXKXjNovvrg==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.1.tgz", + "integrity": "sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.9.1", - "@typescript-eslint/scope-manager": "8.53.1", - "@typescript-eslint/types": "8.53.1", - "@typescript-eslint/typescript-estree": "8.53.1" + "@typescript-eslint/scope-manager": "8.56.1", + "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/typescript-estree": "8.56.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2320,19 +2324,19 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.53.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.1.tgz", - "integrity": "sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.1.tgz", + "integrity": "sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.1", - "eslint-visitor-keys": 
"^4.2.1" + "@typescript-eslint/types": "8.56.1", + "eslint-visitor-keys": "^5.0.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2343,29 +2347,29 @@ } }, "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", - "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz", + "integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==", "dev": true, "license": "Apache-2.0", "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + "node": "^20.19.0 || ^22.13.0 || >=24" }, "funding": { "url": "https://opencollective.com/eslint" } }, "node_modules/@vitejs/plugin-react": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.2.tgz", - "integrity": "sha512-EcA07pHJouywpzsoTUqNh5NwGayl2PPVEJKUSinGGSxFGYn+shYbqMGBg6FXDqgXum9Ou/ecb+411ssw8HImJQ==", + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.4.tgz", + "integrity": "sha512-VIcFLdRi/VYRU8OL/puL7QXMYafHmqOnwTZY50U1JPlCNj30PxCMx65c494b1K9be9hX83KVt0+gTEwTWLqToA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/core": "^7.28.5", + "@babel/core": "^7.29.0", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", - "@rolldown/pluginutils": "1.0.0-beta.53", + "@rolldown/pluginutils": "1.0.0-rc.3", "@types/babel__core": "^7.20.5", "react-refresh": "^0.18.0" }, @@ -2446,9 +2450,9 @@ "license": "MIT" }, "node_modules/autoprefixer": { - "version": "10.4.23", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.23.tgz", - "integrity": 
"sha512-YYTXSFulfwytnjAPlw8QHncHJmlvFKtczb8InXaAx9Q0LbfDnfEYDE55omerIJKihhmU61Ft+cAOSzQVaBUmeA==", + "version": "10.4.27", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz", + "integrity": "sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==", "dev": true, "funding": [ { @@ -2467,7 +2471,7 @@ "license": "MIT", "dependencies": { "browserslist": "^4.28.1", - "caniuse-lite": "^1.0.30001760", + "caniuse-lite": "^1.0.30001774", "fraction.js": "^5.3.4", "picocolors": "^1.1.1", "postcss-value-parser": "^4.2.0" @@ -2483,13 +2487,13 @@ } }, "node_modules/axios": { - "version": "1.13.3", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.3.tgz", - "integrity": "sha512-ERT8kdX7DZjtUm7IitEyV7InTHAF42iJuMArIiDIV5YtPanJkgw4hw5Dyg9fh0mihdWNn1GKaeIWErfe56UQ1g==", + "version": "1.13.6", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.6.tgz", + "integrity": "sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ==", "license": "MIT", "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.4", + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, @@ -2511,13 +2515,26 @@ } }, "node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz", + "integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==", "dev": true, "license": "MIT", "dependencies": { - "balanced-match": "^1.0.0" + "balanced-match": "^4.0.2" + }, + "engines": { + "node": "18 || 20 || >=22" + } + }, + "node_modules/brace-expansion/node_modules/balanced-match": { + "version": "4.0.4", + 
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", + "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "18 || 20 || >=22" } }, "node_modules/browserslist": { @@ -2578,9 +2595,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001765", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001765.tgz", - "integrity": "sha512-LWcNtSyZrakjECqmpP4qdg0MMGdN368D7X8XvvAqOcqMv0RxnlqVKZl2V6/mBR68oYMxOZPLw/gO7DuisMHUvQ==", + "version": "1.0.30001777", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001777.tgz", + "integrity": "sha512-tmN+fJxroPndC74efCdp12j+0rk0RHwV5Jwa1zWaFVyw2ZxAuPeG8ZgWC3Wz7uSjT3qMRQ5XHZ4COgQmsCMJAQ==", "dev": true, "funding": [ { @@ -2906,13 +2923,13 @@ "license": "ISC" }, "node_modules/enhanced-resolve": { - "version": "5.18.4", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.4.tgz", - "integrity": "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q==", + "version": "5.20.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.0.tgz", + "integrity": "sha512-/ce7+jQ1PQ6rVXwe+jKEg5hW5ciicHwIQUagZkp6IufBoY3YDgdTTY1azVs0qoRgVmvsNB+rbjLJxDAeHHtwsQ==", "license": "MIT", "dependencies": { "graceful-fs": "^4.2.4", - "tapable": "^2.2.0" + "tapable": "^2.3.0" }, "engines": { "node": ">=10.13.0" @@ -3118,13 +3135,13 @@ } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.4.26", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.26.tgz", - "integrity": "sha512-1RETEylht2O6FM/MvgnyvT+8K21wLqDNg4qD51Zj3guhjt433XbnnkVttHMyaVyAFD03QSV4LPS5iE3VQmO7XQ==", + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.5.2.tgz", + 
"integrity": "sha512-hmgTH57GfzoTFjVN0yBwTggnsVUF2tcqi7RJZHqi9lIezSs4eFyAMktA68YD4r5kNw1mxyY4dmkyoFDb3FIqrA==", "dev": true, "license": "MIT", "peerDependencies": { - "eslint": ">=8.40" + "eslint": "^9 || ^10" } }, "node_modules/eslint-scope": { @@ -3785,9 +3802,9 @@ } }, "node_modules/lightningcss": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz", - "integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.31.1.tgz", + "integrity": "sha512-l51N2r93WmGUye3WuFoN5k10zyvrVs0qfKBhyC5ogUQ6Ew6JUSswh78mbSO+IU3nTWsyOArqPCcShdQSadghBQ==", "license": "MPL-2.0", "dependencies": { "detect-libc": "^2.0.3" @@ -3800,23 +3817,23 @@ "url": "https://opencollective.com/parcel" }, "optionalDependencies": { - "lightningcss-android-arm64": "1.30.2", - "lightningcss-darwin-arm64": "1.30.2", - "lightningcss-darwin-x64": "1.30.2", - "lightningcss-freebsd-x64": "1.30.2", - "lightningcss-linux-arm-gnueabihf": "1.30.2", - "lightningcss-linux-arm64-gnu": "1.30.2", - "lightningcss-linux-arm64-musl": "1.30.2", - "lightningcss-linux-x64-gnu": "1.30.2", - "lightningcss-linux-x64-musl": "1.30.2", - "lightningcss-win32-arm64-msvc": "1.30.2", - "lightningcss-win32-x64-msvc": "1.30.2" + "lightningcss-android-arm64": "1.31.1", + "lightningcss-darwin-arm64": "1.31.1", + "lightningcss-darwin-x64": "1.31.1", + "lightningcss-freebsd-x64": "1.31.1", + "lightningcss-linux-arm-gnueabihf": "1.31.1", + "lightningcss-linux-arm64-gnu": "1.31.1", + "lightningcss-linux-arm64-musl": "1.31.1", + "lightningcss-linux-x64-gnu": "1.31.1", + "lightningcss-linux-x64-musl": "1.31.1", + "lightningcss-win32-arm64-msvc": "1.31.1", + "lightningcss-win32-x64-msvc": "1.31.1" } }, "node_modules/lightningcss-android-arm64": { - "version": "1.30.2", - "resolved": 
"https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.30.2.tgz", - "integrity": "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.31.1.tgz", + "integrity": "sha512-HXJF3x8w9nQ4jbXRiNppBCqeZPIAfUo8zE/kOEGbW5NZvGc/K7nMxbhIr+YlFlHW5mpbg/YFPdbnCh1wAXCKFg==", "cpu": [ "arm64" ], @@ -3834,9 +3851,9 @@ } }, "node_modules/lightningcss-darwin-arm64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.2.tgz", - "integrity": "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.31.1.tgz", + "integrity": "sha512-02uTEqf3vIfNMq3h/z2cJfcOXnQ0GRwQrkmPafhueLb2h7mqEidiCzkE4gBMEH65abHRiQvhdcQ+aP0D0g67sg==", "cpu": [ "arm64" ], @@ -3854,9 +3871,9 @@ } }, "node_modules/lightningcss-darwin-x64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.2.tgz", - "integrity": "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.31.1.tgz", + "integrity": "sha512-1ObhyoCY+tGxtsz1lSx5NXCj3nirk0Y0kB/g8B8DT+sSx4G9djitg9ejFnjb3gJNWo7qXH4DIy2SUHvpoFwfTA==", "cpu": [ "x64" ], @@ -3874,9 +3891,9 @@ } }, "node_modules/lightningcss-freebsd-x64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.2.tgz", - "integrity": "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==", + "version": "1.31.1", + "resolved": 
"https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.31.1.tgz", + "integrity": "sha512-1RINmQKAItO6ISxYgPwszQE1BrsVU5aB45ho6O42mu96UiZBxEXsuQ7cJW4zs4CEodPUioj/QrXW1r9pLUM74A==", "cpu": [ "x64" ], @@ -3894,9 +3911,9 @@ } }, "node_modules/lightningcss-linux-arm-gnueabihf": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.2.tgz", - "integrity": "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.31.1.tgz", + "integrity": "sha512-OOCm2//MZJ87CdDK62rZIu+aw9gBv4azMJuA8/KB74wmfS3lnC4yoPHm0uXZ/dvNNHmnZnB8XLAZzObeG0nS1g==", "cpu": [ "arm" ], @@ -3914,9 +3931,9 @@ } }, "node_modules/lightningcss-linux-arm64-gnu": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.2.tgz", - "integrity": "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.31.1.tgz", + "integrity": "sha512-WKyLWztD71rTnou4xAD5kQT+982wvca7E6QoLpoawZ1gP9JM0GJj4Tp5jMUh9B3AitHbRZ2/H3W5xQmdEOUlLg==", "cpu": [ "arm64" ], @@ -3934,9 +3951,9 @@ } }, "node_modules/lightningcss-linux-arm64-musl": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.2.tgz", - "integrity": "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.31.1.tgz", + "integrity": 
"sha512-mVZ7Pg2zIbe3XlNbZJdjs86YViQFoJSpc41CbVmKBPiGmC4YrfeOyz65ms2qpAobVd7WQsbW4PdsSJEMymyIMg==", "cpu": [ "arm64" ], @@ -3954,9 +3971,9 @@ } }, "node_modules/lightningcss-linux-x64-gnu": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.2.tgz", - "integrity": "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.31.1.tgz", + "integrity": "sha512-xGlFWRMl+0KvUhgySdIaReQdB4FNudfUTARn7q0hh/V67PVGCs3ADFjw+6++kG1RNd0zdGRlEKa+T13/tQjPMA==", "cpu": [ "x64" ], @@ -3974,9 +3991,9 @@ } }, "node_modules/lightningcss-linux-x64-musl": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.2.tgz", - "integrity": "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.31.1.tgz", + "integrity": "sha512-eowF8PrKHw9LpoZii5tdZwnBcYDxRw2rRCyvAXLi34iyeYfqCQNA9rmUM0ce62NlPhCvof1+9ivRaTY6pSKDaA==", "cpu": [ "x64" ], @@ -3994,9 +4011,9 @@ } }, "node_modules/lightningcss-win32-arm64-msvc": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.2.tgz", - "integrity": "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.31.1.tgz", + "integrity": "sha512-aJReEbSEQzx1uBlQizAOBSjcmr9dCdL3XuC/6HLXAxmtErsj2ICo5yYggg1qOODQMtnjNQv2UHb9NpOuFtYe4w==", "cpu": [ "arm64" ], @@ -4014,9 +4031,9 @@ } }, "node_modules/lightningcss-win32-x64-msvc": { - "version": "1.30.2", - 
"resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.2.tgz", - "integrity": "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==", + "version": "1.31.1", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.31.1.tgz", + "integrity": "sha512-I9aiFrbd7oYHwlnQDqr1Roz+fTz61oDDJX7n9tYF9FJymH1cIN1DtKw3iYt6b8WZgEjoNwVSncwF4wx/ZedMhw==", "cpu": [ "x64" ], @@ -4106,16 +4123,16 @@ } }, "node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", + "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", "dev": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "brace-expansion": "^2.0.1" + "brace-expansion": "^5.0.2" }, "engines": { - "node": ">=16 || 14 >=14.17" + "node": "18 || 20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -4250,9 +4267,9 @@ "license": "ISC" }, "node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", "funding": [ { "type": "opencollective", @@ -4311,30 +4328,30 @@ } }, "node_modules/react": { - "version": "19.2.3", - "resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz", - "integrity": 
"sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==", + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz", + "integrity": "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==", "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/react-dom": { - "version": "19.2.3", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.3.tgz", - "integrity": "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg==", + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.4.tgz", + "integrity": "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==", "license": "MIT", "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { - "react": "^19.2.3" + "react": "^19.2.4" } }, "node_modules/react-hook-form": { - "version": "7.71.1", - "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.71.1.tgz", - "integrity": "sha512-9SUJKCGKo8HUSsCO+y0CtqkqI5nNuaDqTxyqPsZPqIwudpj4rCrAz/jZV+jn57bx5gtZKOh3neQu94DXMc+w5w==", + "version": "7.71.2", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.71.2.tgz", + "integrity": "sha512-1CHvcDYzuRUNOflt4MOq3ZM46AronNJtQ1S7tnX6YN4y72qhgiUItpacZUAQ0TyWYci3yz1X+rXaSxiuEm86PA==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -4405,9 +4422,9 @@ } }, "node_modules/react-router": { - "version": "7.13.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.13.0.tgz", - "integrity": "sha512-PZgus8ETambRT17BUm/LL8lX3Of+oiLaPuVTRH3l1eLvSPpKO3AvhAEb5N7ihAFZQrYDqkvvWfFh9p0z9VsjLw==", + "version": "7.13.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.13.1.tgz", + "integrity": "sha512-td+xP4X2/6BJvZoX6xw++A2DdEi++YypA69bJUV5oVvqf6/9/9nNlD70YO1e9d3MyamJEBQFEzk6mbfDYbqrSA==", "license": "MIT", 
"dependencies": { "cookie": "^1.0.1", @@ -4427,12 +4444,12 @@ } }, "node_modules/react-router-dom": { - "version": "7.13.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.13.0.tgz", - "integrity": "sha512-5CO/l5Yahi2SKC6rGZ+HDEjpjkGaG/ncEP7eWFTvFxbHP8yeeI0PxTDjimtpXYlR3b3i9/WIL4VJttPrESIf2g==", + "version": "7.13.1", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.13.1.tgz", + "integrity": "sha512-UJnV3Rxc5TgUPJt2KJpo1Jpy0OKQr0AjgbZzBFjaPJcFOb2Y8jA5H3LT8HUJAiRLlWrEXWHbF1Z4SCZaQjWDHw==", "license": "MIT", "dependencies": { - "react-router": "7.13.0" + "react-router": "7.13.1" }, "engines": { "node": ">=20.0.0" @@ -4443,15 +4460,15 @@ } }, "node_modules/recharts": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/recharts/-/recharts-3.7.0.tgz", - "integrity": "sha512-l2VCsy3XXeraxIID9fx23eCb6iCBsxUQDnE8tWm6DFdszVAO7WVY/ChAD9wVit01y6B2PMupYiMmQwhgPHc9Ew==", + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-3.8.0.tgz", + "integrity": "sha512-Z/m38DX3L73ExO4Tpc9/iZWHmHnlzWG4njQbxsF5aSjwqmHNDDIm0rdEBArkwsBvR8U6EirlEHiQNYWCVh9sGQ==", "license": "MIT", "workspaces": [ "www" ], "dependencies": { - "@reduxjs/toolkit": "1.x.x || 2.x.x", + "@reduxjs/toolkit": "^1.9.0 || 2.x.x", "clsx": "^2.1.1", "decimal.js-light": "^2.5.1", "es-toolkit": "^1.39.3", @@ -4554,9 +4571,9 @@ "license": "MIT" }, "node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -4637,9 +4654,9 @@ "license": "MIT" }, "node_modules/tailwindcss": { - "version": "4.1.18", - 
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.18.tgz", - "integrity": "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.1.tgz", + "integrity": "sha512-/tBrSQ36vCleJkAOsy9kbNTgaxvGbyOamC30PRePTQe/o1MFwEKHQk4Cn7BNGaPtjp+PuUrByJehM1hgxfq4sw==", "license": "MIT" }, "node_modules/tapable": { @@ -4997,9 +5014,9 @@ } }, "node_modules/zustand": { - "version": "5.0.10", - "resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.10.tgz", - "integrity": "sha512-U1AiltS1O9hSy3rul+Ub82ut2fqIAefiSuwECWt6jlMVUGejvf+5omLcRBSzqbRagSM3hQZbtzdeRc6QVScXTg==", + "version": "5.0.11", + "resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.11.tgz", + "integrity": "sha512-fdZY+dk7zn/vbWNCYmzZULHRrss0jx5pPFiOuMZ/5HJN6Yv3u+1Wswy/4MpZEkEGhtNH+pwxZB8OKgUBPzYAGg==", "license": "MIT", "engines": { "node": ">=12.20.0" diff --git a/frontend/package.json b/frontend/package.json index 2fbb31d..7d7b197 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -12,30 +12,30 @@ "dependencies": { "@headlessui/react": "^2.2.9", "@heroicons/react": "^2.2.0", - "@tailwindcss/vite": "^4.1.18", - "@tanstack/react-query": "^5.90.20", - "axios": "^1.13.3", + "@tailwindcss/vite": "^4.2.1", + "@tanstack/react-query": "^5.90.21", + "axios": "^1.13.6", "clsx": "^2.1.0", "date-fns": "^4.1.0", - "react": "^19.2.3", - "react-dom": "^19.2.3", - "react-hook-form": "^7.49.3", + "react": "^19.2.4", + "react-dom": "^19.2.4", + "react-hook-form": "^7.71.2", "react-hot-toast": "^2.4.1", - "react-router-dom": "^7.13.0", - "recharts": "^3.7.0", - "zustand": "^5.0.10" + "react-router-dom": "^7.13.1", + "recharts": "^3.8.0", + "zustand": "^5.0.11" }, "devDependencies": { - "@types/react": "^19.2.9", + "@types/react": "^19.2.14", "@types/react-dom": "^19.2.3", - "@typescript-eslint/eslint-plugin": "^8.53.1", + 
"@typescript-eslint/eslint-plugin": "^8.56.1", "@typescript-eslint/parser": "^8.53.1", - "@vitejs/plugin-react": "^5.1.2", - "autoprefixer": "^10.4.23", + "@vitejs/plugin-react": "^5.1.4", + "autoprefixer": "^10.4.27", "eslint": "^9.39.2", "eslint-plugin-react-hooks": "^7.0.1", - "eslint-plugin-react-refresh": "^0.4.5", - "postcss": "^8.5.6", + "eslint-plugin-react-refresh": "^0.5.2", + "postcss": "^8.5.8", "tailwindcss": "^4.1.18", "typescript": "^5.3.3", "vite": "^7.3.1" From ab4c9d229e14fd81f2c378522750a543a0b58502 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Mar 2026 11:46:51 +0000 Subject: [PATCH 118/122] chore(deps): bump actions/upload-artifact from 6 to 7 (#39) Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 6 to 7. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v6...v7) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-version: '7' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/security-scan.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/security-scan.yml b/.github/workflows/security-scan.yml index 53022d8..982e2f9 100644 --- a/.github/workflows/security-scan.yml +++ b/.github/workflows/security-scan.yml @@ -49,7 +49,7 @@ jobs: - name: Upload Bandit results if: always() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: bandit-results path: bandit-results.json @@ -88,7 +88,7 @@ jobs: - name: Upload results if: always() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: pip-audit-results path: pip-audit-results.json @@ -126,7 +126,7 @@ jobs: - name: Upload results if: always() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: npm-audit-results path: frontend/npm-audit-results.json From 6b39e0c9ffa56d7cf903d87c6b3d79be0ccda3c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Mar 2026 11:46:57 +0000 Subject: [PATCH 119/122] chore(deps): bump docker/setup-buildx-action from 3 to 4 (#38) Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3 to 4. - [Release notes](https://github.com/docker/setup-buildx-action/releases) - [Commits](https://github.com/docker/setup-buildx-action/compare/v3...v4) --- updated-dependencies: - dependency-name: docker/setup-buildx-action dependency-version: '4' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c96d3ad..4984dca 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -100,7 +100,7 @@ jobs: uses: docker/setup-qemu-action@v4 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v4 - name: Log in to GitHub Container Registry uses: docker/login-action@v3 From d0ddb921eb01f6c4f90d060731531faa559af5c0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Mar 2026 11:47:21 +0000 Subject: [PATCH 120/122] chore(deps): bump aquasecurity/trivy-action from 0.34.1 to 0.35.0 (#37) Bumps [aquasecurity/trivy-action](https://github.com/aquasecurity/trivy-action) from 0.34.1 to 0.35.0. - [Release notes](https://github.com/aquasecurity/trivy-action/releases) - [Commits](https://github.com/aquasecurity/trivy-action/compare/0.34.1...0.35.0) --- updated-dependencies: - dependency-name: aquasecurity/trivy-action dependency-version: 0.35.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/security-scan.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/security-scan.yml b/.github/workflows/security-scan.yml index 982e2f9..0eab4b4 100644 --- a/.github/workflows/security-scan.yml +++ b/.github/workflows/security-scan.yml @@ -142,7 +142,7 @@ jobs: - uses: actions/checkout@v6 - name: Run Trivy filesystem scanner (SARIF) - uses: aquasecurity/trivy-action@0.34.1 + uses: aquasecurity/trivy-action@0.35.0 with: scan-type: 'fs' scan-ref: '.' 
@@ -160,7 +160,7 @@ jobs: - name: Run Trivy filesystem scanner (table for logs) if: always() - uses: aquasecurity/trivy-action@0.34.1 + uses: aquasecurity/trivy-action@0.35.0 with: scan-type: 'fs' scan-ref: '.' @@ -184,7 +184,7 @@ jobs: - name: Run Trivy image scanner (SARIF) if: steps.docker-build.outcome == 'success' - uses: aquasecurity/trivy-action@0.34.1 + uses: aquasecurity/trivy-action@0.35.0 with: image-ref: 'mediacurator:scan' format: 'sarif' @@ -201,7 +201,7 @@ jobs: - name: Run Trivy image scanner (table for logs) if: steps.docker-build.outcome == 'success' - uses: aquasecurity/trivy-action@0.34.1 + uses: aquasecurity/trivy-action@0.35.0 with: image-ref: 'mediacurator:scan' format: 'table' From 7577f57735c2fe11b065ac23d448b009d7f460eb Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Fri, 13 Mar 2026 14:58:12 +0000 Subject: [PATCH 121/122] chore(deps): update safe frontend dependencies (#44) - @vitejs/plugin-react 5.1.4 -> 5.2.0 (adds vite 8 future support) - @typescript-eslint/eslint-plugin 8.56.1 -> 8.57.0 - @typescript-eslint/parser 8.53.1 -> 8.57.0 - Fix 3 vulnerabilities (rollup, minimatch, ajv) via npm audit fix Skipped major bumps due to peer dep conflicts: - vite 8: @tailwindcss/vite only supports ^5/^6/^7 - eslint 10: eslint-plugin-react-hooks only supports up to ^9 Co-authored-by: Serph91P --- frontend/package-lock.json | 389 ++++++++++++++++++++----------------- frontend/package.json | 4 +- 2 files changed, 216 insertions(+), 177 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 341c946..9825b77 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -26,8 +26,8 @@ "devDependencies": { "@types/react": "^19.2.14", "@types/react-dom": "^19.2.3", - "@typescript-eslint/eslint-plugin": "^8.56.1", - "@typescript-eslint/parser": "^8.53.1", + "@typescript-eslint/eslint-plugin": "^8.57.0", + "@typescript-eslint/parser": "^8.57.0", "@vitejs/plugin-react": "^5.1.4", "autoprefixer": "^10.4.27", 
"eslint": "^9.39.2", @@ -813,9 +813,9 @@ } }, "node_modules/@eslint/config-array/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", "dev": true, "license": "ISC", "dependencies": { @@ -897,9 +897,9 @@ } }, "node_modules/@eslint/eslintrc/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", "dev": true, "license": "ISC", "dependencies": { @@ -1266,9 +1266,9 @@ "license": "MIT" }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.55.2.tgz", - "integrity": "sha512-21J6xzayjy3O6NdnlO6aXi/urvSRjm6nCI6+nF6ra2YofKruGixN9kfT+dt55HVNwfDmpDHJcaS3JuP/boNnlA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz", + "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==", "cpu": [ "arm" ], @@ -1279,9 +1279,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.55.2.tgz", - "integrity": 
"sha512-eXBg7ibkNUZ+sTwbFiDKou0BAckeV6kIigK7y5Ko4mB/5A1KLhuzEKovsmfvsL8mQorkoincMFGnQuIT92SKqA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz", + "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==", "cpu": [ "arm64" ], @@ -1292,9 +1292,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.55.2.tgz", - "integrity": "sha512-UCbaTklREjrc5U47ypLulAgg4njaqfOVLU18VrCrI+6E5MQjuG0lSWaqLlAJwsD7NpFV249XgB0Bi37Zh5Sz4g==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz", + "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==", "cpu": [ "arm64" ], @@ -1305,9 +1305,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.55.2.tgz", - "integrity": "sha512-dP67MA0cCMHFT2g5XyjtpVOtp7y4UyUxN3dhLdt11at5cPKnSm4lY+EhwNvDXIMzAMIo2KU+mc9wxaAQJTn7sQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz", + "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==", "cpu": [ "x64" ], @@ -1318,9 +1318,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.55.2.tgz", - "integrity": "sha512-WDUPLUwfYV9G1yxNRJdXcvISW15mpvod1Wv3ok+Ws93w1HjIVmCIFxsG2DquO+3usMNCpJQ0wqO+3GhFdl6Fow==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz", + "integrity": 
"sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==", "cpu": [ "arm64" ], @@ -1331,9 +1331,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.55.2.tgz", - "integrity": "sha512-Ng95wtHVEulRwn7R0tMrlUuiLVL/HXA8Lt/MYVpy88+s5ikpntzZba1qEulTuPnPIZuOPcW9wNEiqvZxZmgmqQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz", + "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==", "cpu": [ "x64" ], @@ -1344,12 +1344,15 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.55.2.tgz", - "integrity": "sha512-AEXMESUDWWGqD6LwO/HkqCZgUE1VCJ1OhbvYGsfqX2Y6w5quSXuyoy/Fg3nRqiwro+cJYFxiw5v4kB2ZDLhxrw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz", + "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==", "cpu": [ "arm" ], + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -1357,12 +1360,15 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.55.2.tgz", - "integrity": "sha512-ZV7EljjBDwBBBSv570VWj0hiNTdHt9uGznDtznBB4Caj3ch5rgD4I2K1GQrtbvJ/QiB+663lLgOdcADMNVC29Q==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz", + "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==", "cpu": [ "arm" ], + "libc": 
[ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -1370,12 +1376,15 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.55.2.tgz", - "integrity": "sha512-uvjwc8NtQVPAJtq4Tt7Q49FOodjfbf6NpqXyW/rjXoV+iZ3EJAHLNAnKT5UJBc6ffQVgmXTUL2ifYiLABlGFqA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz", + "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==", "cpu": [ "arm64" ], + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -1383,12 +1392,15 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.55.2.tgz", - "integrity": "sha512-s3KoWVNnye9mm/2WpOZ3JeUiediUVw6AvY/H7jNA6qgKA2V2aM25lMkVarTDfiicn/DLq3O0a81jncXszoyCFA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz", + "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==", "cpu": [ "arm64" ], + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -1396,12 +1408,15 @@ ] }, "node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.55.2.tgz", - "integrity": "sha512-gi21faacK+J8aVSyAUptML9VQN26JRxe484IbF+h3hpG+sNVoMXPduhREz2CcYr5my0NE3MjVvQ5bMKX71pfVA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz", + "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==", "cpu": [ "loong64" ], 
+ "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -1409,12 +1424,15 @@ ] }, "node_modules/@rollup/rollup-linux-loong64-musl": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.55.2.tgz", - "integrity": "sha512-qSlWiXnVaS/ceqXNfnoFZh4IiCA0EwvCivivTGbEu1qv2o+WTHpn1zNmCTAoOG5QaVr2/yhCoLScQtc/7RxshA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz", + "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==", "cpu": [ "loong64" ], + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -1422,12 +1440,15 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.55.2.tgz", - "integrity": "sha512-rPyuLFNoF1B0+wolH277E780NUKf+KoEDb3OyoLbAO18BbeKi++YN6gC/zuJoPPDlQRL3fIxHxCxVEWiem2yXw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz", + "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==", "cpu": [ "ppc64" ], + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -1435,12 +1456,15 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-musl": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.55.2.tgz", - "integrity": "sha512-g+0ZLMook31iWV4PvqKU0i9E78gaZgYpSrYPed/4Bu+nGTgfOPtfs1h11tSSRPXSjC5EzLTjV/1A7L2Vr8pJoQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz", + "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==", 
"cpu": [ "ppc64" ], + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -1448,12 +1472,15 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.55.2.tgz", - "integrity": "sha512-i+sGeRGsjKZcQRh3BRfpLsM3LX3bi4AoEVqmGDyc50L6KfYsN45wVCSz70iQMwPWr3E5opSiLOwsC9WB4/1pqg==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz", + "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==", "cpu": [ "riscv64" ], + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -1461,12 +1488,15 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.55.2.tgz", - "integrity": "sha512-C1vLcKc4MfFV6I0aWsC7B2Y9QcsiEcvKkfxprwkPfLaN8hQf0/fKHwSF2lcYzA9g4imqnhic729VB9Fo70HO3Q==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz", + "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==", "cpu": [ "riscv64" ], + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -1474,12 +1504,15 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.55.2.tgz", - "integrity": "sha512-68gHUK/howpQjh7g7hlD9DvTTt4sNLp1Bb+Yzw2Ki0xvscm2cOdCLZNJNhd2jW8lsTPrHAHuF751BygifW4bkQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz", + "integrity": 
"sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==", "cpu": [ "s390x" ], + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -1487,12 +1520,15 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.55.2.tgz", - "integrity": "sha512-1e30XAuaBP1MAizaOBApsgeGZge2/Byd6wV4a8oa6jPdHELbRHBiw7wvo4dp7Ie2PE8TZT4pj9RLGZv9N4qwlw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz", + "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==", "cpu": [ "x64" ], + "libc": [ + "glibc" + ], "license": "MIT", "optional": true, "os": [ @@ -1500,12 +1536,15 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.55.2.tgz", - "integrity": "sha512-4BJucJBGbuGnH6q7kpPqGJGzZnYrpAzRd60HQSt3OpX/6/YVgSsJnNzR8Ot74io50SeVT4CtCWe/RYIAymFPwA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz", + "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==", "cpu": [ "x64" ], + "libc": [ + "musl" + ], "license": "MIT", "optional": true, "os": [ @@ -1513,9 +1552,9 @@ ] }, "node_modules/@rollup/rollup-openbsd-x64": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.55.2.tgz", - "integrity": "sha512-cT2MmXySMo58ENv8p6/O6wI/h/gLnD3D6JoajwXFZH6X9jz4hARqUhWpGuQhOgLNXscfZYRQMJvZDtWNzMAIDw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz", + "integrity": 
"sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==", "cpu": [ "x64" ], @@ -1526,9 +1565,9 @@ ] }, "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.55.2.tgz", - "integrity": "sha512-sZnyUgGkuzIXaK3jNMPmUIyJrxu/PjmATQrocpGA1WbCPX8H5tfGgRSuYtqBYAvLuIGp8SPRb1O4d1Fkb5fXaQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz", + "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==", "cpu": [ "arm64" ], @@ -1539,9 +1578,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.55.2.tgz", - "integrity": "sha512-sDpFbenhmWjNcEbBcoTV0PWvW5rPJFvu+P7XoTY0YLGRupgLbFY0XPfwIbJOObzO7QgkRDANh65RjhPmgSaAjQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz", + "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==", "cpu": [ "arm64" ], @@ -1552,9 +1591,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.55.2.tgz", - "integrity": "sha512-GvJ03TqqaweWCigtKQVBErw2bEhu1tyfNQbarwr94wCGnczA9HF8wqEe3U/Lfu6EdeNP0p6R+APeHVwEqVxpUQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz", + "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==", "cpu": [ "ia32" ], @@ -1565,9 +1604,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-gnu": { - "version": 
"4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.55.2.tgz", - "integrity": "sha512-KvXsBvp13oZz9JGe5NYS7FNizLe99Ny+W8ETsuCyjXiKdiGrcz2/J/N8qxZ/RSwivqjQguug07NLHqrIHrqfYw==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz", + "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==", "cpu": [ "x64" ], @@ -1578,9 +1617,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.55.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.55.2.tgz", - "integrity": "sha512-xNO+fksQhsAckRtDSPWaMeT1uIM+JrDRXlerpnWNXhn1TdB3YZ6uKBMBTKP0eX9XtYEP978hHk1f8332i2AW8Q==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz", + "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==", "cpu": [ "x64" ], @@ -2127,17 +2166,17 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.1.tgz", - "integrity": "sha512-Jz9ZztpB37dNC+HU2HI28Bs9QXpzCz+y/twHOwhyrIRdbuVDxSytJNDl6z/aAKlaRIwC7y8wJdkBv7FxYGgi0A==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.0.tgz", + "integrity": "sha512-qeu4rTHR3/IaFORbD16gmjq9+rEs9fGKdX0kF6BKSfi+gCuG3RCKLlSBYzn/bGsY9Tj7KE/DAQStbp8AHJGHEQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.12.2", - "@typescript-eslint/scope-manager": "8.56.1", - "@typescript-eslint/type-utils": "8.56.1", - "@typescript-eslint/utils": "8.56.1", - "@typescript-eslint/visitor-keys": "8.56.1", + "@typescript-eslint/scope-manager": "8.57.0", + 
"@typescript-eslint/type-utils": "8.57.0", + "@typescript-eslint/utils": "8.57.0", + "@typescript-eslint/visitor-keys": "8.57.0", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" @@ -2150,22 +2189,22 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.56.1", + "@typescript-eslint/parser": "^8.57.0", "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/parser": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.1.tgz", - "integrity": "sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.57.0.tgz", + "integrity": "sha512-XZzOmihLIr8AD1b9hL9ccNMzEMWt/dE2u7NyTY9jJG6YNiNthaD5XtUHVF2uCXZ15ng+z2hT3MVuxnUYhq6k1g==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.56.1", - "@typescript-eslint/types": "8.56.1", - "@typescript-eslint/typescript-estree": "8.56.1", - "@typescript-eslint/visitor-keys": "8.56.1", + "@typescript-eslint/scope-manager": "8.57.0", + "@typescript-eslint/types": "8.57.0", + "@typescript-eslint/typescript-estree": "8.57.0", + "@typescript-eslint/visitor-keys": "8.57.0", "debug": "^4.4.3" }, "engines": { @@ -2181,14 +2220,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.1.tgz", - "integrity": "sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.57.0.tgz", + "integrity": "sha512-pR+dK0BlxCLxtWfaKQWtYr7MhKmzqZxuii+ZjuFlZlIGRZm22HnXFqa2eY+90MUz8/i80YJmzFGDUsi8dMOV5w==", 
"dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.56.1", - "@typescript-eslint/types": "^8.56.1", + "@typescript-eslint/tsconfig-utils": "^8.57.0", + "@typescript-eslint/types": "^8.57.0", "debug": "^4.4.3" }, "engines": { @@ -2203,14 +2242,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.1.tgz", - "integrity": "sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.57.0.tgz", + "integrity": "sha512-nvExQqAHF01lUM66MskSaZulpPL5pgy5hI5RfrxviLgzZVffB5yYzw27uK/ft8QnKXI2X0LBrHJFr1TaZtAibw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.56.1", - "@typescript-eslint/visitor-keys": "8.56.1" + "@typescript-eslint/types": "8.57.0", + "@typescript-eslint/visitor-keys": "8.57.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2221,9 +2260,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.1.tgz", - "integrity": "sha512-qOtCYzKEeyr3aR9f28mPJqBty7+DBqsdd63eO0yyDwc6vgThj2UjWfJIcsFeSucYydqcuudMOprZ+x1SpF3ZuQ==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.57.0.tgz", + "integrity": "sha512-LtXRihc5ytjJIQEH+xqjB0+YgsV4/tW35XKX3GTZHpWtcC8SPkT/d4tqdf1cKtesryHm2bgp6l555NYcT2NLvA==", "dev": true, "license": "MIT", "engines": { @@ -2238,15 +2277,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.1.tgz", - "integrity": 
"sha512-yB/7dxi7MgTtGhZdaHCemf7PuwrHMenHjmzgUW1aJpO+bBU43OycnM3Wn+DdvDO/8zzA9HlhaJ0AUGuvri4oGg==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.57.0.tgz", + "integrity": "sha512-yjgh7gmDcJ1+TcEg8x3uWQmn8ifvSupnPfjP21twPKrDP/pTHlEQgmKcitzF/rzPSmv7QjJ90vRpN4U+zoUjwQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.56.1", - "@typescript-eslint/typescript-estree": "8.56.1", - "@typescript-eslint/utils": "8.56.1", + "@typescript-eslint/types": "8.57.0", + "@typescript-eslint/typescript-estree": "8.57.0", + "@typescript-eslint/utils": "8.57.0", "debug": "^4.4.3", "ts-api-utils": "^2.4.0" }, @@ -2263,9 +2302,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.1.tgz", - "integrity": "sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.0.tgz", + "integrity": "sha512-dTLI8PEXhjUC7B9Kre+u0XznO696BhXcTlOn0/6kf1fHaQW8+VjJAVHJ3eTI14ZapTxdkOmc80HblPQLaEeJdg==", "dev": true, "license": "MIT", "engines": { @@ -2277,16 +2316,16 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.1.tgz", - "integrity": "sha512-qzUL1qgalIvKWAf9C1HpvBjif+Vm6rcT5wZd4VoMb9+Km3iS3Cv9DY6dMRMDtPnwRAFyAi7YXJpTIEXLvdfPxg==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.57.0.tgz", + "integrity": "sha512-m7faHcyVg0BT3VdYTlX8GdJEM7COexXxS6KqGopxdtkQRvBanK377QDHr4W/vIPAR+ah9+B/RclSW5ldVniO1Q==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.56.1", - "@typescript-eslint/tsconfig-utils": "8.56.1", - 
"@typescript-eslint/types": "8.56.1", - "@typescript-eslint/visitor-keys": "8.56.1", + "@typescript-eslint/project-service": "8.57.0", + "@typescript-eslint/tsconfig-utils": "8.57.0", + "@typescript-eslint/types": "8.57.0", + "@typescript-eslint/visitor-keys": "8.57.0", "debug": "^4.4.3", "minimatch": "^10.2.2", "semver": "^7.7.3", @@ -2305,16 +2344,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.1.tgz", - "integrity": "sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.57.0.tgz", + "integrity": "sha512-5iIHvpD3CZe06riAsbNxxreP+MuYgVUsV0n4bwLH//VJmgtt54sQeY2GszntJ4BjYCpMzrfVh2SBnUQTtys2lQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.9.1", - "@typescript-eslint/scope-manager": "8.56.1", - "@typescript-eslint/types": "8.56.1", - "@typescript-eslint/typescript-estree": "8.56.1" + "@typescript-eslint/scope-manager": "8.57.0", + "@typescript-eslint/types": "8.57.0", + "@typescript-eslint/typescript-estree": "8.57.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2329,13 +2368,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.56.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.1.tgz", - "integrity": "sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.57.0.tgz", + "integrity": "sha512-zm6xx8UT/Xy2oSr2ZXD0pZo7Jx2XsCoID2IUh9YSTFRu7z+WdwYTRk6LhUftm1crwqbuoF6I8zAFeCMw0YjwDg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/types": "8.57.0", "eslint-visitor-keys": 
"^5.0.0" }, "engines": { @@ -2360,9 +2399,9 @@ } }, "node_modules/@vitejs/plugin-react": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.4.tgz", - "integrity": "sha512-VIcFLdRi/VYRU8OL/puL7QXMYafHmqOnwTZY50U1JPlCNj30PxCMx65c494b1K9be9hX83KVt0+gTEwTWLqToA==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.2.0.tgz", + "integrity": "sha512-YmKkfhOAi3wsB1PhJq5Scj3GXMn3WvtQ/JC0xoopuHoXSdmtdStOpFrYaT1kie2YgFBcIe64ROzMYRjCrYOdYw==", "dev": true, "license": "MIT", "dependencies": { @@ -2377,7 +2416,7 @@ "node": "^20.19.0 || >=22.12.0" }, "peerDependencies": { - "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0" } }, "node_modules/acorn": { @@ -2404,9 +2443,9 @@ } }, "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz", + "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==", "dev": true, "license": "MIT", "dependencies": { @@ -3209,9 +3248,9 @@ } }, "node_modules/eslint/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", "dev": true, "license": "ISC", "dependencies": { @@ -4521,9 +4560,9 @@ } }, "node_modules/rollup": { - "version": "4.55.2", - "resolved": 
"https://registry.npmjs.org/rollup/-/rollup-4.55.2.tgz", - "integrity": "sha512-PggGy4dhwx5qaW+CKBilA/98Ql9keyfnb7lh4SR6shQ91QQQi1ORJ1v4UinkdP2i87OBs9AQFooQylcrrRfIcg==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz", + "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==", "license": "MIT", "dependencies": { "@types/estree": "1.0.8" @@ -4536,31 +4575,31 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.55.2", - "@rollup/rollup-android-arm64": "4.55.2", - "@rollup/rollup-darwin-arm64": "4.55.2", - "@rollup/rollup-darwin-x64": "4.55.2", - "@rollup/rollup-freebsd-arm64": "4.55.2", - "@rollup/rollup-freebsd-x64": "4.55.2", - "@rollup/rollup-linux-arm-gnueabihf": "4.55.2", - "@rollup/rollup-linux-arm-musleabihf": "4.55.2", - "@rollup/rollup-linux-arm64-gnu": "4.55.2", - "@rollup/rollup-linux-arm64-musl": "4.55.2", - "@rollup/rollup-linux-loong64-gnu": "4.55.2", - "@rollup/rollup-linux-loong64-musl": "4.55.2", - "@rollup/rollup-linux-ppc64-gnu": "4.55.2", - "@rollup/rollup-linux-ppc64-musl": "4.55.2", - "@rollup/rollup-linux-riscv64-gnu": "4.55.2", - "@rollup/rollup-linux-riscv64-musl": "4.55.2", - "@rollup/rollup-linux-s390x-gnu": "4.55.2", - "@rollup/rollup-linux-x64-gnu": "4.55.2", - "@rollup/rollup-linux-x64-musl": "4.55.2", - "@rollup/rollup-openbsd-x64": "4.55.2", - "@rollup/rollup-openharmony-arm64": "4.55.2", - "@rollup/rollup-win32-arm64-msvc": "4.55.2", - "@rollup/rollup-win32-ia32-msvc": "4.55.2", - "@rollup/rollup-win32-x64-gnu": "4.55.2", - "@rollup/rollup-win32-x64-msvc": "4.55.2", + "@rollup/rollup-android-arm-eabi": "4.59.0", + "@rollup/rollup-android-arm64": "4.59.0", + "@rollup/rollup-darwin-arm64": "4.59.0", + "@rollup/rollup-darwin-x64": "4.59.0", + "@rollup/rollup-freebsd-arm64": "4.59.0", + "@rollup/rollup-freebsd-x64": "4.59.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.59.0", + 
"@rollup/rollup-linux-arm-musleabihf": "4.59.0", + "@rollup/rollup-linux-arm64-gnu": "4.59.0", + "@rollup/rollup-linux-arm64-musl": "4.59.0", + "@rollup/rollup-linux-loong64-gnu": "4.59.0", + "@rollup/rollup-linux-loong64-musl": "4.59.0", + "@rollup/rollup-linux-ppc64-gnu": "4.59.0", + "@rollup/rollup-linux-ppc64-musl": "4.59.0", + "@rollup/rollup-linux-riscv64-gnu": "4.59.0", + "@rollup/rollup-linux-riscv64-musl": "4.59.0", + "@rollup/rollup-linux-s390x-gnu": "4.59.0", + "@rollup/rollup-linux-x64-gnu": "4.59.0", + "@rollup/rollup-linux-x64-musl": "4.59.0", + "@rollup/rollup-openbsd-x64": "4.59.0", + "@rollup/rollup-openharmony-arm64": "4.59.0", + "@rollup/rollup-win32-arm64-msvc": "4.59.0", + "@rollup/rollup-win32-ia32-msvc": "4.59.0", + "@rollup/rollup-win32-x64-gnu": "4.59.0", + "@rollup/rollup-win32-x64-msvc": "4.59.0", "fsevents": "~2.3.2" } }, diff --git a/frontend/package.json b/frontend/package.json index 7d7b197..0b74def 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -28,8 +28,8 @@ "devDependencies": { "@types/react": "^19.2.14", "@types/react-dom": "^19.2.3", - "@typescript-eslint/eslint-plugin": "^8.56.1", - "@typescript-eslint/parser": "^8.53.1", + "@typescript-eslint/eslint-plugin": "^8.57.0", + "@typescript-eslint/parser": "^8.57.0", "@vitejs/plugin-react": "^5.1.4", "autoprefixer": "^10.4.27", "eslint": "^9.39.2", From 37ffca034a80cb2537aa15f8e39c36c95c5e49d6 Mon Sep 17 00:00:00 2001 From: Seraph91P Date: Sun, 15 Mar 2026 14:50:00 +0000 Subject: [PATCH 122/122] ci: unify versioning with conventional commits auto-bump (#46) - Rewrite version determination to use highest tag across all branches - Auto-detect bump type from conventional commits (feat->minor, fix->patch, breaking->major) - Add copilot-instructions.md with commit message convention - Add .vscode/settings.json for commit message generation - Update .gitignore to track .vscode/settings.json --- .github/copilot-instructions.md | 87 +++++++++++++++++++++ 
.github/workflows/release.yml | 131 ++++++++++++++++++++++++-------- .gitignore | 3 +- .vscode/settings.json | 7 ++ 4 files changed, 197 insertions(+), 31 deletions(-) create mode 100644 .github/copilot-instructions.md create mode 100644 .vscode/settings.json diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 0000000..67e7d85 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,87 @@ +# Copilot Instructions for MediaCurator + +## Commit Message Convention + +This project uses **Conventional Commits** for automatic versioning and changelog generation. +Every commit message MUST follow this format: + +``` +<type>(<scope>): <description> + +[optional body] + +[optional footer(s)] +``` + +### Types + +| Type | Purpose | Version Impact | +|------|---------|----------------| +| `feat` | New feature or capability | **Minor** bump (0.X.0) | +| `fix` | Bug fix | **Patch** bump (0.0.X) | +| `docs` | Documentation only | Patch bump | +| `style` | Formatting, whitespace, no code change | Patch bump | +| `refactor` | Code restructuring, no behavior change | Patch bump | +| `perf` | Performance improvement | Patch bump | +| `test` | Adding or updating tests | Patch bump | +| `chore` | Build, tooling, dependencies | Patch bump | +| `ci` | CI/CD pipeline changes | Patch bump | + +### Breaking Changes → Major Bump + +A breaking change triggers a **Major** version bump (X.0.0). Mark it with either: + +- An `!` after the type/scope: `feat!: remove legacy API` +- A `BREAKING CHANGE:` footer in the commit body: + ``` + refactor(api): change auth endpoints + + BREAKING CHANGE: /auth/login now requires JSON body instead of form data + ``` + +### Scopes + +Use scopes to indicate the affected area.
Common scopes for this project: + +- `api` – Backend API routes/endpoints +- `ui` – Frontend components/pages +- `auth` – Authentication/security +- `db` – Database/models/migrations +- `docker` – Docker/deployment +- `sync` – Media sync services (Radarr, Sonarr, Emby) +- `rules` – Cleanup rules engine +- `notifications` – Notification system +- `scheduler` – Background jobs/scheduler + +### Examples + +``` +feat(api): add library statistics endpoint +fix(ui): correct pagination on media list +refactor(sync): simplify Sonarr client error handling +docs: update README with Docker Compose examples +feat!: redesign rule evaluation engine +chore(deps): update FastAPI to 0.115 +ci: add ARM64 Docker build +perf(db): add index on media.last_played +``` + +### Rules + +- Type and description are **required** +- Scope is optional but encouraged +- Description must be lowercase, imperative mood ("add" not "added" or "adds") +- No period at the end of the description +- Body and footer are optional +- Use `!` or `BREAKING CHANGE:` only for genuinely incompatible changes + +## Language + +- Commit messages in **English** +- Code comments in **English** + +## Tech Stack + +- **Backend**: Python 3.12, FastAPI, SQLAlchemy, SQLite/PostgreSQL +- **Frontend**: React, TypeScript, Vite, Tailwind CSS +- **Deployment**: Docker, GitHub Actions diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4984dca..4682528 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -41,26 +41,99 @@ jobs: env: REF: ${{ github.ref }} run: | - # Get commit count for consistent versioning across branches - COMMIT_COUNT=$(git rev-list --count HEAD) - - if [[ "$REF" == refs/tags/vdev.* ]]; then - # Dev tag (e.g., vdev.0.0.103 -> dev.0.0.103) - VERSION=${GITHUB_REF#refs/tags/v} - IS_PRERELEASE=true - elif [[ "$REF" == refs/tags/v* ]]; then - # Stable tag (e.g., v1.2.3 -> 1.2.3) + if [[ "$REF" == refs/tags/v* ]]; then + # Manual tag push - use the tag 
version directly VERSION=${GITHUB_REF#refs/tags/v} - IS_PRERELEASE=false - elif [[ "$REF" == refs/heads/main ]]; then - # Main branch - stable release with same count as dev - VERSION="0.0.${COMMIT_COUNT}" - IS_PRERELEASE=false + if [[ "$VERSION" == *-dev* ]] || [[ "$VERSION" == dev.* ]]; then + IS_PRERELEASE=true + else + IS_PRERELEASE=false + fi else - # Develop branch - prerelease with dev prefix - VERSION="dev.0.0.${COMMIT_COUNT}" - IS_PRERELEASE=true + # Auto-version: find highest version across ALL tags (dev + stable) + # Extract numeric version from all tags: v0.0.236, vdev.0.0.235, v0.1.0-dev, etc. + HIGHEST_TAG="" + HIGHEST_MAJOR=0 + HIGHEST_MINOR=0 + HIGHEST_PATCH=0 + + for tag in $(git tag -l 'v*'); do + # Strip tag prefix: v0.0.236 -> 0.0.236, vdev.0.0.235 -> 0.0.235 + ver="$tag" + ver="${ver#v}" # remove leading v + ver="${ver#dev.}" # remove dev. prefix (old format) + ver="${ver%%-dev*}" # remove -dev* suffix (new format) + + M=$(echo "$ver" | cut -d. -f1) + m=$(echo "$ver" | cut -d. -f2) + P=$(echo "$ver" | cut -d. -f3) + + # Skip if not numeric + [[ "$M" =~ ^[0-9]+$ ]] || continue + [[ "$m" =~ ^[0-9]+$ ]] || continue + [[ "$P" =~ ^[0-9]+$ ]] || continue + + # Compare: is this tag's version higher than current highest? 
+ if (( M > HIGHEST_MAJOR )) || \ + (( M == HIGHEST_MAJOR && m > HIGHEST_MINOR )) || \ + (( M == HIGHEST_MAJOR && m == HIGHEST_MINOR && P > HIGHEST_PATCH )); then + HIGHEST_MAJOR=$M + HIGHEST_MINOR=$m + HIGHEST_PATCH=$P + HIGHEST_TAG="$tag" + fi + done + + echo "Highest existing version: ${HIGHEST_MAJOR}.${HIGHEST_MINOR}.${HIGHEST_PATCH} (tag: ${HIGHEST_TAG:-none})" + + # Analyze commits since last tag for conventional commit bump type + BUMP="patch" + if [ -n "$HIGHEST_TAG" ]; then + COMMITS=$(git log "${HIGHEST_TAG}..HEAD" --pretty=format:"%s%n%b" 2>/dev/null || echo "") + else + COMMITS=$(git log --pretty=format:"%s%n%b" 2>/dev/null || echo "") + fi + + # Check for breaking changes -> major bump + if echo "$COMMITS" | grep -qiE '(^[a-z]+!:|BREAKING CHANGE)'; then + BUMP="major" + # Check for features -> minor bump + elif echo "$COMMITS" | grep -qiE '^feat(\(|:)'; then + BUMP="minor" + fi + + echo "Bump type: $BUMP" + + # Calculate next version + case "$BUMP" in + major) + NEXT_MAJOR=$((HIGHEST_MAJOR + 1)) + NEXT_MINOR=0 + NEXT_PATCH=0 + ;; + minor) + NEXT_MAJOR=$HIGHEST_MAJOR + NEXT_MINOR=$((HIGHEST_MINOR + 1)) + NEXT_PATCH=0 + ;; + patch) + NEXT_MAJOR=$HIGHEST_MAJOR + NEXT_MINOR=$HIGHEST_MINOR + NEXT_PATCH=$((HIGHEST_PATCH + 1)) + ;; + esac + + VERSION="${NEXT_MAJOR}.${NEXT_MINOR}.${NEXT_PATCH}" + + if [[ "$REF" == refs/heads/main ]]; then + IS_PRERELEASE=false + else + # Develop branch: append -dev suffix + VERSION="${VERSION}-dev" + IS_PRERELEASE=true + fi fi + echo "version=${VERSION}" >> $GITHUB_OUTPUT echo "is_prerelease=${IS_PRERELEASE}" >> $GITHUB_OUTPUT echo "Version: ${VERSION}, Prerelease: ${IS_PRERELEASE}" @@ -72,9 +145,9 @@ jobs: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} tags: | # For tagged releases - type=semver,pattern={{version}},enable=${{ startsWith(github.ref, 'refs/tags/v') && !startsWith(github.ref, 'refs/tags/vdev') }} - type=semver,pattern={{major}}.{{minor}},enable=${{ startsWith(github.ref, 'refs/tags/v') && 
!startsWith(github.ref, 'refs/tags/vdev') }} - type=semver,pattern={{major}},enable=${{ startsWith(github.ref, 'refs/tags/v') && !startsWith(github.ref, 'refs/tags/vdev') }} + type=semver,pattern={{version}},enable=${{ startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, 'dev') }} + type=semver,pattern={{major}}.{{minor}},enable=${{ startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, 'dev') }} + type=semver,pattern={{major}},enable=${{ startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, 'dev') }} # For main branch type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }} type=raw,value=stable,enable=${{ github.ref == 'refs/heads/main' }} @@ -153,17 +226,15 @@ jobs: CURRENT_VERSION="${{ needs.prepare.outputs.version }}" BRANCH="$REF_NAME" - # Find the last tag based on branch - # For develop: look for vdev.* tags only - # For main/tags: look for v* tags (excluding vdev.*) + # Find the last tag for changelog comparison if [[ "$BRANCH" == "develop" ]]; then - # Get all dev tags, sort by version number, take the last one - LAST_TAG=$(git tag -l 'vdev.*' --sort=-version:refname | head -n1 || echo "") - echo "Looking for dev tags only" + # Get last dev tag (old vdev.* or new v*-dev format), sorted by creation date + LAST_TAG=$(git tag -l --sort=-creatordate | grep -E '(^vdev\.|.*-dev$)' | head -n1 || echo "") + echo "Looking for dev tags" else - # Get stable tags (v* but not vdev.*), sort by version, take the last one - LAST_TAG=$(git tag -l 'v*' --sort=-version:refname | grep -v '^vdev\.' 
| head -n1 || echo "") - echo "Looking for stable tags only" + # Get last stable tag (exclude all dev tags) + LAST_TAG=$(git tag -l 'v*' --sort=-version:refname | grep -vE '(^vdev\.|.*-dev$)' | head -n1 || echo "") + echo "Looking for stable tags" fi echo "Branch: $BRANCH" @@ -293,7 +364,7 @@ jobs: tag_name: ${{ startsWith(github.ref, 'refs/tags/') && github.ref_name || format('v{0}', needs.prepare.outputs.version) }} # CRITICAL: Create tag on the current commit, not the default branch target_commitish: ${{ github.sha }} - name: ${{ github.ref_name == 'develop' && format('Development Build {0}', needs.prepare.outputs.version) || format('Release {0}', needs.prepare.outputs.version) }} + name: ${{ needs.prepare.outputs.is_prerelease == 'true' && format('Development Build {0}', needs.prepare.outputs.version) || format('Release {0}', needs.prepare.outputs.version) }} body_path: release_notes.md draft: false prerelease: ${{ needs.prepare.outputs.is_prerelease }} diff --git a/.gitignore b/.gitignore index a0a11b7..064417f 100644 --- a/.gitignore +++ b/.gitignore @@ -28,7 +28,8 @@ venv.bak/ # IDE .idea/ -.vscode/ +.vscode/* +!.vscode/settings.json *.swp *.swo *~ diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..3a1d789 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "github.copilot.chat.commitMessageGeneration.instructions": [ + { + "text": "Use Conventional Commits format: <type>(<scope>): <description>. Types: feat (minor bump), fix (patch bump), docs, style, refactor, perf, test, chore, ci. Use ! after type for breaking changes (major bump). Scopes: api, ui, auth, db, docker, sync, rules, notifications, scheduler. Description must be lowercase, imperative mood, no period at end. English only." + } + ] +}