diff --git a/.vscode/launch.json b/.vscode/launch.json index 689385c..aa02b9d 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -50,6 +50,8 @@ "4000" ], "envFile": "${workspaceFolder}/backend/.env", + "preLaunchTask": "docker:mongo:start", + "postDebugTask": "docker:mongo:stop", "console": "internalConsole", "justMyCode": true } diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 31cf3db..b13c61f 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -55,6 +55,18 @@ } } }, + { + "label": "docker:mongo:start", + "type": "shell", + "command": "docker compose -f ${workspaceFolder}/docker-compose.yml up -d mongo", + "problemMatcher": [] + }, + { + "label": "docker:mongo:stop", + "type": "shell", + "command": "docker compose -f ${workspaceFolder}/docker-compose.yml stop mongo", + "problemMatcher": [] + }, { "label": "mcp:install", "type": "shell", diff --git a/apps/web/src/App.jsx b/apps/web/src/App.jsx index ce0c89f..a84bfbf 100644 --- a/apps/web/src/App.jsx +++ b/apps/web/src/App.jsx @@ -4,6 +4,7 @@ import ImportModal from './components/ImportModal.jsx'; import UploadModal from './components/UploadModal.jsx'; import RepoModal from './components/RepoModal.jsx'; import LibrarySummary from './components/LibrarySummary.jsx'; +import RepoScans from './components/RepoScans.jsx'; import heroLogo from './assets/logo.png'; export default function App() { @@ -15,6 +16,7 @@ export default function App() { const [isMenuOpen, setIsMenuOpen] = useState(false); const [isUploadModalOpen, setIsUploadModalOpen] = useState(false); const [isRepoModalOpen, setIsRepoModalOpen] = useState(false); + const [activePage, setActivePage] = useState('home'); // home | repo-scans const menuRef = useRef(null); const loadLibraries = async () => { @@ -60,88 +62,114 @@ export default function App() { return (
     <div className="page">
-      <header>
-        <div className="hero">
-          <img src={heroLogo} alt="LicenGuard" className="hero-logo" />
-          <div>
-            <p className="eyebrow">LicenGuard</p>
-            <h1>License Intelligence Platform</h1>
-          </div>
-        </div>
-      </header>
-
-      <section className="panel">
-        <div>
-          <input
-            type="search"
-            value={query}
-            onChange={event => setQuery(event.target.value)}
-          />
-          <button type="button" onClick={() => setIsMenuOpen(!isMenuOpen)}>
-            Add
-          </button>
-          {isMenuOpen && (
-            <div ref={menuRef}>
-              <button type="button" onClick={() => setIsImportModalOpen(true)}>Import</button>
-              <button type="button" onClick={() => setIsUploadModalOpen(true)}>Upload</button>
-              <button type="button" onClick={() => setIsRepoModalOpen(true)}>Repo</button>
-            </div>
-          )}
-        </div>
-
-        <ImportModal isOpen={isImportModalOpen} onClose={() => setIsImportModalOpen(false)} onImported={loadLibraries} />
-        <UploadModal
-          isOpen={isUploadModalOpen}
-          onClose={() => {
-            setIsUploadModalOpen(false);
-            loadLibraries();
-          }}
-          onImported={loadLibraries}
-        />
-        <RepoModal
-          isOpen={isRepoModalOpen}
-          onClose={() => setIsRepoModalOpen(false)}
-          onImported={loadLibraries}
-        />
-
-        {loading && <p>Loading libraries...</p>}
-        {error && <p>{error}</p>}
-
-        {!loading && !error && <LibrarySummary libraries={libraries} />}
-      </section>
+      <header className="hero">
+        <div className="hero__brand">
+          <img src={heroLogo} alt="LicenGuard" className="hero-logo" />
+          <div>
+            <p className="eyebrow">LicenGuard</p>
+            <h1>License Intelligence Platform</h1>
+          </div>
+        </div>
+        <nav className="hero-nav">
+          <button
+            type="button"
+            className={`hero-nav__item${activePage === 'home' ? ' is-active' : ''}`}
+            onClick={() => setActivePage('home')}
+          >
+            Libraries
+          </button>
+          <button
+            type="button"
+            className={`hero-nav__item${activePage === 'repo-scans' ? ' is-active' : ''}`}
+            onClick={() => setActivePage('repo-scans')}
+          >
+            Repo Scans
+          </button>
+        </nav>
+      </header>
+
+      {activePage === 'home' && (
+        <>
+          <section className="panel">
+            <div>
+              <input
+                type="search"
+                value={query}
+                onChange={event => setQuery(event.target.value)}
+              />
+              <button type="button" onClick={() => setIsMenuOpen(!isMenuOpen)}>
+                Add
+              </button>
+              {isMenuOpen && (
+                <div ref={menuRef}>
+                  <button type="button" onClick={() => setIsImportModalOpen(true)}>Import</button>
+                  <button type="button" onClick={() => setIsUploadModalOpen(true)}>Upload</button>
+                  <button type="button" onClick={() => setIsRepoModalOpen(true)}>Repo</button>
+                </div>
+              )}
+            </div>
+
+            <ImportModal isOpen={isImportModalOpen} onClose={() => setIsImportModalOpen(false)} onImported={loadLibraries} />
+            <UploadModal
+              isOpen={isUploadModalOpen}
+              onClose={() => {
+                setIsUploadModalOpen(false);
+                loadLibraries();
+              }}
+              onImported={loadLibraries}
+            />
+            <RepoModal
+              isOpen={isRepoModalOpen}
+              onClose={() => setIsRepoModalOpen(false)}
+              onImported={loadLibraries}
+            />
+
+            {loading && <p>Loading libraries...</p>}
+            {error && <p>{error}</p>}
+
+            {!loading && !error && <LibrarySummary libraries={libraries} />}
+          </section>
+        </>
+      )}
+
+      {activePage === 'repo-scans' && (
+        <RepoScans />
+      )}
     </div>
); diff --git a/apps/web/src/api/client.js b/apps/web/src/api/client.js index 820ce8c..ee15a7f 100644 --- a/apps/web/src/api/client.js +++ b/apps/web/src/api/client.js @@ -68,3 +68,12 @@ export async function analyzeFileUpload(file) { } return res.json(); } + +export function fetchRepositoryScans({ limit = 100, query } = {}) { + const params = new URLSearchParams(); + if (limit) params.append('limit', String(limit)); + if (query) params.append('q', query); + const searchPath = query ? '/repository-scans/search' : '/repository-scans'; + const suffix = params.toString() ? `?${params.toString()}` : ''; + return request(`${searchPath}${suffix}`); +} diff --git a/apps/web/src/components/RepoScans.jsx b/apps/web/src/components/RepoScans.jsx new file mode 100644 index 0000000..6a9ead2 --- /dev/null +++ b/apps/web/src/components/RepoScans.jsx @@ -0,0 +1,147 @@ +import { useEffect, useState } from 'react'; +import { fetchRepositoryScans } from '../api/client.js'; + +export default function RepoScans() { + const [repoScans, setRepoScans] = useState([]); + const [repoLoading, setRepoLoading] = useState(false); + const [repoError, setRepoError] = useState(null); + const [repoQuery, setRepoQuery] = useState(''); + const [formatter] = useState(() => { + const tz = Intl.DateTimeFormat().resolvedOptions().timeZone; + return new Intl.DateTimeFormat(undefined, { + year: 'numeric', + month: '2-digit', + day: '2-digit', + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + timeZone: tz + }); + }); + + const loadRepoScans = async (queryText = '') => { + try { + setRepoLoading(true); + const data = await fetchRepositoryScans({ query: queryText || undefined }); + setRepoScans(data || []); + setRepoError(null); + } catch (err) { + setRepoError(err.message); + } finally { + setRepoLoading(false); + } + }; + + useEffect(() => { + loadRepoScans(repoQuery); + }, [repoQuery]); + + const handleSearch = (e) => { + e?.preventDefault(); + loadRepoScans(repoQuery); + }; + + return ( +
+    <section className="panel repo-scans">
+      <div className="repo-scans__header">
+        <div>
+          <h2>Repository Taramaları</h2>
+          <p className="muted">
+            Repo linki ile yapılan taramaları buradan listeleyip, hangi repoda hangi kütüphane var takip edeceğiz.
+            Detaylar geldiğinde bu sayfayı genişleteceğiz.
+          </p>
+        </div>
+      </div>
+      <div className="repo-scans__table">
+        <form className="repo-scans__search" onSubmit={handleSearch}>
+          <input
+            type="search"
+            value={repoQuery}
+            onChange={e => setRepoQuery(e.target.value)}
+          />
+          <button type="submit" disabled={repoLoading}>Ara</button>
+        </form>
+        {repoLoading && (
+          <div className="repo-scans__row repo-scans__row--empty">
+            <div className="repo-scans__empty">
+              <strong>Yükleniyor...</strong>
+              <p className="muted">Repo kayıtları getiriliyor.</p>
+            </div>
+          </div>
+        )}
+        {repoError && (
+          <div className="repo-scans__row repo-scans__row--empty">
+            <div className="repo-scans__empty">
+              <strong>Bir hata oluştu.</strong>
+              <p className="muted">{repoError}</p>
+            </div>
+          </div>
+        )}
+        {!repoLoading && !repoError && repoScans.length === 0 && (
+          <div className="repo-scans__row repo-scans__row--empty">
+            <div className="repo-scans__empty">
+              <strong>Henüz kayıt yok.</strong>
+              <p className="muted">İlk repo taraması geldiğinde burada görünecek.</p>
+            </div>
+          </div>
+        )}
+        {!repoLoading && !repoError && repoScans.length > 0 && (
+          <div className="repo-scans__list">
+            {repoScans.map(scan => {
+              const deps = scan.dependencies || [];
+              const repoLabel = scan.repository_name || scan.repository_url;
+              const lastUpdated = scan.updatedAt || scan.updated_at; // fallback just in case
+              return (
+                <div key={scan.repository_url || scan._id} className="repo-scans__group">
+                  <div className="repo-scans__repo-heading">
+                    <div className="repo-scans__repo-meta">
+                      <span className="repo-scans__repo-title">
+                        {scan.repository_url ? (
+                          <a href={scan.repository_url} target="_blank" rel="noreferrer">{repoLabel}</a>
+                        ) : repoLabel}
+                      </span>
+                      {scan.repository_platform && <span className="repo-scans__platform">{scan.repository_platform}</span>}
+                    </div>
+                    {lastUpdated && (
+                      <span className="repo-scans__date repo-scans__date--right">
+                        Son tarama: {formatter.format(new Date(lastUpdated))}
+                      </span>
+                    )}
+                  </div>
+                  <div className="repo-scans__deps">
+                    {deps.length === 0 && (
+                      <div className="repo-scans__row repo-scans__row--empty">
+                        <div className="repo-scans__empty">
+                          <strong>Dependency dosyası bulunamadı.</strong>
+                        </div>
+                      </div>
+                    )}
+                    {deps.map((dep, idx) => (
+                      <div key={idx} className="repo-scans__dep">
+                        <div className="repo-scans__path">{dep.library_path}</div>
+                        <ul className="repo-scans__libs">
+                          {(dep.libraries || []).map((lib, lidx) => (
+                            <li key={lidx}>
+                              <span className="repo-scans__lib-name">{lib.library_name}</span>
+                              <span className="repo-scans__lib-version">versiyon {lib.library_version || 'unknown'}</span>
+                            </li>
+                          ))}
+                          {(dep.libraries || []).length === 0 && (
+                            <li>Kütüphane bulunamadı</li>
+                          )}
+                        </ul>
+                      </div>
+                    ))}
+                  </div>
+                </div>
+              );
+            })}
+          </div>
+        )}
+      </div>
+    </section>
+  );
+}
diff --git a/apps/web/src/styles.css b/apps/web/src/styles.css
index 7e5bb84..75237ef 100644
--- a/apps/web/src/styles.css
+++ b/apps/web/src/styles.css
@@ -17,15 +17,19 @@ body {
   color: #0f172a;
 }
 
-header {
+.hero {
+  display: flex;
+  align-items: center;
+  gap: 1.5rem;
+  margin-bottom: 1.5rem;
   color: #e2e8f0;
+  justify-content: space-between;
 }
 
-.hero {
+.hero__brand {
   display: flex;
   align-items: center;
   gap: 1.5rem;
-  margin-bottom: 1.5rem;
 }
 
 .hero-logo {
@@ -38,6 +42,27 @@ header {
   display: inline-flex;
 }
 
+.hero-nav {
+  display: inline-flex;
+  gap: 0.4rem;
+}
+
+.hero-nav__item {
+  border: 1px solid rgba(255, 255, 255, 0.1);
+  background: rgba(255, 255, 255, 0.04);
+  color: #e2e8f0;
+  padding: 0.55rem 0.9rem;
+  border-radius: 12px;
+  cursor: pointer;
+  transition: background 0.2s ease, color 0.2s ease, border-color 0.2s ease, box-shadow 0.2s ease;
+}
+
+.hero-nav__item.is-active {
+  background: linear-gradient(135deg, #6d28d9, #a21caf);
+  border-color: transparent;
+  box-shadow: 0 15px 35px rgba(82, 0, 148, 0.35);
+}
+
 .panel {
   background: #ffffff;
   border-radius: 16px;
@@ -199,6 +224,215 @@
   border-radius: 0;
 }
 
+.repo-scans {
+  display: flex;
+  flex-direction: column;
+  gap: 1rem;
+}
+
+.repo-scans__header {
+  display: flex;
+  justify-content: space-between;
+  gap: 1rem;
+  align-items: center;
+  border-bottom: 1px solid #e2e8f0;
+  padding-bottom: 0.75rem;
+}
+
+.repo-scans__header h2 {
+  margin: 0.15rem 0;
+}
+
+.eyebrow {
+  font-size: 0.85rem;
+  letter-spacing: 0.05em;
+  text-transform: uppercase;
+  color: #94a3b8;
+  margin: 0;
+}
+
+.muted {
+  color: #94a3b8;
+  margin: 0.1rem 0 0;
+}
+
+.primary-btn {
+  padding: 0.7rem 1rem;
+  border-radius: 12px;
+  border: none;
+  background: linear-gradient(135deg, #6d28d9, #a21caf);
+  color: #fff;
+  font-weight: 700;
+  cursor: pointer;
+  box-shadow: 0 15px 35px rgba(82, 0, 148, 0.35);
+}
+
+.repo-scans__table {
+  display: flex;
+  flex-direction: column;
+  gap: 0.5rem;
+}
+
+.repo-scans__search {
+  margin-bottom: 0.5rem;
+  display: flex;
+  gap: 0.4rem;
+}
+
+.repo-scans__search input {
+  width: 100%;
+  border-radius: 10px;
+  border: 1px solid rgba(255, 255, 255, 0.16);
+  padding: 0.65rem 0.9rem;
+  background: rgba(255, 255, 255, 0.04);
+  color: #1b1e22;
+  box-shadow: 0 12px 24px rgba(15, 23, 42, 0.25);
+}
+
+.repo-scans__search button {
+  border: none;
+  border-radius: 10px;
+  padding: 0.65rem 1rem;
+  background: linear-gradient(135deg, #6d28d9, #a21caf);
+  color: white;
+  font-weight: 700;
+  cursor: pointer;
+  box-shadow: 0 12px 24px rgba(82, 0, 148, 0.35);
+}
+
+.repo-scans__search button:disabled {
+  opacity: 0.7;
+  cursor: not-allowed;
+}
+
+.repo-scans__row {
+  display: grid;
+  grid-template-columns: 2fr 1fr 1fr 1fr;
+  gap: 0.75rem;
+  padding: 0.75rem 0.9rem;
+  border-radius: 12px;
+  align-items: center;
+}
+
+.repo-scans__row--head {
+  background: #f8fafc;
+  color: #0f172a;
+  font-weight: 700;
+}
+
+.repo-scans__row--empty {
+  background: linear-gradient(135deg, rgba(109, 40, 217, 0.08), rgba(162, 28, 175, 0.06));
+  border: 1px dashed rgba(109, 40, 217, 0.3);
+}
+
+.repo-scans__empty {
+  grid-column: 1 / -1;
+}
+
+.repo-scans__list {
+  display: flex;
+  flex-direction: column;
+  gap: 1rem;
+}
+
+.repo-scans__group {
+  border: 1px solid rgba(226, 232, 240, 0.6);
+  border-radius: 12px;
+  padding: 0.75rem 0.9rem;
+  background: #fff;
+  box-shadow: 0 10px 30px rgba(15, 23, 42, 0.08);
+}
+
+.repo-scans__repo-heading {
+  display: flex;
+  align-items: center;
+  gap: 0.5rem;
+  margin-bottom: 0.5rem;
+  flex-wrap: wrap;
justify-content: space-between; +} + +.repo-scans__repo-meta { + display: inline-flex; + align-items: center; + gap: 0.5rem; +} +.repo-scans__repo-title a { + color: #1d4ed8; + text-decoration: none; + font-weight: 800; + font-size: 1.05rem; +} + +.repo-scans__repo-title a:hover { + text-decoration: underline; +} + +.repo-scans__repo-title { + font-weight: 800; + font-size: 1.05rem; +} + +.repo-scans__platform { + background: #e0f2fe; + color: #0369a1; + padding: 0.15rem 0.5rem; + border-radius: 8px; + font-size: 0.85rem; + font-weight: 700; +} + +.repo-scans__date { + color: #475569; + font-size: 0.9rem; +} + +.repo-scans__date--right { + margin-left: auto; +} + +.repo-scans__deps { + display: flex; + flex-direction: column; + gap: 0.6rem; +} + +.repo-scans__dep { + padding: 0.6rem 0.75rem; + border-radius: 10px; + background: #f8fafc; + border: 1px solid #e2e8f0; +} + +.repo-scans__path { + font-family: 'SFMono-Regular', Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace; + color: #0f172a; + font-weight: 700; + word-break: break-all; +} + +.repo-scans__libs { + margin: 0.3rem 0 0; + padding-left: 1.1rem; + display: flex; + flex-direction: column; + gap: 0.25rem; +} + +.repo-scans__libs li { + list-style: disc; + color: #1f2937; +} + +.repo-scans__lib-name { + font-weight: 700; +} + +.repo-scans__lib-version { + margin-left: 0.35rem; + color: #475569; +} + .summary-list { list-style: none; margin: 0; diff --git a/backend/app/controllers/repository_scan_controller.py b/backend/app/controllers/repository_scan_controller.py new file mode 100644 index 0000000..99ea7a0 --- /dev/null +++ b/backend/app/controllers/repository_scan_controller.py @@ -0,0 +1,126 @@ +from datetime import datetime +from typing import List +import re +from bson import ObjectId +from fastapi import HTTPException +from pymongo import ReturnDocument +from ..database import get_database +from ..models.repository_scan import ( + RepositoryScanCreate, + RepositoryScanDocument, + RepositoryScanUpdate +) + + +collection = get_database()['repository_scans'] + + +async def list_repository_scans(limit: int | None = None) -> List[RepositoryScanDocument]: + cursor = collection.find().sort('updatedAt', -1) + if limit and limit > 0: + cursor = cursor.limit(limit) + docs = [RepositoryScanDocument(**doc) async for doc in cursor] + return docs + + +async def get_repository_scan(scan_id: str) -> RepositoryScanDocument: + try: + oid = ObjectId(scan_id) + except Exception: + raise HTTPException(status_code=400, detail='Invalid scan id') + doc = await collection.find_one({'_id': oid}) + if not doc: + raise HTTPException(status_code=404, detail='Scan not found') + return RepositoryScanDocument(**doc) + + +async def create_repository_scan(payload: RepositoryScanCreate) -> RepositoryScanDocument: + data = payload.model_dump(by_alias=True) + now = datetime.utcnow() + data['createdAt'] = data.get('createdAt') or now + data['updatedAt'] = now + + query = {'repository_url': data.get('repository_url')} + if not query['repository_url']: + query = { + 'repository_platform': data.get('repository_platform'), + 'repository_name': data.get('repository_name') + } + + # Upsert: replace dependencies with latest scan and refresh metadata + doc = await collection.find_one_and_update( + query, + { + '$set': { + 'repository_url': data.get('repository_url'), + 'repository_platform': data.get('repository_platform'), + 'repository_name': data.get('repository_name'), + 'dependencies': data.get('dependencies', []), + 'updatedAt': data['updatedAt'], 
+ }, + '$setOnInsert': {'createdAt': data.get('createdAt')}, + }, + upsert=True, + return_document=ReturnDocument.AFTER + ) + + # If upsert inserted, doc may be None in Motor <3.4; fetch explicitly + if doc is None: + doc = await collection.find_one(query) + return RepositoryScanDocument(**doc) + + +async def update_repository_scan(scan_id: str, payload: RepositoryScanUpdate) -> RepositoryScanDocument: + try: + oid = ObjectId(scan_id) + except Exception: + raise HTTPException(status_code=400, detail='Invalid scan id') + update_data = {k: v for k, v in payload.model_dump(by_alias=True).items() if v is not None} + if not update_data: + doc = await collection.find_one({'_id': oid}) + if not doc: + raise HTTPException(status_code=404, detail='Scan not found') + return RepositoryScanDocument(**doc) + update_data['updatedAt'] = datetime.utcnow() + doc = await collection.find_one_and_update( + {'_id': oid}, + {'$set': update_data}, + return_document=ReturnDocument.AFTER + ) + if not doc: + raise HTTPException(status_code=404, detail='Scan not found') + return RepositoryScanDocument(**doc) + + +async def search_repository_scans(query: str, limit: int | None = None) -> List[RepositoryScanDocument]: + regex = {'$regex': query, '$options': 'i'} + cursor = collection.find({ + '$or': [ + {'repository_url': regex}, + {'repository_name': regex}, + {'repository_platform': regex}, + {'dependencies.library_path': regex}, + {'dependencies.libraries.library_name': regex}, + {'dependencies.libraries.library_version': regex}, + ] + }).sort('updatedAt', -1) + if limit and limit > 0: + cursor = cursor.limit(limit) + docs = [RepositoryScanDocument(**doc) async for doc in cursor] + + # Filter dependencies/libraries to only the matching rows so search results are concise + pattern = re.compile(query, re.IGNORECASE) + filtered_docs: List[RepositoryScanDocument] = [] + for doc in docs: + filtered_deps = [] + for dep in doc.dependencies or []: + matched_libs = [ + lib for lib in dep.libraries or [] + if pattern.search(lib.library_name or '') or pattern.search(lib.library_version or '') + ] + if matched_libs: + filtered_deps.append(type(dep)(library_path=dep.library_path, libraries=matched_libs)) + if filtered_deps: + doc.dependencies = filtered_deps + filtered_docs.append(doc) + return filtered_docs diff --git a/backend/app/main.py b/backend/app/main.py index 5931ed9..66bd295 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -6,6 +6,7 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from .views.library_view import router as library_router +from .views.repository_scan_view import router as repository_scan_router from dotenv import load_dotenv import os from pathlib import Path @@ -35,6 +36,7 @@ async def health(): return {'status': 'ok'} app.include_router(library_router) + app.include_router(repository_scan_router) return app diff --git a/backend/app/models/repository_scan.py b/backend/app/models/repository_scan.py new file mode 100644 index 0000000..9b0fffe --- /dev/null +++ b/backend/app/models/repository_scan.py @@ -0,0 +1,49 @@ +from datetime import datetime +from typing import List, Optional +from bson import ObjectId +from pydantic import BaseModel, Field, ConfigDict +from .library import PyObjectId + + +class RepoLibrary(BaseModel): + library_name: str = Field(..., description='Discovered library name') + library_version: str = Field(..., description='Discovered library version') + model_config = ConfigDict(populate_by_name=True) + + +class RepoDependency(BaseModel): 
+ library_path: str = Field(..., description='Path of the dependency manifest inside the repository') + libraries: List[RepoLibrary] = Field(default_factory=list, description='Libraries found in this file') + + +class RepositoryScanBase(BaseModel): + repository_url: str = Field(..., description='Full repository URL (e.g., GitHub repo URL)') + repository_platform: str = Field(..., description='Platform of the repository (github/gitlab/bitbucket/etc)') + repository_name: str = Field(..., description='Repository name') + dependencies: List[RepoDependency] = Field(default_factory=list, description='Grouped dependencies by file') + model_config = ConfigDict(populate_by_name=True) + + +class RepositoryScanCreate(RepositoryScanBase): + createdAt: Optional[datetime] = Field(default=None, description='Creation timestamp (optional)') + updatedAt: Optional[datetime] = Field(default=None, description='Last update timestamp (optional)') + + +class RepositoryScanUpdate(BaseModel): + repository_url: Optional[str] = None + repository_platform: Optional[str] = None + repository_name: Optional[str] = None + dependencies: Optional[List[RepoDependency]] = None + model_config = ConfigDict(populate_by_name=True) + + +class RepositoryScanDocument(RepositoryScanBase): + id: PyObjectId = Field(default_factory=PyObjectId, alias='_id') + createdAt: datetime = Field(default_factory=datetime.utcnow) + updatedAt: datetime = Field(default_factory=datetime.utcnow) + model_config = ConfigDict( + populate_by_name=True, + arbitrary_types_allowed=True, + json_encoders={ObjectId: str, PyObjectId: str}, + extra='allow' + ) diff --git a/backend/app/views/library_view.py b/backend/app/views/library_view.py index 7a69756..172bdcd 100644 --- a/backend/app/views/library_view.py +++ b/backend/app/views/library_view.py @@ -11,6 +11,7 @@ search_libraries_local, update_library ) +from ..controllers.repository_scan_controller import create_repository_scan from ..models.library import ( LibraryCreate, LibraryDocument, @@ -18,8 +19,10 @@ LibraryUpdate, VersionModel ) +from ..models.repository_scan import RepositoryScanCreate from ..services.mcp_client import get_mcp_http_client, MCPClientError from ..services.repo_scanner import clone_and_scan +from urllib.parse import urlparse router = APIRouter(prefix='/libraries', tags=['libraries']) @@ -75,6 +78,14 @@ def compute_risk_from_license(license_name=None, license_summary=None, confidenc return {'level': level, 'score': score, 'explanation': explanation} +def _infer_repo_meta(repo_url: str) -> tuple[str, str]: + parsed = urlparse(repo_url) + platform = (parsed.hostname or '').split('.')[-2] if parsed.hostname else 'unknown' + path_parts = (parsed.path or '').strip('/').split('/') + repo_name = '/'.join(path_parts[:2]) if len(path_parts) >= 2 else (path_parts[0] if path_parts else 'unknown') + return platform or 'unknown', repo_name or 'unknown' + + def pick_version_match(versions, target): if not versions: return None @@ -340,6 +351,33 @@ async def handle_repo_scan(payload: dict): raise HTTPException(status_code=503, detail='MCP HTTP client not configured') scan_data = await perform_repo_scan(repo_url, client) + # Persist summarized scan to repository_scans collection + platform, repo_name = _infer_repo_meta(repo_url) + try: + payload = RepositoryScanCreate( + repository_url=repo_url, + repository_platform=platform, + repository_name=repo_name, + dependencies=[ + { + "library_path": file.get("path"), + "libraries": [ + { + "library_name": dep.get("name"), + "library_version": 
normalize_version(dep.get("version")) or dep.get("version") or "unknown" + } + for dep in (file.get("report", {}).get("dependencies") or []) + if dep.get("name") + ], + } + for file in scan_data.get("analyzed_files", []) + ], + ) + await create_repository_scan(payload) + except Exception as exc: + # Do not block response; just log. + print(f'{datetime.utcnow().isoformat()} [repo_scan] failed to persist scan: {exc}') + return {"url": repo_url, "files": scan_data["analyzed_files"], "dependencies": scan_data["dependencies"]} @@ -362,6 +400,32 @@ async def handle_repo_scan_highest_risk(payload: dict): dependencies = scan_data["dependencies"] analyzed_files = scan_data["analyzed_files"] + # Persist summarized scan to repository_scans collection + platform, repo_name = _infer_repo_meta(repo_url) + try: + payload = RepositoryScanCreate( + repository_url=repo_url, + repository_platform=platform, + repository_name=repo_name, + dependencies=[ + { + "library_path": file.get("path"), + "libraries": [ + { + "library_name": dep.get("name"), + "library_version": normalize_version(dep.get("version")) or dep.get("version") or "unknown" + } + for dep in (file.get("report", {}).get("dependencies") or []) + if dep.get("name") + ], + } + for file in analyzed_files + ], + ) + await create_repository_scan(payload) + except Exception as exc: + print(f'{datetime.utcnow().isoformat()} [repo_scan_highest] failed to persist scan: {exc}') + deps_with_scores = [d for d in dependencies if d.get('risk_score') is not None] if deps_with_scores: global_top_score = max(d['risk_score'] for d in deps_with_scores if d.get('risk_score') is not None) diff --git a/backend/app/views/repository_scan_view.py b/backend/app/views/repository_scan_view.py new file mode 100644 index 0000000..30bebf3 --- /dev/null +++ b/backend/app/views/repository_scan_view.py @@ -0,0 +1,53 @@ +from typing import List +from fastapi import APIRouter, HTTPException, Query, Path +from ..controllers.repository_scan_controller import ( + create_repository_scan, + get_repository_scan, + list_repository_scans, + search_repository_scans, + update_repository_scan +) +from ..models.repository_scan import ( + RepositoryScanCreate, + RepositoryScanDocument, + RepositoryScanUpdate +) + + +router = APIRouter(prefix='/repository-scans', tags=['repository-scans']) + + +@router.get('/', response_model=List[RepositoryScanDocument]) +async def handle_list_repository_scans(limit: int | None = Query(default=None, gt=0)): + return await list_repository_scans(limit) + + +@router.get('/search', response_model=List[RepositoryScanDocument]) +async def handle_search_repository_scans( + q: str = Query(..., min_length=1, description='Search text'), + limit: int | None = Query(default=None, gt=0) +): + return await search_repository_scans(q, limit) + + +# Backward compatibility: keep /{scan_id} but hide from docs to avoid /search collision +@router.get('/{scan_id}', include_in_schema=False, response_model=RepositoryScanDocument) +async def handle_get_repository_scan_legacy(scan_id: str = Path(..., pattern=r'^[0-9a-fA-F]{24}$')): + return await get_repository_scan(scan_id) + + +@router.get('/id/{scan_id}', response_model=RepositoryScanDocument) +async def handle_get_repository_scan(scan_id: str = Path(..., pattern=r'^[0-9a-fA-F]{24}$')): + return await get_repository_scan(scan_id) + + +@router.post('/', response_model=RepositoryScanDocument, status_code=201) +async def handle_create_repository_scan(payload: RepositoryScanCreate): + return await create_repository_scan(payload) + + 
+@router.patch('/id/{scan_id}', response_model=RepositoryScanDocument) +async def handle_update_repository_scan(scan_id: str, payload: RepositoryScanUpdate): + if payload is None: + raise HTTPException(status_code=400, detail='Body required') + return await update_repository_scan(scan_id, payload)
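
Not part of the diff: a minimal smoke-test sketch for the new /repository-scans endpoints added above. It assumes the backend from this branch is running locally; the base URL and port are a guess taken from the VS Code launch config (which passes "4000" as an argument), and the repository URL in the payload is hypothetical.

# smoke_test_repository_scans.py (sketch, not included in this PR)
import httpx

BASE_URL = 'http://localhost:4000'  # assumption: backend served locally on port 4000

payload = {
    'repository_url': 'https://github.com/example/demo-repo',  # hypothetical repo
    'repository_platform': 'github',
    'repository_name': 'example/demo-repo',
    'dependencies': [
        {
            'library_path': 'requirements.txt',
            'libraries': [
                {'library_name': 'fastapi', 'library_version': '0.110.0'}
            ]
        }
    ]
}

with httpx.Client(base_url=BASE_URL, timeout=10) as client:
    # POST upserts by repository_url, so re-running refreshes the same document.
    created = client.post('/repository-scans/', json=payload).json()
    print(created['repository_name'], created['updatedAt'])

    # Plain listing, most recently updated scan first.
    print(client.get('/repository-scans/', params={'limit': 10}).json())

    # Case-insensitive search across repo metadata and discovered libraries.
    print(client.get('/repository-scans/search', params={'q': 'fastapi'}).json())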