diff --git a/dream-server/dream-cli b/dream-server/dream-cli index af8e4175..9c0196f8 100755 --- a/dream-server/dream-cli +++ b/dream-server/dream-cli @@ -752,6 +752,30 @@ cmd_audit() { python3 "$INSTALL_DIR/scripts/audit-extensions.py" --project-dir "$INSTALL_DIR" "${script_args[@]}" } +cmd_audit() { + check_install + sr_load + + local -a script_args=() + while [[ $# -gt 0 ]]; do + case "$1" in + --json|--strict) + script_args+=("$1") + ;; + --help|-h) + python3 "$INSTALL_DIR/scripts/audit-extensions.py" --help + return 0 + ;; + *) + script_args+=("$(resolve_service "$1")") + ;; + esac + shift + done + + python3 "$INSTALL_DIR/scripts/audit-extensions.py" --project-dir "$INSTALL_DIR" "${script_args[@]}" +} + #============================================================================= # Extension Management Commands #============================================================================= @@ -1524,7 +1548,7 @@ ${CYAN}Commands:${NC} chat "" Quick chat with the LLM benchmark Run a quick performance test doctor [report] Run diagnostics and write JSON report - audit [extensions] Audit extension manifests and compose contracts + audit [service] Audit extension manifests and feature contracts help Show this help ${CYAN}Preset Commands:${NC} @@ -1591,8 +1615,7 @@ ${CYAN}Examples:${NC} dream restart stt # Restart Whisper (via alias) dream chat "What is 2+2?" 
# Quick LLM test dream config edit # Edit .env file - dream audit # Audit every extension contract - dream audit --json whisper # Audit one service as JSON + dream audit --json whisper # Audit one extension as JSON ${CYAN}Environment:${NC} DREAM_HOME Installation directory (default: ~/dream-server) diff --git a/dream-server/scripts/audit-extensions.py b/dream-server/scripts/audit-extensions.py index c8cd5d32..9d1738ef 100644 --- a/dream-server/scripts/audit-extensions.py +++ b/dream-server/scripts/audit-extensions.py @@ -1,8 +1,5 @@ #!/usr/bin/env python3 -""" -Audit Dream Server extensions for manifest, compose, overlay, and feature -contract consistency. -""" +"""Audit Dream Server extension manifests for registry consistency.""" from __future__ import annotations @@ -16,10 +13,10 @@ import yaml +MANIFEST_NAMES = ("manifest.yaml", "manifest.yml", "manifest.json") VALID_CATEGORIES = {"core", "recommended", "optional"} VALID_TYPES = {"docker", "host-systemd"} VALID_GPU_BACKENDS = {"amd", "nvidia", "apple", "all", "none"} -MANIFEST_NAMES = ("manifest.yaml", "manifest.yml", "manifest.json") OVERLAY_SUFFIXES = { "amd": ("compose.amd.yaml", "compose.amd.yml"), "nvidia": ("compose.nvidia.yaml", "compose.nvidia.yml"), @@ -59,21 +56,14 @@ class ServiceRecord: service_type: str issues: list[Issue] = field(default_factory=list) - def add_issue( - self, - severity: str, - code: str, - message: str, - *, - path: Path | None = None, - ) -> None: + def add_issue(self, severity: str, code: str, message: str) -> None: self.issues.append( Issue( severity=severity, code=code, message=message, service=self.service_id, - path=str(path) if path else None, + path=str(self.manifest_path), ) ) @@ -86,22 +76,17 @@ def status(self) -> str: return "pass" -def parse_args() -> argparse.Namespace: - default_project = Path(__file__).resolve().parent.parent +def parse_args(argv: list[str]) -> argparse.Namespace: parser = argparse.ArgumentParser( - description="Audit Dream Server extension 
manifests and compose fragments." + description="Audit Dream Server extension manifests and feature contracts." ) parser.add_argument( "--project-dir", type=Path, - default=default_project, - help="Dream Server project directory (defaults to the repo root).", - ) - parser.add_argument( - "--json", - action="store_true", - help="Emit JSON instead of the human-readable report.", + default=Path(__file__).resolve().parent.parent, + help="Dream Server project directory (defaults to this repo).", ) + parser.add_argument("--json", action="store_true", help="Emit JSON output.") parser.add_argument( "--strict", action="store_true", @@ -110,9 +95,9 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "services", nargs="*", - help="Optional service IDs to audit. Defaults to all discovered services.", + help="Optional service IDs to audit. Defaults to all services.", ) - return parser.parse_args() + return parser.parse_args(argv) def load_document(path: Path) -> Any: @@ -130,6 +115,41 @@ def find_manifest(service_dir: Path) -> Path | None: return None +def as_list(value: Any) -> list[Any]: + if value is None: + return [] + if isinstance(value, list): + return value + return [value] + + +def as_string_list(value: Any) -> list[str]: + return [str(item) for item in as_list(value) if str(item)] + + +def parse_positive_int(value: Any) -> int | None: + if value in (None, "") or isinstance(value, bool): + return None + try: + parsed = int(value) + except (TypeError, ValueError): + return None + return parsed if parsed > 0 else None + + +def collect_service_references(feature: dict[str, Any]) -> list[str]: + refs: list[str] = [] + for path in FEATURE_SERVICE_KEYS: + target: Any = feature + for key in path: + if not isinstance(target, dict): + target = None + break + target = target.get(key) + refs.extend(as_string_list(target)) + return refs + + def resolve_compose_path(service_dir: Path, compose_file: str) -> tuple[Path | None, bool]: if not compose_file: return None, 
False @@ -145,21 +165,70 @@ def resolve_compose_path(service_dir: Path, compose_file: str) -> tuple[Path | N return enabled, False +def load_compose_service(path: Path, service_id: str) -> Any: + document = load_document(path) + if not isinstance(document, dict): + raise ValueError("compose root must be a mapping") + services = document.get("services", {}) + if not isinstance(services, dict): + raise ValueError("compose services block must be a mapping") + if services == {}: + return {} + return services.get(service_id) + + +def extract_target_ports(service_def: Any) -> list[int]: + if not isinstance(service_def, dict): + return [] + + results: list[int] = [] + for port in as_list(service_def.get("ports")): + if isinstance(port, int) and port > 0: + results.append(port) + continue + if isinstance(port, str): + tail = port.rsplit(":", 1)[-1].split("/", 1)[0] + try: + results.append(int(tail)) + except ValueError: + continue + continue + if isinstance(port, dict): + target = parse_positive_int(port.get("target")) + if target: + results.append(target) + return results + + +def ports_reference_env(service_def: Any, env_name: str) -> bool: + if not isinstance(service_def, dict) or not env_name: + return False + + needle = f"${{{env_name}" + for port in as_list(service_def.get("ports")): + if isinstance(port, str) and needle in port: + return True + if isinstance(port, dict): + published = port.get("published") + if isinstance(published, str) and env_name in published: + return True + return False + + def discover_services(project_dir: Path) -> tuple[list[ServiceRecord], list[Issue]]: ext_dir = project_dir / "extensions" / "services" records: list[ServiceRecord] = [] - global_issues: list[Issue] = [] + issues: list[Issue] = [] if not ext_dir.exists(): - global_issues.append( + return records, [ Issue( severity="error", code="extensions-dir-missing", message="extensions/services directory not found", path=str(ext_dir), ) - ) - return records, global_issues + ] for 
service_dir in sorted(ext_dir.iterdir()): if not service_dir.is_dir(): @@ -167,7 +236,7 @@ def discover_services(project_dir: Path) -> tuple[list[ServiceRecord], list[Issu manifest_path = find_manifest(service_dir) if manifest_path is None: - global_issues.append( + issues.append( Issue( severity="warning", code="manifest-missing", @@ -181,7 +250,7 @@ def discover_services(project_dir: Path) -> tuple[list[ServiceRecord], list[Issu try: manifest = load_document(manifest_path) except Exception as exc: - global_issues.append( + issues.append( Issue( severity="error", code="manifest-invalid", @@ -193,7 +262,7 @@ def discover_services(project_dir: Path) -> tuple[list[ServiceRecord], list[Issu continue if not isinstance(manifest, dict): - global_issues.append( + issues.append( Issue( severity="error", code="manifest-shape-invalid", @@ -206,7 +275,7 @@ def discover_services(project_dir: Path) -> tuple[list[ServiceRecord], list[Issu service = manifest.get("service") if not isinstance(service, dict): - global_issues.append( + issues.append( Issue( severity="error", code="service-section-missing", @@ -217,25 +286,33 @@ def discover_services(project_dir: Path) -> tuple[list[ServiceRecord], list[Issu ) continue - service_id = str(service.get("id") or service_dir.name) features = manifest.get("features") or [] if not isinstance(features, list): + issues.append( + Issue( + severity="warning", + code="features-invalid", + message="features should be a list", + service=service.get("id", service_dir.name), + path=str(manifest_path), + ) + ) features = [] compose_path, compose_enabled = resolve_compose_path( service_dir, str(service.get("compose_file") or "") ) - overlay_paths: dict[str, Path] = {} - for backend, names in OVERLAY_SUFFIXES.items(): - for name in names: - candidate = service_dir / name - if candidate.exists(): - overlay_paths[backend] = candidate - break + overlay_paths = { + backend: next( + (service_dir / name for name in names if (service_dir / name).exists()), + 
None, + ) + for backend, names in OVERLAY_SUFFIXES.items() + } records.append( ServiceRecord( - service_id=service_id, + service_id=str(service.get("id") or service_dir.name), directory_name=service_dir.name, directory=service_dir, manifest_path=manifest_path, @@ -250,160 +327,53 @@ def discover_services(project_dir: Path) -> tuple[list[ServiceRecord], list[Issu ) ) - return records, global_issues - - -def as_list(value: Any) -> list[Any]: - if value is None: - return [] - if isinstance(value, list): - return value - return [value] + records[-1].overlay_paths = { + backend: path + for backend, path in records[-1].overlay_paths.items() + if path is not None + } + return records, issues -def as_string_list(value: Any) -> list[str]: - return [str(item) for item in as_list(value) if str(item)] - - -def parse_positive_int(value: Any) -> int | None: - if value is None or value == "": - return None - if isinstance(value, bool): - return None - try: - integer = int(value) - except (TypeError, ValueError): - return None - return integer if integer > 0 else None - - -def collect_service_references(feature: dict[str, Any]) -> list[str]: - refs: list[str] = [] - for path in FEATURE_SERVICE_KEYS: - target: Any = feature - for key in path: - if not isinstance(target, dict): - target = None - break - target = target.get(key) - refs.extend(as_string_list(target)) - return refs - - -def load_compose_definitions(record: ServiceRecord) -> tuple[dict[str, Any], dict[str, Path]]: - definitions: dict[str, Any] = {} - source_paths: dict[str, Path] = {} - - def capture(label: str, path: Path) -> None: - try: - doc = load_document(path) - except Exception as exc: - record.add_issue("error", "compose-invalid", f"failed to parse compose file: {exc}", path=path) - return - - if doc is None: - doc = {} - if not isinstance(doc, dict): - record.add_issue("error", "compose-shape-invalid", "compose root must be a mapping", path=path) - return - services = doc.get("services", {}) - if not 
isinstance(services, dict): - record.add_issue("error", "compose-services-invalid", "compose services block must be a mapping", path=path) - return - - definitions[label] = services.get(record.service_id) - source_paths[label] = path - - if record.compose_path and record.compose_path.exists(): - capture("base", record.compose_path) - - for backend, path in record.overlay_paths.items(): - capture(backend, path) - - return definitions, source_paths +def filter_records(records: list[ServiceRecord], requested: list[str]) -> tuple[list[ServiceRecord], list[Issue]]: + if not requested: + return records, [] + selected = [record for record in records if record.service_id in set(requested)] + known = {record.service_id for record in records} + missing = [ + Issue( + severity="error", + code="service-not-found", + message=f"requested service '{service_id}' was not found", + service=service_id, + ) + for service_id in requested + if service_id not in known + ] + return selected, missing -def extract_target_ports(service_def: Any) -> list[int]: - if not isinstance(service_def, dict): - return [] - results: list[int] = [] - for port in as_list(service_def.get("ports")): - if isinstance(port, int): - if port > 0: - results.append(port) - continue +def validate_records(records: list[ServiceRecord], global_issues: list[Issue], reference_records: list[ServiceRecord]) -> None: + known_services = {record.service_id for record in reference_records} + alias_owners: dict[str, set[str]] = {} + feature_owners: dict[str, set[str]] = {} + service_id_owners: dict[str, set[str]] = {} - if isinstance(port, str): - tail = port.rsplit(":", 1)[-1] - tail = tail.split("/", 1)[0] - try: - results.append(int(tail)) - except ValueError: + for record in reference_records: + service_id_owners.setdefault(record.service_id, set()).add(record.directory_name) + for alias in as_string_list(record.service.get("aliases")): + alias_owners.setdefault(alias, set()).add(record.service_id) + for feature in 
record.features: + if not isinstance(feature, dict): continue - continue - - if isinstance(port, dict): - target = port.get("target") - target_int = parse_positive_int(target) - if target_int: - results.append(target_int) - return results - - -def ports_reference_env(service_def: Any, env_name: str) -> bool: - if not isinstance(service_def, dict) or not env_name: - return False - - needle = f"${{{env_name}" - for port in as_list(service_def.get("ports")): - if isinstance(port, str) and needle in port: - return True - if isinstance(port, dict): - published = port.get("published") - if isinstance(published, str) and env_name in published: - return True - return False - - -def service_has_runtime_definition(definitions: dict[str, Any]) -> bool: - for definition in definitions.values(): - if isinstance(definition, dict): - return True - return False - - -def base_is_stub(definitions: dict[str, Any]) -> bool: - base = definitions.get("base") - return base == {} or base is None - + feature_id = str(feature.get("id") or "") + if feature_id: + feature_owners.setdefault(feature_id, set()).add(record.service_id) -def validate_records( - records: list[ServiceRecord], - global_issues: list[Issue], - *, - reference_records: list[ServiceRecord] | None = None, -) -> None: - reference_records = reference_records or records - known_services = {record.service_id: record for record in reference_records} - selected_ids = set(known_services) - alias_owners: dict[str, set[str]] = {} - feature_owners: dict[str, set[str]] = {} - id_owners: dict[str, set[str]] = {} - - for ref in reference_records: - id_owners.setdefault(ref.service_id, set()).add(ref.directory_name) - for alias in as_string_list(ref.service.get("aliases")): - alias_owners.setdefault(alias, set()).add(ref.service_id) - for feature in ref.features: - if isinstance(feature, dict): - feature_id = str(feature.get("id") or "") - if feature_id: - feature_owners.setdefault(feature_id, set()).add(ref.service_id) - - for 
service_id, directories in sorted(id_owners.items()): - if len(directories) > 1: + for service_id, owners in service_id_owners.items(): + if len(owners) > 1: global_issues.append( Issue( severity="error", @@ -418,380 +388,179 @@ def validate_records( service = record.service if manifest.get("schema_version") != "dream.services.v1": - record.add_issue( - "error", - "schema-version-invalid", - "schema_version must be dream.services.v1", - path=record.manifest_path, - ) + record.add_issue("error", "schema-version-invalid", "schema_version must be dream.services.v1") if record.directory_name != record.service_id: - record.add_issue( - "error", - "service-id-directory-mismatch", - f"directory '{record.directory_name}' does not match service.id '{record.service_id}'", - path=record.manifest_path, - ) + record.add_issue("error", "service-id-directory-mismatch", "service.id must match its directory name") - name = str(service.get("name") or "").strip() - if not name: - record.add_issue("error", "service-name-missing", "service.name is required", path=record.manifest_path) + if not str(service.get("name") or "").strip(): + record.add_issue("error", "service-name-missing", "service.name is required") if record.category not in VALID_CATEGORIES: - record.add_issue( - "error", - "service-category-invalid", - f"service.category must be one of {sorted(VALID_CATEGORIES)}", - path=record.manifest_path, - ) + record.add_issue("error", "service-category-invalid", "service.category is invalid") if record.service_type not in VALID_TYPES: - record.add_issue( - "error", - "service-type-invalid", - f"service.type must be one of {sorted(VALID_TYPES)}", - path=record.manifest_path, - ) + record.add_issue("error", "service-type-invalid", "service.type is invalid") - port = parse_positive_int(service.get("port")) - if port is None: - record.add_issue("error", "service-port-invalid", "service.port must be a positive integer", path=record.manifest_path) + if 
parse_positive_int(service.get("port")) is None: + record.add_issue("error", "service-port-invalid", "service.port must be a positive integer") health = str(service.get("health") or "") if not health.startswith("/"): - record.add_issue( - "error", - "service-health-invalid", - "service.health must start with '/'", - path=record.manifest_path, - ) - - ext_port_default = service.get("external_port_default") - if ext_port_default not in (None, "") and parse_positive_int(ext_port_default) is None: - record.add_issue( - "error", - "service-external-port-invalid", - "service.external_port_default must be a positive integer when set", - path=record.manifest_path, - ) + record.add_issue("error", "service-health-invalid", "service.health must start with '/'") - external_port_env = str(service.get("external_port_env") or "") - if external_port_env and not external_port_env.replace("_", "").isalnum(): - record.add_issue( - "error", - "service-port-env-invalid", - "service.external_port_env must be shell-friendly", - path=record.manifest_path, - ) - - gpu_backends = as_string_list(service.get("gpu_backends") or ["amd", "nvidia"]) - invalid_backends = [backend for backend in gpu_backends if backend not in VALID_GPU_BACKENDS] + backends = as_string_list(service.get("gpu_backends") or ["amd", "nvidia"]) + invalid_backends = [backend for backend in backends if backend not in VALID_GPU_BACKENDS] if invalid_backends: - record.add_issue( - "error", - "service-gpu-backends-invalid", - f"unknown gpu_backends values: {', '.join(sorted(invalid_backends))}", - path=record.manifest_path, - ) + record.add_issue("error", "service-gpu-backends-invalid", f"unknown gpu_backends values: {', '.join(invalid_backends)}") - alias_list = as_string_list(service.get("aliases")) - seen_local_aliases: set[str] = set() - for alias in alias_list: - if alias in seen_local_aliases: - record.add_issue( - "error", - "alias-duplicate-local", - f"alias '{alias}' is listed more than once", - 
path=record.manifest_path, - ) + aliases = as_string_list(service.get("aliases")) + seen_aliases: set[str] = set() + for alias in aliases: + if alias in seen_aliases: + record.add_issue("error", "alias-duplicate-local", f"alias '{alias}' is listed more than once") continue - seen_local_aliases.add(alias) + seen_aliases.add(alias) owners = alias_owners.get(alias, set()) if owners - {record.service_id}: - record.add_issue( - "error", - "alias-collision", - f"alias '{alias}' already belongs to service '{sorted(owners - {record.service_id})[0]}'", - path=record.manifest_path, - ) - - for dep in as_string_list(service.get("depends_on")): - if dep not in selected_ids: - record.add_issue( - "error", - "dependency-missing", - f"depends_on references unknown service '{dep}'", - path=record.manifest_path, - ) + owner = sorted(owners - {record.service_id})[0] + record.add_issue("error", "alias-collision", f"alias '{alias}' already belongs to '{owner}'") env_vars = service.get("env_vars") if env_vars is not None and not isinstance(env_vars, list): - record.add_issue( - "error", - "env-vars-invalid", - "service.env_vars must be a list when present", - path=record.manifest_path, - ) - elif isinstance(env_vars, list): - for item in env_vars: - if not isinstance(item, dict) or not str(item.get("key") or "").strip(): - record.add_issue( - "error", - "env-var-entry-invalid", - "each service.env_vars entry must contain a non-empty key", - path=record.manifest_path, - ) + record.add_issue("error", "env-vars-invalid", "service.env_vars must be a list when present") + + if record.service_type == "docker": + compose_file = str(service.get("compose_file") or "") + if record.category != "core" and not compose_file: + record.add_issue("error", "compose-file-missing", "non-core docker services must declare service.compose_file") + elif compose_file and (record.compose_path is None or not record.compose_path.exists()): + record.add_issue("error", "compose-file-missing", f"compose file 
'{compose_file}' was not found") + elif record.compose_path and record.compose_path.exists(): + try: + base_service = load_compose_service(record.compose_path, record.service_id) + except Exception as exc: + record.add_issue("error", "compose-invalid", f"failed to parse compose file: {exc}") + base_service = None + + if record.category != "core" and base_service is None: + record.add_issue("error", "compose-service-missing", f"compose file does not define service '{record.service_id}'") + + backends_for_stub = [backend for backend in backends if backend in {"amd", "nvidia", "apple"}] + if base_service == {}: + for backend in backends_for_stub: + if backend not in record.overlay_paths: + record.add_issue("error", "overlay-required", f"stub compose requires compose.{backend}.yaml") + + definitions = [definition for definition in [base_service] if isinstance(definition, dict)] + for overlay_path in record.overlay_paths.values(): + try: + overlay_service = load_compose_service(overlay_path, record.service_id) + except Exception as exc: + record.add_issue("error", "overlay-invalid", f"failed to parse {overlay_path.name}: {exc}") + continue + if overlay_service is None: + record.add_issue("error", "overlay-service-missing", f"{overlay_path.name} does not define service '{record.service_id}'") + continue + definitions.append(overlay_service) + + container_name = str(service.get("container_name") or "") + if container_name and definitions: + if not any(definition.get("container_name") == container_name for definition in definitions): + record.add_issue("error", "container-name-mismatch", f"container_name '{container_name}' was not found in compose definitions") + + port = parse_positive_int(service.get("port")) + if port is not None and definitions: + if not any(port in extract_target_ports(definition) for definition in definitions): + record.add_issue("error", "compose-port-mismatch", f"no compose port mapping targets manifest service.port {port}") + + external_port_env = 
str(service.get("external_port_env") or "") + if external_port_env and definitions: + if not any(ports_reference_env(definition, external_port_env) for definition in definitions): + record.add_issue("warning", "compose-port-env-unused", f"compose ports do not reference '{external_port_env}'") + + if definitions and not any("healthcheck" in definition for definition in definitions): + record.add_issue("warning", "healthcheck-missing", "docker service has no healthcheck stanza in compose definitions") + + for dep in as_string_list(service.get("depends_on")): + if dep not in known_services: + record.add_issue("error", "dependency-missing", f"depends_on references unknown service '{dep}'") for feature in record.features: if not isinstance(feature, dict): - record.add_issue( - "error", - "feature-invalid", - "each feature entry must be a mapping", - path=record.manifest_path, - ) + record.add_issue("error", "feature-invalid", "each feature entry must be a mapping") continue for required in ("id", "name", "description", "category", "priority"): if feature.get(required) in (None, ""): - record.add_issue( - "error", - "feature-field-missing", - f"feature is missing required field '{required}'", - path=record.manifest_path, - ) + record.add_issue("error", "feature-field-missing", f"feature is missing required field '{required}'") feature_id = str(feature.get("id") or "") - if feature_id: - owners = feature_owners.get(feature_id, set()) - if owners - {record.service_id}: - record.add_issue( - "error", - "feature-id-collision", - f"feature id '{feature_id}' already belongs to service '{sorted(owners - {record.service_id})[0]}'", - path=record.manifest_path, - ) + owners = feature_owners.get(feature_id, set()) + if feature_id and owners - {record.service_id}: + owner = sorted(owners - {record.service_id})[0] + record.add_issue("error", "feature-id-collision", f"feature id '{feature_id}' already belongs to '{owner}'") for ref in collect_service_references(feature): - if ref not 
in selected_ids: - record.add_issue( - "error", - "feature-service-reference-invalid", - f"feature references unknown service '{ref}'", - path=record.manifest_path, - ) - - if record.service_type == "host-systemd" and service.get("compose_file"): - record.add_issue( - "warning", - "compose-file-unexpected", - "host-systemd service usually should not declare compose_file", - path=record.manifest_path, - ) + if ref not in known_services: + record.add_issue("error", "feature-service-reference-invalid", f"feature references unknown service '{ref}'") - if record.service_type != "docker": - continue - compose_file = str(service.get("compose_file") or "") - if record.category != "core" and not compose_file: - record.add_issue( - "error", - "compose-file-missing", - "non-core docker services must declare service.compose_file", - path=record.manifest_path, - ) - continue +def build_payload(project_dir: Path, records: list[ServiceRecord], global_issues: list[Issue], strict: bool, requested: list[str]) -> dict[str, Any]: + errors = sum(1 for issue in global_issues if issue.severity == "error") + warnings = sum(1 for issue in global_issues if issue.severity == "warning") - if compose_file and (record.compose_path is None or not record.compose_path.exists()): - record.add_issue( - "error", - "compose-file-missing", - f"compose file '{compose_file}' was not found (enabled or disabled)", - path=record.manifest_path, - ) - continue - - definitions, source_paths = load_compose_definitions(record) - if not service_has_runtime_definition(definitions): - if record.category != "core": - record.add_issue( - "error", - "compose-service-missing", - f"no compose definition found for service '{record.service_id}'", - path=record.compose_path or record.manifest_path, - ) - continue - - if base_is_stub(definitions): - for backend in gpu_backends: - if backend in {"all", "none", "apple"}: - continue - if backend not in record.overlay_paths: - record.add_issue( - "error", - "overlay-required", 
- f"stub compose requires compose.{backend}.yaml because gpu_backends includes '{backend}'", - path=record.compose_path or record.manifest_path, - ) - - for backend, overlay_path in record.overlay_paths.items(): - if gpu_backends and "all" not in gpu_backends and backend not in gpu_backends: - record.add_issue( - "warning", - "overlay-backend-extra", - f"{overlay_path.name} exists but service.gpu_backends does not include '{backend}'", - path=overlay_path, - ) - - container_name = str(service.get("container_name") or "") - if container_name: - matched = False - for label, definition in definitions.items(): - if isinstance(definition, dict) and definition.get("container_name") == container_name: - matched = True - break - if not matched and record.category != "core": - record.add_issue( - "error", - "container-name-mismatch", - f"container_name '{container_name}' was not found in compose definitions", - path=source_paths.get("base", record.manifest_path), - ) - - if port is not None: - port_matches = False - for definition in definitions.values(): - if port in extract_target_ports(definition): - port_matches = True - break - if not port_matches and record.category != "core": - record.add_issue( - "error", - "compose-port-mismatch", - f"no compose port mapping targets manifest service.port {port}", - path=source_paths.get("base", record.manifest_path), - ) - - if external_port_env: - env_ref_found = False - for definition in definitions.values(): - if ports_reference_env(definition, external_port_env): - env_ref_found = True - break - if not env_ref_found and record.category != "core": - record.add_issue( - "warning", - "compose-port-env-unused", - f"compose ports do not reference service.external_port_env '{external_port_env}'", - path=source_paths.get("base", record.manifest_path), - ) - - healthcheck_found = False - for definition in definitions.values(): - if isinstance(definition, dict) and "healthcheck" in definition: - healthcheck_found = True - break - if not 
healthcheck_found and record.category != "core": - record.add_issue( - "warning", - "healthcheck-missing", - "docker service has no healthcheck stanza in its compose definitions", - path=source_paths.get("base", record.manifest_path), - ) - -def filter_records(records: list[ServiceRecord], requested_services: list[str]) -> tuple[list[ServiceRecord], list[Issue]]: - if not requested_services: - return records, [] - - requested = set(requested_services) - available = {record.service_id for record in records} - missing = sorted(requested - available) - filtered = [record for record in records if record.service_id in requested] - issues = [ - Issue( - severity="error", - code="service-not-found", - message=f"requested service '{service_id}' was not found", - service=service_id, - ) - for service_id in missing - ] - return filtered, issues - - -def build_payload( - project_dir: Path, - records: list[ServiceRecord], - global_issues: list[Issue], - strict: bool, - requested_services: list[str], -) -> dict[str, Any]: - error_count = sum(1 for issue in global_issues if issue.severity == "error") - warning_count = sum(1 for issue in global_issues if issue.severity == "warning") - - service_items = [] + services = [] for record in records: - error_count += sum(1 for issue in record.issues if issue.severity == "error") - warning_count += sum(1 for issue in record.issues if issue.severity == "warning") - service_items.append( + errors += sum(1 for issue in record.issues if issue.severity == "error") + warnings += sum(1 for issue in record.issues if issue.severity == "warning") + services.append( { "service_id": record.service_id, - "directory": str(record.directory), "category": record.category, "type": record.service_type, - "compose_enabled": record.compose_enabled, "status": record.status, "issues": [asdict(issue) for issue in record.issues], } ) - failed = error_count > 0 or (strict and warning_count > 0) + result = "fail" if errors > 0 or (strict and warnings > 0) else 
"pass" return { "project_dir": str(project_dir), - "requested_services": requested_services, + "requested_services": requested, "summary": { "services_audited": len(records), - "errors": error_count, - "warnings": warning_count, + "errors": errors, + "warnings": warnings, "strict": strict, - "result": "fail" if failed else "pass", + "result": result, }, "global_issues": [asdict(issue) for issue in global_issues], - "services": service_items, + "services": services, } def print_human_report(payload: dict[str, Any]) -> None: summary = payload["summary"] - requested = payload["requested_services"] - print("Dream Server Extension Audit") print(f"Project: {payload['project_dir']}") - print( - f"Scope: {', '.join(requested)}" - if requested - else f"Scope: all extensions ({summary['services_audited']})" - ) + if payload["requested_services"]: + print(f"Scope: {', '.join(payload['requested_services'])}") + else: + print(f"Scope: all extensions ({summary['services_audited']})") print("") for issue in payload["global_issues"]: prefix = "ERROR" if issue["severity"] == "error" else "WARN" - location = f" [{issue['path']}]" if issue.get("path") else "" - print(f"{prefix} global {issue['code']}: {issue['message']}{location}") - + print(f"{prefix} global {issue['code']}: {issue['message']}") if payload["global_issues"]: print("") - for item in payload["services"]: - label = item["status"].upper() - print(f"{label:4} {item['service_id']} ({item['category']}, {item['type']})") - for issue in item["issues"]: + for service in payload["services"]: + print(f"{service['status'].upper():4} {service['service_id']} ({service['category']}, {service['type']})") + for issue in service["issues"]: prefix = "ERROR" if issue["severity"] == "error" else "WARN" - location = f" [{issue['path']}]" if issue.get("path") else "" - print(f" {prefix} {issue['code']}: {issue['message']}{location}") - - if not payload["services"] and not payload["global_issues"]: - print("No services matched the requested 
scope.") + print(f" {prefix} {issue['code']}: {issue['message']}") print("") print( @@ -803,22 +572,14 @@ def print_human_report(payload: dict[str, Any]) -> None: ) -def main() -> int: - args = parse_args() +def main(argv: list[str]) -> int: + args = parse_args(argv) project_dir = args.project_dir.resolve() - records, global_issues = discover_services(project_dir) - filtered_records, filter_issues = filter_records(records, args.services) - global_issues.extend(filter_issues) - validate_records(filtered_records, global_issues, reference_records=records) - - payload = build_payload( - project_dir=project_dir, - records=filtered_records, - global_issues=global_issues, - strict=args.strict, - requested_services=args.services, - ) + selected, missing = filter_records(records, args.services) + global_issues.extend(missing) + validate_records(selected, global_issues, records) + payload = build_payload(project_dir, selected, global_issues, args.strict, args.services) if args.json: json.dump(payload, sys.stdout, indent=2) @@ -826,13 +587,12 @@ def main() -> int: else: print_human_report(payload) - summary = payload["summary"] - if summary["errors"] > 0: + if payload["summary"]["errors"] > 0: return 1 - if args.strict and summary["warnings"] > 0: + if args.strict and payload["summary"]["warnings"] > 0: return 1 return 0 if __name__ == "__main__": - raise SystemExit(main()) + raise SystemExit(main(sys.argv[1:])) diff --git a/dream-server/scripts/extension-catalog.py b/dream-server/scripts/extension-catalog.py new file mode 100644 index 00000000..3f181c49 --- /dev/null +++ b/dream-server/scripts/extension-catalog.py @@ -0,0 +1,554 @@ +#!/usr/bin/env python3 +"""Export Dream Server extension metadata as JSON, Markdown, or NDJSON.""" + +from __future__ import annotations + +import argparse +import json +import sys +from collections import Counter +from dataclasses import asdict, dataclass, field +from pathlib import Path +from typing import Any + +import yaml + + +MANIFEST_NAMES 
# Canonical manifest file names, probed in priority order.
MANIFEST_NAMES = ("manifest.yaml", "manifest.yml", "manifest.json")
VALID_CATEGORIES = {"core", "recommended", "optional"}
VALID_TYPES = {"docker", "host-systemd"}
VALID_STATUSES = {"always-on", "enabled", "disabled", "missing"}


@dataclass
class CatalogIssue:
    """One problem found while reading or validating a manifest."""

    code: str
    message: str
    severity: str = "error"  # "error" or "warning"
    service: str | None = None  # offending service id, when known
    path: str | None = None  # file or directory the issue points at


@dataclass
class FeatureEntry:
    """A single feature advertised by a service manifest."""

    id: str
    name: str
    category: str
    priority: int
    description: str = ""


@dataclass
class ServiceEntry:
    """Normalized catalog record for one extension service."""

    id: str
    name: str
    category: str
    type: str
    status: str
    aliases: list[str]
    depends_on: list[str]
    gpu_backends: list[str]
    feature_count: int
    path: str  # service dir relative to the project root
    compose_file: str
    features: list[FeatureEntry] = field(default_factory=list)

    def to_dict(self, include_features: bool) -> dict[str, Any]:
        """Serialize for output; the feature list is included only on request."""
        payload: dict[str, Any] = {
            "id": self.id,
            "name": self.name,
            "category": self.category,
            "type": self.type,
            "status": self.status,
            "aliases": self.aliases,
            "depends_on": self.depends_on,
            "gpu_backends": self.gpu_backends,
            "feature_count": self.feature_count,
            "path": self.path,
            "compose_file": self.compose_file,
        }
        if include_features:
            payload["features"] = [asdict(feature) for feature in self.features]
        return payload


def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
    """Build and run the CLI argument parser.

    `argv=None` lets argparse fall back to `sys.argv[1:]`.
    """
    parser = argparse.ArgumentParser(
        description="Export a catalog of Dream Server extensions."
    )
    parser.add_argument(
        "--project-dir",
        type=Path,
        default=Path(__file__).resolve().parent.parent,
        help="Dream Server project directory (defaults to this repo).",
    )
    parser.add_argument(
        "--format",
        choices=("json", "markdown", "ndjson"),
        default="json",
        help="Output format (default: json).",
    )
    parser.add_argument(
        "--category",
        choices=sorted(VALID_CATEGORIES),
        help="Filter services by category.",
    )
    parser.add_argument(
        "--status",
        choices=sorted(VALID_STATUSES),
        help="Filter services by runtime status.",
    )
    parser.add_argument(
        "--service-type",
        choices=sorted(VALID_TYPES),
        help="Filter services by service.type.",
    )
    parser.add_argument(
        "--gpu-backend",
        action="append",
        default=[],
        help="Filter services that include one or more gpu_backends values.",
    )
    parser.add_argument(
        "--service",
        action="append",
        default=[],
        help="Include only specific service IDs (repeatable).",
    )
    parser.add_argument(
        "--include-features",
        action="store_true",
        help="Include full feature payload in JSON/NDJSON output.",
    )
    parser.add_argument(
        "--sort",
        choices=("id", "name", "category", "status", "feature_count"),
        default="id",
        help="Sort key for services output.",
    )
    parser.add_argument(
        "--strict",
        action="store_true",
        # Help matches behavior: only error-severity issues affect exit code.
        help="Return non-zero if catalog errors are found.",
    )
    parser.add_argument(
        "--summary-only",
        action="store_true",
        help="Print only summary output.",
    )
    parser.add_argument(
        "--output",
        type=Path,
        help="Write output to file instead of stdout.",
    )
    parser.add_argument(
        "--compact",
        action="store_true",
        help="Compact JSON output (json/ndjson only).",
    )
    return parser.parse_args(argv)


def load_document(path: Path) -> Any:
    """Parse a manifest file: JSON by suffix, YAML otherwise."""
    with path.open("r", encoding="utf-8") as handle:
        if path.suffix == ".json":
            return json.load(handle)
        return yaml.safe_load(handle)


def find_manifest(service_dir: Path) -> Path | None:
    """Return the first existing manifest file in MANIFEST_NAMES order."""
    for name in MANIFEST_NAMES:
        candidate = service_dir / name
        if candidate.exists():
            return candidate
    return None


def as_string_list(value: Any) -> list[str]:
    """Coerce a manifest value to a list of non-empty strings."""
    if value is None:
        return []
    if isinstance(value, list):
        return [str(item) for item in value if str(item)]
    return [str(value)]


def as_int(value: Any, default: int = 0) -> int:
    """Coerce to int, returning `default` for bools and unparseable values."""
    try:
        # bool is an int subclass; treat True/False as invalid priorities.
        if isinstance(value, bool):
            return default
        return int(value)
    except (TypeError, ValueError):
        return default


def service_status(service_dir: Path, compose_file: str) -> str:
    """Derive runtime status from which compose file variant is on disk.

    No compose file declared means the service is always on; a
    `<compose>.disabled` sibling marks an explicitly disabled service.
    """
    if not compose_file:
        return "always-on"
    enabled = service_dir / compose_file
    disabled = service_dir / f"{compose_file}.disabled"
    if enabled.exists():
        return "enabled"
    if disabled.exists():
        return "disabled"
    return "missing"


def collect_features(document: dict[str, Any], issues: list[CatalogIssue], service_id: str, manifest_path: Path) -> list[FeatureEntry]:
    """Extract FeatureEntry records from a manifest's `features` list.

    Malformed entries are skipped with a warning rather than aborting the
    whole service.
    """
    raw_features = document.get("features")
    if raw_features is None:
        return []
    if not isinstance(raw_features, list):
        issues.append(
            CatalogIssue(
                code="features-not-list",
                message="features should be a list",
                severity="warning",
                service=service_id,
                path=str(manifest_path),
            )
        )
        return []

    features: list[FeatureEntry] = []
    for idx, raw in enumerate(raw_features):
        if not isinstance(raw, dict):
            issues.append(
                CatalogIssue(
                    code="feature-invalid",
                    message=f"feature[{idx}] is not an object",
                    severity="warning",
                    service=service_id,
                    path=str(manifest_path),
                )
            )
            continue
        # Fall back to a positional id/name so partial entries still appear.
        feature_id = str(raw.get("id") or f"feature-{idx}")
        features.append(
            FeatureEntry(
                id=feature_id,
                name=str(raw.get("name") or feature_id),
                category=str(raw.get("category") or "uncategorized"),
                priority=as_int(raw.get("priority"), 0),
                description=str(raw.get("description") or ""),
            )
        )
    return features


def build_service_entry(service_dir: Path, manifest_path: Path, document: dict[str, Any], issues: list[CatalogIssue]) -> ServiceEntry | None:
    """Validate one manifest document and build its ServiceEntry.

    Returns None (after recording an error) when the mandatory `service`
    mapping is absent; other problems are recorded but still yield an entry.
    """
    service = document.get("service")
    if not isinstance(service, dict):
        issues.append(
            CatalogIssue(
                code="service-section-missing",
                message="manifest missing service mapping",
                service=service_dir.name,
                path=str(manifest_path),
            )
        )
        return None

    service_id = str(service.get("id") or service_dir.name)
    category = str(service.get("category") or "optional")
    service_type = str(service.get("type") or "docker")
    compose_file = str(service.get("compose_file") or "")
    aliases = as_string_list(service.get("aliases"))
    depends_on = as_string_list(service.get("depends_on"))
    gpu_backends = as_string_list(service.get("gpu_backends") or ["amd", "nvidia"])
    features = collect_features(document, issues, service_id, manifest_path)

    if document.get("schema_version") != "dream.services.v1":
        issues.append(
            CatalogIssue(
                code="schema-version-invalid",
                message="schema_version should be dream.services.v1",
                service=service_id,
                path=str(manifest_path),
            )
        )

    if service_dir.name != service_id:
        issues.append(
            CatalogIssue(
                code="service-id-dir-mismatch",
                message=f"service.id '{service_id}' differs from directory '{service_dir.name}'",
                severity="warning",
                service=service_id,
                path=str(manifest_path),
            )
        )

    if category not in VALID_CATEGORIES:
        issues.append(
            CatalogIssue(
                code="category-invalid",
                message=f"unknown category '{category}'",
                service=service_id,
                path=str(manifest_path),
            )
        )

    if service_type not in VALID_TYPES:
        issues.append(
            CatalogIssue(
                code="type-invalid",
                message=f"unknown service.type '{service_type}'",
                service=service_id,
                path=str(manifest_path),
            )
        )

    return ServiceEntry(
        id=service_id,
        name=str(service.get("name") or service_id),
        category=category,
        type=service_type,
        status=service_status(service_dir, compose_file),
        aliases=aliases,
        depends_on=depends_on,
        gpu_backends=gpu_backends,
        feature_count=len(features),
        # parents[2] of <project>/extensions/services/<id> is the project
        # root, so this yields "extensions/services/<id>".
        path=str(service_dir.relative_to(service_dir.parents[2])),
        compose_file=compose_file,
        features=features,
    )


def discover_services(project_dir: Path) -> tuple[list[ServiceEntry], list[CatalogIssue]]:
    """Scan extensions/services/* and build entries plus any issues found."""
    services_dir = project_dir / "extensions" / "services"
    issues: list[CatalogIssue] = []
    entries: list[ServiceEntry] = []

    if not services_dir.exists():
        issues.append(
            CatalogIssue(
                code="extensions-dir-missing",
                message=f"missing directory: {services_dir}",
                path=str(services_dir),
            )
        )
        return entries, issues

    for service_dir in sorted(services_dir.iterdir()):
        if not service_dir.is_dir():
            continue

        manifest_path = find_manifest(service_dir)
        if manifest_path is None:
            issues.append(
                CatalogIssue(
                    code="manifest-missing",
                    message="service directory has no manifest file",
                    severity="warning",
                    service=service_dir.name,
                    path=str(service_dir),
                )
            )
            continue

        try:
            document = load_document(manifest_path)
        except Exception as exc:
            # Deliberately broad: one unparseable manifest must not stop the
            # whole catalog scan.
            issues.append(
                CatalogIssue(
                    code="manifest-parse-failed",
                    message=str(exc),
                    service=service_dir.name,
                    path=str(manifest_path),
                )
            )
            continue

        if not isinstance(document, dict):
            issues.append(
                CatalogIssue(
                    code="manifest-root-invalid",
                    message="manifest root must be an object",
                    service=service_dir.name,
                    path=str(manifest_path),
                )
            )
            continue

        entry = build_service_entry(service_dir, manifest_path, document, issues)
        if entry is not None:
            entries.append(entry)

    return entries, issues


def apply_filters(
    entries: list[ServiceEntry],
    *,
    category: str | None,
    status: str | None,
    service_type: str | None,
    gpu_backends: list[str],
    service_ids: list[str],
) -> list[ServiceEntry]:
    """Apply each CLI filter in turn; empty/None filters are no-ops."""
    filtered = entries
    if category:
        filtered = [entry for entry in filtered if entry.category == category]
    if status:
        filtered = [entry for entry in filtered if entry.status == status]
    if service_type:
        filtered = [entry for entry in filtered if entry.type == service_type]
    if gpu_backends:
        # Any overlap with the requested backends keeps the entry.
        required = set(gpu_backends)
        filtered = [
            entry
            for entry in filtered
            if required.intersection(set(entry.gpu_backends))
        ]
    if service_ids:
        allowed = set(service_ids)
        filtered = [entry for entry in filtered if entry.id in allowed]
    return filtered


def sort_entries(entries: list[ServiceEntry], sort_key: str) -> list[ServiceEntry]:
    """Sort entries by the chosen key, with id as tiebreaker/default."""
    if sort_key == "feature_count":
        return sorted(entries, key=lambda item: (item.feature_count, item.id))
    if sort_key == "name":
        return sorted(entries, key=lambda item: (item.name.lower(), item.id))
    if sort_key == "category":
        return sorted(entries, key=lambda item: (item.category, item.id))
    if sort_key == "status":
        return sorted(entries, key=lambda item: (item.status, item.id))
    return sorted(entries, key=lambda item: item.id)


def make_summary(entries: list[ServiceEntry], issues: list[CatalogIssue]) -> dict[str, Any]:
    """Aggregate counts over the (already filtered) entries and all issues."""
    categories = Counter(entry.category for entry in entries)
    statuses = Counter(entry.status for entry in entries)
    service_types = Counter(entry.type for entry in entries)
    feature_count = sum(entry.feature_count for entry in entries)
    issue_counts = Counter(issue.severity for issue in issues)

    return {
        "service_count": len(entries),
        "feature_count": feature_count,
        "categories": dict(sorted(categories.items())),
        "statuses": dict(sorted(statuses.items())),
        "types": dict(sorted(service_types.items())),
        "issues": {
            "total": len(issues),
            "errors": issue_counts.get("error", 0),
            "warnings": issue_counts.get("warning", 0),
        },
    }


def build_payload(entries: list[ServiceEntry], issues: list[CatalogIssue], include_features: bool) -> dict[str, Any]:
    """Assemble the top-level output document (summary, issues, services)."""
    return {
        "summary": make_summary(entries, issues),
        "issues": [asdict(issue) for issue in issues],
        "services": [entry.to_dict(include_features=include_features) for entry in entries],
    }


def render_markdown(payload: dict[str, Any]) -> str:
    """Render the payload as a Markdown report with a services table."""
    summary = payload["summary"]
    lines = [
        "# Dream Server Extension Catalog",
        "",
        f"- Services: {summary['service_count']}",
        f"- Features: {summary['feature_count']}",
        f"- Categories: {json.dumps(summary['categories'], sort_keys=True)}",
        f"- Statuses: {json.dumps(summary['statuses'], sort_keys=True)}",
        "",
        "| ID | Category | Status | Type | Features | GPU | Aliases | Depends On |",
        "|---|---|---|---|---:|---|---|---|",
    ]

    for service in payload["services"]:
        aliases = ", ".join(service["aliases"]) or "-"
        deps = ", ".join(service["depends_on"]) or "-"
        backends = ", ".join(service["gpu_backends"]) or "-"
        lines.append(
            "| {id} | {category} | {status} | {type} | {feature_count} | {gpu} | {aliases} | {depends_on} |".format(
                id=service["id"],
                category=service["category"],
                status=service["status"],
                type=service["type"],
                feature_count=service["feature_count"],
                gpu=backends,
                aliases=aliases,
                depends_on=deps,
            )
        )

    if payload["issues"]:
        lines.extend(
            [
                "",
                "## Catalog Issues",
                "",
                "| Severity | Code | Service | Message |",
                "|---|---|---|---|",
            ]
        )
        for issue in payload["issues"]:
            lines.append(
                "| {severity} | {code} | {service} | {message} |".format(
                    severity=issue["severity"],
                    code=issue["code"],
                    service=issue.get("service") or "-",
                    # Escape pipes so messages cannot break the table.
                    message=str(issue["message"]).replace("|", "\\|"),
                )
            )

    return "\n".join(lines) + "\n"


def render_ndjson(payload: dict[str, Any], compact: bool) -> str:
    """Render one JSON object per line; empty input yields an empty string."""
    separators = (",", ":") if compact else (",", ": ")
    lines = []
    for service in payload["services"]:
        lines.append(json.dumps(service, separators=separators))
    return "\n".join(lines) + ("\n" if lines else "")


def emit_output(text: str, output: Path | None) -> None:
    """Write text to stdout, or to `output`, creating parent dirs as needed."""
    if output is None:
        sys.stdout.write(text)
        return
    output.parent.mkdir(parents=True, exist_ok=True)
    output.write_text(text, encoding="utf-8")


def main(argv: list[str] | None = None) -> int:
    """CLI entry point.

    Returns 0 on success, 2 when --strict is set and error-severity catalog
    issues were found.
    """
    args = parse_args(argv)
    project_dir = args.project_dir.resolve()

    entries, issues = discover_services(project_dir)
    entries = apply_filters(
        entries,
        category=args.category,
        status=args.status,
        service_type=args.service_type,
        gpu_backends=args.gpu_backend,
        service_ids=args.service,
    )
    entries = sort_entries(entries, args.sort)
    payload = build_payload(entries, issues, include_features=bool(args.include_features))

    if args.summary_only:
        # Fixed: honor --compact with tight separators here too, matching the
        # full-payload JSON branch below (previously only indent was dropped).
        if args.compact:
            content = json.dumps(payload["summary"], separators=(",", ":"))
        else:
            content = json.dumps(payload["summary"], indent=2)
        emit_output(content + "\n", args.output)
    elif args.format == "markdown":
        emit_output(render_markdown(payload), args.output)
    elif args.format == "ndjson":
        emit_output(render_ndjson(payload, compact=bool(args.compact)), args.output)
    else:
        indent = None if args.compact else 2
        separators = (",", ":") if args.compact else None
        emit_output(json.dumps(payload, indent=indent, separators=separators) + "\n", args.output)

    if args.strict and payload["summary"]["issues"]["errors"] > 0:
        return 2
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
-# -# Usage: bash tests/test-extension-audit.sh -# Exit 0 if all pass, 1 if any fail -# ============================================================================ +# Regression tests for scripts/audit-extensions.py set -euo pipefail @@ -14,32 +7,11 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" PROJECT_DIR="$(dirname "$SCRIPT_DIR")" AUDIT_SCRIPT="$PROJECT_DIR/scripts/audit-extensions.py" -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -CYAN='\033[0;36m' -BOLD='\033[1m' -NC='\033[0m' - PASS=0 FAIL=0 -pass() { - echo -e " ${GREEN}PASS${NC} $1" - PASS=$((PASS + 1)) -} - -fail() { - echo -e " ${RED}FAIL${NC} $1" - [[ -n "${2:-}" ]] && echo -e " ${RED}→ $2${NC}" - FAIL=$((FAIL + 1)) -} - -header() { - echo "" - echo -e "${BOLD}${CYAN}[$1/6]${NC} ${BOLD}$2${NC}" - echo -e "${CYAN}$(printf '%.0s─' {1..60})${NC}" -} +pass() { echo "PASS $1"; PASS=$((PASS + 1)); } +fail() { echo "FAIL $1"; FAIL=$((FAIL + 1)); } make_fixture_root() { local root @@ -57,7 +29,7 @@ write_service() { "$@" "$dir" } -service_core_llm() { +service_llama() { local dir="$1" cat > "$dir/manifest.yaml" <<'EOF' schema_version: dream.services.v1 @@ -78,7 +50,7 @@ service: EOF } -service_search_valid() { +service_search() { local dir="$1" cat > "$dir/manifest.yaml" <<'EOF' schema_version: dream.services.v1 @@ -101,7 +73,7 @@ service: features: - id: search-ui name: Search UI - description: Search the web privately + description: Private search category: productivity priority: 3 requirements: @@ -120,7 +92,7 @@ services: EOF } -service_image_valid() { +service_image_gen() { local dir="$1" cat > "$dir/manifest.yaml" <<'EOF' schema_version: dream.services.v1 @@ -165,191 +137,82 @@ services: EOF } -service_host_valid() { - local dir="$1" - cat > "$dir/manifest.yaml" <<'EOF' -schema_version: dream.services.v1 - -service: - id: opencode - name: OpenCode - aliases: [code] - container_name: "" - port: 3003 - external_port_default: 3003 - health: / - type: host-systemd - gpu_backends: 
[amd, nvidia] - category: optional - depends_on: [] -EOF -} - create_valid_project() { local root="$1" - write_service "$root" "llama-server" service_core_llm - write_service "$root" "search" service_search_valid - write_service "$root" "image-gen" service_image_valid - write_service "$root" "opencode" service_host_valid + write_service "$root" "llama-server" service_llama + write_service "$root" "search" service_search + write_service "$root" "image-gen" service_image_gen } run_audit() { python3 "$AUDIT_SCRIPT" --project-dir "$1" "${@:2}" } -assert_json_value() { +assert_json_expr() { local file="$1" local expr="$2" python3 - "$file" "$expr" <<'PY' import json import sys - payload = json.load(open(sys.argv[1], encoding="utf-8")) expr = sys.argv[2] value = eval(expr, {"payload": payload}) -if isinstance(value, bool): - raise SystemExit(0 if value else 1) -print(value) +raise SystemExit(0 if value else 1) PY } -header "1" "Valid Project Passes Cleanly" -root=$(make_fixture_root) -trap 'rm -rf "$root" "${root2:-}" "${root3:-}" "${root4:-}" "${root5:-}" "${root6:-}"' EXIT -create_valid_project "$root" -report=$(mktemp) -if run_audit "$root" --json > "$report"; then - pass "valid fixture audits successfully" -else - fail "valid fixture should pass" -fi -if assert_json_value "$report" "payload['summary']['result'] == 'pass'" >/dev/null; then - pass "valid fixture reports pass" -else - fail "valid fixture JSON did not report pass" -fi -if assert_json_value "$report" "payload['summary']['warnings'] == 0" >/dev/null; then - pass "valid fixture reports zero warnings" +ROOT_A=$(make_fixture_root) +ROOT_B=$(make_fixture_root) +ROOT_C=$(make_fixture_root) +ROOT_D=$(make_fixture_root) +trap 'rm -rf "$ROOT_A" "$ROOT_B" "$ROOT_C" "$ROOT_D"' EXIT + +create_valid_project "$ROOT_A" +if run_audit "$ROOT_A" --json > /tmp/ext-audit-a.json; then + pass "valid fixture passes" else - fail "valid fixture unexpectedly reported warnings" + fail "valid fixture passes" fi +assert_json_expr 
/tmp/ext-audit-a.json "payload['summary']['result'] == 'pass'" && pass "valid fixture reports pass" || fail "valid fixture reports pass" -header "2" "Missing Dependency Is Rejected" -root2=$(make_fixture_root) -create_valid_project "$root2" -python3 - "$root2/extensions/services/search/manifest.yaml" <<'PY' -import yaml -import sys +create_valid_project "$ROOT_B" +python3 - "$ROOT_B/extensions/services/search/manifest.yaml" <<'PY' +import yaml, sys path = sys.argv[1] doc = yaml.safe_load(open(path, encoding="utf-8")) doc["service"]["depends_on"] = ["missing-service"] -with open(path, "w", encoding="utf-8") as handle: - yaml.safe_dump(doc, handle, sort_keys=False) +yaml.safe_dump(doc, open(path, "w", encoding="utf-8"), sort_keys=False) PY -report2=$(mktemp) -if run_audit "$root2" --json > "$report2" 2>/dev/null; then +if run_audit "$ROOT_B" --json > /tmp/ext-audit-b.json 2>/dev/null; then fail "missing dependency should fail" else - pass "missing dependency fails audit" -fi -if assert_json_value "$report2" "any(issue['code'] == 'dependency-missing' for svc in payload['services'] for issue in svc['issues'])" >/dev/null; then - pass "missing dependency is reported with the right code" -else - fail "missing dependency code was not reported" + pass "missing dependency fails" fi +assert_json_expr /tmp/ext-audit-b.json "any(issue['code'] == 'dependency-missing' for svc in payload['services'] for issue in svc['issues'])" && pass "missing dependency is reported" || fail "missing dependency is reported" -header "3" "Alias Collisions Are Rejected" -root3=$(make_fixture_root) -create_valid_project "$root3" -python3 - "$root3/extensions/services/opencode/manifest.yaml" <<'PY' -import yaml -import sys -path = sys.argv[1] -doc = yaml.safe_load(open(path, encoding="utf-8")) -doc["service"]["aliases"] = ["search-ui"] -with open(path, "w", encoding="utf-8") as handle: - yaml.safe_dump(doc, handle, sort_keys=False) -PY -report3=$(mktemp) -if run_audit "$root3" --json > "$report3" 
2>/dev/null; then - fail "alias collision should fail" -else - pass "alias collision fails audit" -fi -if assert_json_value "$report3" "any(issue['code'] == 'alias-collision' for svc in payload['services'] for issue in svc['issues'])" >/dev/null; then - pass "alias collision is reported" -else - fail "alias collision code was not reported" -fi - -header "4" "GPU Stub Requires Matching Overlays" -root4=$(make_fixture_root) -create_valid_project "$root4" -rm -f "$root4/extensions/services/image-gen/compose.nvidia.yaml" -report4=$(mktemp) -if run_audit "$root4" --json > "$report4" 2>/dev/null; then +create_valid_project "$ROOT_C" +rm -f "$ROOT_C/extensions/services/image-gen/compose.nvidia.yaml" +if run_audit "$ROOT_C" --json > /tmp/ext-audit-c.json 2>/dev/null; then fail "missing overlay should fail" else - pass "missing overlay fails audit" -fi -if assert_json_value "$report4" "any(issue['code'] == 'overlay-required' for svc in payload['services'] for issue in svc['issues'])" >/dev/null; then - pass "missing overlay is reported" -else - fail "missing overlay code was not reported" + pass "missing overlay fails" fi +assert_json_expr /tmp/ext-audit-c.json "any(issue['code'] == 'overlay-required' for svc in payload['services'] for issue in svc['issues'])" && pass "missing overlay is reported" || fail "missing overlay is reported" -header "5" "Compose Port Mismatch Is Rejected" -root5=$(make_fixture_root) -create_valid_project "$root5" -python3 - "$root5/extensions/services/search/compose.yaml" <<'PY' -import yaml -import sys +create_valid_project "$ROOT_D" +python3 - "$ROOT_D/extensions/services/search/compose.yaml" <<'PY' +import yaml, sys path = sys.argv[1] doc = yaml.safe_load(open(path, encoding="utf-8")) doc["services"]["search"]["ports"] = ["127.0.0.1:${SEARCH_PORT:-8888}:9090"] -with open(path, "w", encoding="utf-8") as handle: - yaml.safe_dump(doc, handle, sort_keys=False) +yaml.safe_dump(doc, open(path, "w", encoding="utf-8"), sort_keys=False) PY 
-report5=$(mktemp) -if run_audit "$root5" --json > "$report5" 2>/dev/null; then +if run_audit "$ROOT_D" --json > /tmp/ext-audit-d.json 2>/dev/null; then fail "port mismatch should fail" else - pass "port mismatch fails audit" -fi -if assert_json_value "$report5" "any(issue['code'] == 'compose-port-mismatch' for svc in payload['services'] for issue in svc['issues'])" >/dev/null; then - pass "port mismatch is reported" -else - fail "port mismatch code was not reported" -fi - -header "6" "Strict Mode Fails On Warnings" -root6=$(make_fixture_root) -create_valid_project "$root6" -cp "$root6/extensions/services/image-gen/compose.nvidia.yaml" \ - "$root6/extensions/services/image-gen/compose.apple.yaml" -report6=$(mktemp) -if run_audit "$root6" --json > "$report6"; then - pass "extra overlay only warns in normal mode" -else - fail "normal mode should tolerate warning-only fixture" + pass "port mismatch fails" fi -if assert_json_value "$report6" "payload['summary']['warnings'] > 0" >/dev/null; then - pass "warning count is reported" -else - fail "warning fixture did not report warnings" -fi -if run_audit "$root6" --strict >/dev/null 2>&1; then - fail "strict mode should fail on warnings" -else - pass "strict mode converts warnings into failure" -fi - -echo "" -echo -e "${BOLD}${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" -echo -e "${BOLD} Results: ${GREEN}$PASS passed${NC}, ${RED}$FAIL failed${NC}${BOLD}${NC}" -echo -e "${BOLD}${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" -echo "" +assert_json_expr /tmp/ext-audit-d.json "any(issue['code'] == 'compose-port-mismatch' for svc in payload['services'] for issue in svc['issues'])" && pass "port mismatch is reported" || fail "port mismatch is reported" -if [[ $FAIL -gt 0 ]]; then - exit 1 -fi +echo "Result: $PASS passed, $FAIL failed" +[[ "$FAIL" -eq 0 ]] diff --git a/dream-server/tests/test-extension-catalog.sh b/dream-server/tests/test-extension-catalog.sh new file mode 
#!/bin/bash
# Regression tests for scripts/extension-catalog.py
#
# Exercises the catalog exporter against the real project tree plus a
# deliberately broken fixture: filters, output formats, and --strict exit
# codes. Exits 0 when every check passes, 1 otherwise.

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
CATALOG_SCRIPT="$PROJECT_DIR/scripts/extension-catalog.py"

# Fixed /tmp/dream-catalog-* names were collision-prone between concurrent
# runs and leaked on early exit (the cleanup trap used to be installed only
# near the end of the script). Use one private scratch dir, cleaned on any
# exit, installed before the first test runs.
TMP_DIR="$(mktemp -d)"
trap 'rm -rf "$TMP_DIR"' EXIT

PASS=0
FAIL=0

pass() {
    echo "PASS $1"
    PASS=$((PASS + 1))
}

fail() {
    echo "FAIL $1"
    FAIL=$((FAIL + 1))
}

# run_expect <exit-code> <label> <command...>
# Run the command, capture its output, and require the given exit code.
run_expect() {
    local expected_exit="$1"
    local label="$2"
    shift 2

    set +e
    "$@" >"$TMP_DIR/test.out" 2>"$TMP_DIR/test.err"
    local exit_code=$?
    set -e

    if [[ "$exit_code" -eq "$expected_exit" ]]; then
        pass "$label"
    else
        fail "$label (expected $expected_exit, got $exit_code)"
        sed -n '1,20p' "$TMP_DIR/test.err"
    fi
}

# assert_json_expr <file> <python-expr>
# Evaluate the expression with `payload` bound to the parsed JSON file;
# exit status reflects the expression's truthiness.
assert_json_expr() {
    local file="$1"
    local expr="$2"
    python3 - "$file" "$expr" <<'PY'
import json
import sys

payload = json.load(open(sys.argv[1], encoding="utf-8"))
expr = sys.argv[2]
value = eval(expr, {"payload": payload})
raise SystemExit(0 if value else 1)
PY
}

[[ -f "$CATALOG_SCRIPT" ]] || { echo "missing $CATALOG_SCRIPT"; exit 1; }
python3 -m py_compile "$CATALOG_SCRIPT"
pass "extension-catalog.py compiles"

run_expect 0 "--help exits 0" python3 "$CATALOG_SCRIPT" --help

run_expect 0 "default JSON output succeeds" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR"
python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" >"$TMP_DIR/catalog.json"
if assert_json_expr "$TMP_DIR/catalog.json" "payload['summary']['service_count'] > 0"; then
    pass "default payload has services"
else
    fail "default payload has services"
fi

run_expect 0 "category filter works" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --category core
python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --category core >"$TMP_DIR/core.json"
if assert_json_expr "$TMP_DIR/core.json" "all(s['category'] == 'core' for s in payload['services'])"; then
    pass "category filter only returns core"
else
    fail "category filter only returns core"
fi

run_expect 0 "status filter works" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --status enabled
python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --status enabled >"$TMP_DIR/enabled.json"
if assert_json_expr "$TMP_DIR/enabled.json" "all(s['status'] == 'enabled' for s in payload['services'])"; then
    pass "status filter only returns enabled"
else
    fail "status filter only returns enabled"
fi

run_expect 0 "service filter works" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --service whisper
python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --service whisper >"$TMP_DIR/whisper.json"
if assert_json_expr "$TMP_DIR/whisper.json" "payload['summary']['service_count'] == 1 and payload['services'][0]['id'] == 'whisper'"; then
    pass "service filter returns whisper"
else
    fail "service filter returns whisper"
fi

run_expect 0 "gpu backend filter works" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --gpu-backend amd
python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --gpu-backend amd >"$TMP_DIR/amd.json"
if assert_json_expr "$TMP_DIR/amd.json" "all('amd' in s['gpu_backends'] for s in payload['services'])"; then
    pass "gpu filter includes only amd-capable services"
else
    fail "gpu filter includes only amd-capable services"
fi

run_expect 0 "include-features adds feature payload" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --service whisper --include-features
python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --service whisper --include-features >"$TMP_DIR/features.json"
if assert_json_expr "$TMP_DIR/features.json" "'features' in payload['services'][0]"; then
    pass "include-features returns features list"
else
    fail "include-features returns features list"
fi

run_expect 0 "summary-only JSON works" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --summary-only
python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --summary-only >"$TMP_DIR/summary.json"
if assert_json_expr "$TMP_DIR/summary.json" "'service_count' in payload and 'categories' in payload"; then
    pass "summary-only has expected keys"
else
    fail "summary-only has expected keys"
fi

run_expect 0 "markdown output works" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --format markdown
python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --format markdown >"$TMP_DIR/catalog.md"
if grep -q "| ID | Category | Status | Type | Features | GPU | Aliases | Depends On |" "$TMP_DIR/catalog.md"; then
    pass "markdown table header present"
else
    fail "markdown table header present"
fi

run_expect 0 "ndjson output works" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --format ndjson --service whisper
python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --format ndjson --service whisper >"$TMP_DIR/catalog.ndjson"
if python3 - "$TMP_DIR/catalog.ndjson" <<'PY'
import json
import sys
line = open(sys.argv[1], encoding="utf-8").read().strip()
obj = json.loads(line)
raise SystemExit(0 if obj["id"] == "whisper" else 1)
PY
then
    pass "ndjson emits valid object lines"
else
    fail "ndjson emits valid object lines"
fi

run_expect 0 "output file option writes content" \
    python3 "$CATALOG_SCRIPT" --project-dir "$PROJECT_DIR" --output "$TMP_DIR/output.json"
if [[ -s "$TMP_DIR/output.json" ]]; then
    pass "output file created"
else
    fail "output file created"
fi

# Broken fixture: invalid category must surface as an error-severity issue.
FIXTURE_ROOT="$TMP_DIR/fixture"
mkdir -p "$FIXTURE_ROOT/extensions/services/bad-service"
cat > "$FIXTURE_ROOT/extensions/services/bad-service/manifest.yaml" <<'EOF'
schema_version: dream.services.v1
service:
  id: bad-service
  name: Bad Service
  category: invalid-category
  type: docker
  compose_file: compose.yaml
EOF
cat > "$FIXTURE_ROOT/extensions/services/bad-service/compose.yaml" <<'EOF'
services:
  bad-service:
    image: example/bad:latest
EOF

run_expect 2 "strict mode fails on catalog issues" \
    python3 "$CATALOG_SCRIPT" --project-dir "$FIXTURE_ROOT" --strict
python3 "$CATALOG_SCRIPT" --project-dir "$FIXTURE_ROOT" >"$TMP_DIR/fixture.json"
if assert_json_expr "$TMP_DIR/fixture.json" "payload['summary']['issues']['errors'] >= 1"; then
    pass "fixture reports catalog errors"
else
    fail "fixture reports catalog errors"
fi

echo "Result: $PASS passed, $FAIL failed"
[[ "$FAIL" -eq 0 ]]