diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..063ac29
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,6 @@
+# Database Schema Spec Generator Configuration
+
+# Base URL for generated schema files
+# This URL will be used in the generated schema files as the base for references
+# Example: https://api.example.com/schemas
+BASE_URL=https://example.com
\ No newline at end of file
diff --git a/.github/actions/setup/action.yaml b/.github/actions/setup/action.yaml
new file mode 100644
index 0000000..119c03d
--- /dev/null
+++ b/.github/actions/setup/action.yaml
@@ -0,0 +1,16 @@
+name: Setup uv and Python
+runs:
+  using: "composite"
+  steps:
+    # Setup Python
+    - name: Setup Python
+      uses: actions/setup-python@v5
+      with:
+        python-version-file: ".python-version" # Use .python-version file to set Python version
+
+    # Setup uv
+    - name: Setup uv
+      uses: astral-sh/setup-uv@v6
+      with:
+        version: "0.8.0" # Specify the version of uv to use
+        enable-cache: true # Enable caching for uv dependencies
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..c213fcb
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,69 @@
+name: ci
+
+on:
+  pull_request:
+    branches:
+      - main # Run on every PR to main
+      - staging # Run on every PR to staging
+      - dev # Run on every PR to dev
+      - feature/* # Run on every PR to feature branches
+  workflow_call: # This makes it reusable in other workflows
+jobs:
+  install_dependencies:
+    runs-on: ubuntu-latest
+    steps:
+      # Checkout code
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - uses: ./.github/actions/setup
+      # Install project dependencies, failing if `uv.lock` is missing or out of sync
+      - name: Install project dependencies
+        run: uv sync --dev --locked --all-extras
+
+  linting:
+    runs-on: ubuntu-latest
+    needs: [install_dependencies]
+    steps:
+      # Checkout code
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - uses: ./.github/actions/setup
+      # Run linting check
+      - name: Run linting check
+        run: uv run ruff check .
+
+  formatting:
+    runs-on: ubuntu-latest
+    needs: [install_dependencies]
+    steps:
+      # Checkout code
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - uses: ./.github/actions/setup
+      # Run formatting check
+      - name: Run formatting check
+        run: uv run ruff format --check .
+
+  type_checking:
+    runs-on: ubuntu-latest
+    needs: [install_dependencies]
+    steps:
+      # Checkout code
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - uses: ./.github/actions/setup
+      # Run type checking
+      - name: Run type checking
+        run: uv run pyright .
+
+  tests:
+    runs-on: ubuntu-latest
+    needs: [install_dependencies]
+    steps:
+      # Checkout code
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - uses: ./.github/actions/setup
+      # Run tests
+      - name: Run tests
+        run: uv run pytest
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..24f3c0f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,209 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[codz]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py.cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock +#poetry.toml + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. +# https://pdm-project.org/en/latest/usage/project/#working-with-version-control +#pdm.lock +#pdm.toml +.pdm-python +.pdm-build/ + +# pixi +# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. +#pixi.lock +# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one +# in the .venv directory. It is recommended not to include this directory in version control. +.pixi + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.envrc +.venv +env/ +venv/ +.venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# Abstra +# Abstra is an AI-powered process automation framework. +# Ignore directories containing user credentials, local state, and settings. 
+# Learn more at https://abstra.io/docs +.abstra/ + +# Visual Studio Code +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore +# and can be added to the global gitignore or merged into this file. However, if you prefer, +# you could uncomment the following to ignore the entire vscode folder +# .vscode/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Marimo +marimo/_static/ +marimo/_lsp/ +__marimo__/ + +# Streamlit +.streamlit/secrets.toml + +# Project or developer specific files +AGENTS.md +.kilocode/ +output/ \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..53c8986 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,31 @@ +repos: + - repo: https://github.com/astral-sh/uv-pre-commit + rev: 0.8.0 + hooks: + # Update the uv lockfile + - id: uv-lock + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.12.5 + hooks: + # Run the linter with fixes (excluding import sorting) + - id: ruff + name: "ruff-lint" + args: [--fix, --exit-non-zero-on-fix] + # Ignores I rules to separate linting from import sorting + + # Sort imports (matches `ruff check --select I --fix`) + # Selects only I (import sorting) rules and fixes them + - id: ruff + name: "ruff-sort-imports" + args: [--select, "I", --fix, --exit-non-zero-on-fix] + + # Format (matches `ruff format`) + - id: ruff-format + name: "ruff-format" + + - repo: https://github.com/RobertCraigie/pyright-python + rev: v1.1.403 + hooks: + - id: pyright + name: "pyright" \ No newline at end of file diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..24ee5b1 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.13 diff --git a/README.md b/README.md index ba4bde1..a7addd9 100644 --- a/README.md +++ b/README.md @@ -1,27 +1,154 @@ -# Database Schema Specification +# Database Schema Spec Generator -๐Ÿข **Standardized, modular JSON Schema specification** for database structure definition and validation. Designed for scalability, maintainability, and seamless integration with AI systems. +A Python package for generating unified JSON documentation files for database schemas by resolving JSON Schema references and handling oneOf variants. This tool processes modular database schema specifications and generates consolidated documentation for different database engines and versions. -## ๐Ÿ—๏ธ Architecture +## ๐Ÿš€ Quick Start -Our modular architecture prevents code duplication and enables effortless database version management: +### Prerequisites +- [uv](https://docs.astral.sh/uv/) package manager + +### Installation + +1. Clone the repository: + +```bash +git clone https://github.com/Bfloo-App/database-schema-spec.git +cd database-schema-spec ``` -specs.json # ๐ŸŽฏ Main orchestrator with $schema/$id -schemas/ -โ”œโ”€โ”€ base/ -โ”‚ โ”œโ”€โ”€ database.json # Database engine definitions -โ”‚ โ””โ”€โ”€ schema.json # Core schema structure -โ””โ”€โ”€ engines/ - โ””โ”€โ”€ postgresql/ - โ””โ”€โ”€ v15.0/ # Version-specific isolation - โ”œโ”€โ”€ schema.json # PostgreSQL 15.0 rules - โ””โ”€โ”€ components/ # Version-specific components - โ”œโ”€โ”€ table.json # Table definitions for v15.0 - โ”œโ”€โ”€ column.json # Column types for v15.0 - โ””โ”€โ”€ constraint.json # Constraints for v15.0 + +2. Install dependencies using uv: + +```bash +uv sync --frozen +``` + +3. 
Set up environment variables by creating a `.env` file:
+
+```bash
+cp .env.example .env
+# Edit .env and set BASE_URL to your desired URL
+```
+
+**Note:** The `BASE_URL` environment variable is **required**. The application will fail to start if it is not set.
+
+## 🏃‍♂️ Running the Application
+
+### Using uv (Recommended)
+
+```bash
+# Run the schema generator
+uv run main.py
+```
+
+### Using Python directly
+
+```bash
+# Activate the virtual environment first
+source .venv/bin/activate  # On Unix/macOS
+# or
+.venv\Scripts\activate  # On Windows
+
+# Then run
+python main.py
+```
+
+## 📁 Project Structure
+
+```
+database-schema-spec/
+├── main.py                  # Entry point
+├── .env                     # Environment configuration
+├── pyproject.toml           # Project dependencies
+├── database_schema_spec/    # Main package
+│   ├── cli/                 # Command-line interface
+│   ├── core/                # Core functionality
+│   │   ├── config.py        # Configuration management
+│   │   ├── exceptions.py    # Custom exceptions
+│   │   └── schemas.py       # Data models
+│   ├── io/                  # Input/output handling
+│   ├── logger/              # Logging configuration
+│   ├── resolution/          # Schema resolution logic
+│   └── validation/          # Schema validation
+├── docs/                    # Input schema files
+│   ├── specs.json           # Main schema file
+│   └── schemas/             # Schema definitions
+└── output/                  # Generated output files
+    ├── vmap.json            # Version mapping
+    └── postgresql/          # Database-specific outputs
+```
+
+## 🧪 Development
+
+### Running Tests
+
+```bash
+# Run all tests (note: some default flags are already set in pyproject.toml)
+uv run pytest
+
+# Run a specific test file
+uv run pytest tests/test_integration.py
+```
+
+### Code Quality
+
+```bash
+# Lint code
+uv run ruff check
+
+# Format code
+uv run ruff format
+
+# Type checking
+uv run pyright
+```
+
+### Pre-commit Hooks
+
+```bash
+# Install pre-commit hooks
+uv run pre-commit install
+
+# Run pre-commit manually on all files
+# Once installed, pre-commit also runs automatically on the changed files every time you commit
+uv run pre-commit run --all-files
+```
+
+## 📝 Environment Variables
+
+| Variable   | Required | Description                         | Example                           |
+| ---------- | -------- | ----------------------------------- | --------------------------------- |
+| `BASE_URL` | ✅ Yes   | Base URL for generated schema files | `https://api.example.com/schemas` |
+
+## 🔧 Configuration
+
+The application can be configured through:
+
+1. **Environment Variables**: Set in `.env` file or system environment
+2. **Configuration Constants**: Defined in `database_schema_spec/core/config.py`
+
+### Default Paths
+
+- **Input Directory**: `docs/` (contains source schema files)
+- **Output Directory**: `output/` (generated files are written here)
+- **Root Schema File**: `docs/specs.json`
+
+## 📤 Output
+
+The generator creates:
+
+- **Unified Schema Files**: Consolidated schemas for each database variant
+- **Version Map** (`vmap.json`): Mapping of available database versions
+- **Database-Specific Directories**: Organized by engine and version
+
+Example output structure:
+
+```
+output/
+├── vmap.json
+└── postgresql/
+    └── 15.0/
+        └── spec.json
+```
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..dfb2333
--- /dev/null
+++ b/__init__.py
@@ -0,0 +1 @@
+# Makes the project root a package for test discovery
diff --git a/database_schema_spec/__init__.py b/database_schema_spec/__init__.py
new file mode 100644
index 0000000..cb567d6
--- /dev/null
+++ b/database_schema_spec/__init__.py
@@ -0,0 +1,10 @@
+"""
+Database Schema Spec Generator
+
+A Python package for generating unified JSON documentation files for database schemas
+by resolving JSON Schema references and handling oneOf variants.
+"""
+
+from database_schema_spec.cli.generator import SchemaGenerator
+
+__all__ = ["SchemaGenerator"]
diff --git a/database_schema_spec/cli/__init__.py b/database_schema_spec/cli/__init__.py
new file mode 100644
index 0000000..aef4126
--- /dev/null
+++ b/database_schema_spec/cli/__init__.py
@@ -0,0 +1,5 @@
+"""Command-line interface components."""
+
+from database_schema_spec.cli.generator import SchemaGenerator
+
+__all__ = ["SchemaGenerator"]
diff --git a/database_schema_spec/cli/generator.py b/database_schema_spec/cli/generator.py
new file mode 100644
index 0000000..f152943
--- /dev/null
+++ b/database_schema_spec/cli/generator.py
@@ -0,0 +1,143 @@
+"""Main class that orchestrates the schema generation process."""
+
+from __future__ import annotations
+
+import sys
+from pathlib import Path
+
+from database_schema_spec.core.config import config
+from database_schema_spec.core.exceptions import SchemaGenerationError, ValidationError
+from database_schema_spec.core.schemas import DatabaseVariantSpec
+from database_schema_spec.io.output_manager import OutputManager
+from database_schema_spec.logger import logger, setup_logger
+from database_schema_spec.resolution.conditional_merger import ConditionalMerger
+from database_schema_spec.resolution.resolver import JSONRefResolver
+from database_schema_spec.resolution.variant_extractor import VariantExtractor
+from database_schema_spec.validation.schema_validator import SchemaValidator
+
+
+class SchemaGenerator:
+    """Main class that orchestrates the schema generation process.
+
+    This class coordinates all components to extract database variants,
+    resolve references, apply conditional logic, and generate final schemas.
+    """
+
+    def __init__(
+        self, docs_path: Path = config.docs_dir, output_path: Path = config.output_dir
+    ) -> None:
+        """Initialize the schema generator.
+
+        Args:
+            docs_path: Path to documentation/schema files
+            output_path: Path for generated output files
+        """
+        self.docs_path = docs_path
+        self.output_path = output_path
+        self.resolver = JSONRefResolver(docs_path)
+        self.variant_extractor = VariantExtractor(self.resolver)
+        self.output_manager = OutputManager(output_path)
+        self.validator = SchemaValidator()
+
+    def run(self) -> None:
+        """Run the complete schema generation process.
+ + Raises: + SystemExit: If any critical error occurs during generation + """ + try: + setup_logger() + logger.info("Generating database schema specifications...") + generated_files = self.generate_all_variants() + logger.info( + "Generation completed successfully! Generated %d unified schema file(s).", + len(generated_files), + ) + except SchemaGenerationError as e: + logger.error("Schema generation error: %s", e, exc_info=True) + sys.exit(config.exit_codes.error_invalid_schema) + except FileNotFoundError as e: + logger.error("Missing required input file: %s", e, exc_info=True) + sys.exit(config.exit_codes.error_file_not_found) + except Exception: + logger.exception("Unexpected error occurred") + sys.exit(config.exit_codes.error_file_system) + + def run_for_testing(self) -> list[Path]: + """Run the complete schema generation process for testing. + + Unlike run(), this method raises exceptions instead of calling sys.exit(), + making it suitable for unit tests. + + Returns: + List of paths where schemas were written + + Raises: + SchemaGenerationError: If any critical error occurs during generation + FileNotFoundError: If required input files are missing + """ + logger.info("Generating database schema specifications...") + generated_files = self.generate_all_variants() + return generated_files + + def generate_all_variants(self) -> list[Path]: + """Generate unified schemas for all database variants. + + Returns: + List of paths where schemas were written + """ + # Create output directory structure + self.output_manager.create_output_structure() + + # Extract all database variants + variants = self.variant_extractor.extract_variants() + + # Generate schema for each variant + generated_files: list[Path] = [] + for variant in variants: + logger.info("Generating schema for %s", variant) + file_path = self.generate_variant(variant) + generated_files.append(file_path) + + # Generate version map after all variants are created + logger.info("Generating version map...") + vmap_path = self.output_manager.write_version_map(config.base_url) + generated_files.append(vmap_path) + logger.info("Version map written to: %s", vmap_path) + + return generated_files + + def generate_variant(self, variant: DatabaseVariantSpec) -> Path: + """Generate unified schema for a specific database variant. 
+ + Args: + variant: Database variant to generate schema for + + Returns: + Path where the schema was written + """ + # Create a variant-aware resolver for this specific variant + variant_resolver = JSONRefResolver(self.docs_path, variant) + + # Create variant-aware conditional merger + variant_conditional_merger = ConditionalMerger(variant_resolver) + + # Load the root schema with variant-aware resolution + base_schema = variant_resolver.resolve_file(config.file_names.root_schema_file) + + # Apply conditional logic for this variant + unified_schema = variant_conditional_merger.apply_conditional_logic( + base_schema, variant + ) + + # Validate the resulting schema + validation_result = self.validator.validate_schema(unified_schema) + if not validation_result.is_valid: + raise ValidationError(validation_result.errors) + + # Write the schema to output file + output_path = self.output_manager.write_schema( + unified_schema, variant.engine, variant.version + ) + + return output_path diff --git a/database_schema_spec/core/__init__.py b/database_schema_spec/core/__init__.py new file mode 100644 index 0000000..0c322fa --- /dev/null +++ b/database_schema_spec/core/__init__.py @@ -0,0 +1,24 @@ +"""Core data models and shared types.""" + +from database_schema_spec.core.config import config +from database_schema_spec.core.exceptions import ( + CircularReferenceError, + ConfigurationError, + ReferenceResolutionError, + SchemaGenerationError, + ValidationError, + VariantExtractionError, +) +from database_schema_spec.core.schemas import DatabaseVariantSpec, ValidationResult + +__all__ = [ + "DatabaseVariantSpec", + "ValidationResult", + "SchemaGenerationError", + "ReferenceResolutionError", + "CircularReferenceError", + "ConfigurationError", + "VariantExtractionError", + "ValidationError", + "config", +] diff --git a/database_schema_spec/core/config.py b/database_schema_spec/core/config.py new file mode 100644 index 0000000..3978fff --- /dev/null +++ b/database_schema_spec/core/config.py @@ -0,0 +1,83 @@ +"""Configuration constants for the database schema spec generator.""" + +from pathlib import Path + +from pydantic import BaseModel, Field, ValidationError +from pydantic_settings import BaseSettings + +from .exceptions import ConfigurationError + + +class FileNamesConfig(BaseModel): + """Configuration for file names.""" + + root_schema_file: str = "specs.json" + database_schema_file: str = "schemas/base/database.json" + + +class JSONSchemaFieldsConfig(BaseModel): + """Configuration for JSON Schema field names.""" + + ref_field: str = "$ref" + oneof_field: str = "oneOf" + schema_field: str = "$schema" + id_field: str = "$id" + + +class ExitCodesConfig(BaseModel): + """Configuration for exit codes.""" + + success: int = 0 + error_file_not_found: int = 1 + error_invalid_schema: int = 2 + error_circular_reference: int = 3 + error_validation_failed: int = 4 + error_file_system: int = 5 + + +class Config(BaseSettings): + """Main configuration class for the database schema spec generator.""" + + # Directory paths + docs_dir: Path = Field( + default=Path("docs"), description="Path to documentation/schema files" + ) + output_dir: Path = Field( + default=Path("output"), description="Path for generated output files" + ) + + # Base URL for generated spec files (required from environment) + base_url: str = Field(..., description="Base URL for generated spec files") + + # Nested configurations + file_names: FileNamesConfig = Field(default_factory=FileNamesConfig) + json_schema_fields: JSONSchemaFieldsConfig = 
Field( + default_factory=JSONSchemaFieldsConfig + ) + exit_codes: ExitCodesConfig = Field(default_factory=ExitCodesConfig) + + model_config = { + "env_file": ".env", + "env_file_encoding": "utf-8", + "case_sensitive": False, + } + + def __init__(self, **data): + """Initialize config with custom error handling for missing required fields.""" + try: + super().__init__(**data) + except ValidationError as e: + # Only handle missing field errors, let other validation errors bubble up + for error in e.errors(): + if error["type"] == "missing": + field_name = error["loc"][0] if error["loc"] else "unknown" + # Convert field name to environment variable name format + env_var_name = str(field_name).upper() + raise ConfigurationError( + variable_name=env_var_name, + ) from e + # Re-raise the original ValidationError for non-missing errors + raise + + +config = Config() diff --git a/database_schema_spec/core/exceptions.py b/database_schema_spec/core/exceptions.py new file mode 100644 index 0000000..6c80ccb --- /dev/null +++ b/database_schema_spec/core/exceptions.py @@ -0,0 +1,83 @@ +"""Custom exception classes for the database schema spec generator.""" + +from __future__ import annotations + + +class SchemaGenerationError(Exception): + """Base exception for schema generation errors. + + All custom exceptions in the database schema spec generator inherit from this class. + """ + + pass + + +class ReferenceResolutionError(SchemaGenerationError): + """Error during JSON reference resolution. + + Raised when a JSON reference ($ref) cannot be resolved to a valid schema. + + Args: + ref_path: The reference path that failed to resolve + cause: The underlying exception that caused the resolution failure + """ + + def __init__(self, ref_path: str, cause: Exception) -> None: + self.ref_path = ref_path + self.cause = cause + super().__init__(f"Failed to resolve reference '{ref_path}': {cause}") + + +class CircularReferenceError(SchemaGenerationError): + """Error when circular reference detected. + + Raised when a circular dependency is detected in JSON references, + which would cause infinite recursion during resolution. + + Args: + reference_chain: List of references showing the circular dependency path + """ + + def __init__(self, reference_chain: list[str]) -> None: + self.reference_chain = reference_chain + super().__init__(f"Circular reference detected: {' -> '.join(reference_chain)}") + + +class VariantExtractionError(SchemaGenerationError): + """Error during database variant extraction. + + Raised when database-specific variants cannot be extracted from conditional schemas. + """ + + pass + + +class ValidationError(SchemaGenerationError): + """Error during schema validation. + + Raised when schema validation fails with one or more validation errors. + + Args: + errors: List of validation error messages + """ + + def __init__(self, errors: list[str]) -> None: + self.errors = errors + super().__init__(f"Schema validation failed: {'; '.join(errors)}") + + +class ConfigurationError(SchemaGenerationError): + """Error in application configuration. + + Raised when required configuration values are missing or invalid, + such as missing environment variables or invalid configuration settings. 
+
+    Args:
+        variable_name: The name of the configuration variable that caused the error
+    """
+
+    def __init__(self, variable_name: str) -> None:
+        self.variable_name = variable_name
+        message = f"Required configuration variable '{variable_name}' is not set"
+        super().__init__(message)
diff --git a/database_schema_spec/core/schemas.py b/database_schema_spec/core/schemas.py
new file mode 100644
index 0000000..56282bc
--- /dev/null
+++ b/database_schema_spec/core/schemas.py
@@ -0,0 +1,74 @@
+"""Pydantic models for type-safe data validation and parsing."""
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field, field_validator
+
+
+class DatabaseVariantSpec(BaseModel):
+    """Pydantic model for database variant specifications.
+
+    Provides type safety and validation for database engine and version data.
+    """
+
+    engine: str = Field(..., min_length=1, description="Database engine name")
+    version: str = Field(..., min_length=1, description="Database version")
+    engine_spec_path: str | None = Field(
+        None, description="Path to engine specification file"
+    )
+
+    @field_validator("engine")
+    @classmethod
+    def validate_engine(cls, v: str) -> str:
+        """Validate engine name format."""
+        if not v.replace("_", "").replace("-", "").replace(" ", "").isalnum():
+            raise ValueError(
+                "Engine name must contain only alphanumeric characters, hyphens, underscores, and spaces"
+            )
+        return v
+
+    @field_validator("version")
+    @classmethod
+    def validate_version(cls, v: str) -> str:
+        """Validate version format."""
+        if not v.replace(".", "").replace("-", "").replace("_", "").isalnum():
+            raise ValueError(
+                "Version must contain only alphanumeric characters, dots, hyphens, and underscores"
+            )
+        return v
+
+    def __str__(self) -> str:
+        """Return string representation in format 'engine version'."""
+        return f"{self.engine} {self.version}"
+
+    def output_path(self) -> str:
+        """Generate the output directory path for this variant.
+
+        Returns:
+            Path in format 'engine/version' with lowercase engine name
+        """
+        return f"{self.engine.lower()}/{self.version}"
+
+
+class ValidationResult(BaseModel):
+    """Result of schema validation with type safety.
+
+    Provides validated results for schema validation operations.
+ """ + + is_valid: bool = Field(..., description="Whether the schema passed validation") + errors: list[str] = Field( + default_factory=list, description="Validation error messages" + ) + warnings: list[str] = Field( + default_factory=list, description="Validation warning messages" + ) + + def add_error(self, message: str) -> None: + """Add an error message to the validation result.""" + self.errors.append(message) + self.is_valid = False + + def add_warning(self, message: str) -> None: + """Add a warning message to the validation result.""" + self.warnings.append(message) diff --git a/database_schema_spec/io/__init__.py b/database_schema_spec/io/__init__.py new file mode 100644 index 0000000..0290478 --- /dev/null +++ b/database_schema_spec/io/__init__.py @@ -0,0 +1,5 @@ +"""File I/O operations for schema output.""" + +from database_schema_spec.io.output_manager import OutputManager + +__all__ = ["OutputManager"] diff --git a/database_schema_spec/io/output_manager.py b/database_schema_spec/io/output_manager.py new file mode 100644 index 0000000..e15aa05 --- /dev/null +++ b/database_schema_spec/io/output_manager.py @@ -0,0 +1,160 @@ +"""File system operations for output generation.""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import Any + +from database_schema_spec.core.config import config + + +class OutputManager: + """Manages file system operations for output generation. + + This class handles creating directory structures and writing + resolved schemas to the appropriate output locations. + """ + + def __init__(self, output_dir: Path = config.output_dir) -> None: + """Initialize the output manager. + + Args: + output_dir: Base directory for output files + """ + self.output_dir = output_dir + + def create_output_structure(self) -> None: + """Create the base output directory structure. + + Raises: + PermissionError: If unable to create directories + """ + try: + self.output_dir.mkdir(parents=True, exist_ok=True) + except Exception as e: + raise PermissionError( + f"Failed to create output directory {self.output_dir}: {e}" + ) from e + + def write_schema(self, schema: dict[str, Any], engine: str, version: str) -> Path: + """Write a resolved schema to the appropriate output file. + + Args: + schema: Fully resolved schema to write + engine: Database engine name + version: Database version + + Returns: + Path where the file was written + + Raises: + PermissionError: If unable to write file + """ + output_path = self._get_output_path(engine, version) + + try: + # Create directory structure if it doesn't exist + output_path.parent.mkdir(parents=True, exist_ok=True) + + # Write the schema to the file + with open(output_path, "w", encoding="utf-8") as f: + json.dump(schema, f, indent=2, ensure_ascii=False) + + return output_path + + except Exception as e: + raise PermissionError( + f"Failed to write schema to {output_path}: {e}" + ) from e + + def _get_output_path(self, engine: str, version: str) -> Path: + """Get the output path for a specific engine/version combination. + + Args: + engine: Database engine name + version: Database version + + Returns: + Path where the spec should be written + """ + return self.output_dir / engine.lower() / version / "spec.json" + + def _get_spec_url(self, engine: str, version: str, base_url: str = "") -> str: + """Get the URL for a specific engine/version spec file. 
+ + Args: + engine: Database engine name + version: Database version + base_url: Base URL to prepend (optional) + + Returns: + URL pointing to the spec file + """ + relative_path = f"{engine.lower()}/{version}/spec.json" + if base_url: + return f"{base_url.rstrip('/')}/{relative_path}" + return relative_path + + def _generate_version_map(self, base_url: str = "") -> dict[str, dict[str, str]]: + """Generate a version map of all available engines and versions. + + Args: + base_url: Base URL to prepend to spec URLs (optional) + + Returns: + Dictionary mapping engines to versions to URLs + """ + version_map: dict[str, dict[str, str]] = {} + + if not self.output_dir.exists(): + return version_map + + # Iterate through all engine directories + for engine_dir in self.output_dir.iterdir(): + if engine_dir.is_dir(): + engine_name = engine_dir.name + version_map[engine_name] = {} + + # Iterate through all version directories for this engine + for version_dir in engine_dir.iterdir(): + if version_dir.is_dir(): + spec_file = version_dir / "spec.json" + if spec_file.exists(): + version_name = version_dir.name + spec_url = self._get_spec_url( + engine_name, version_name, base_url + ) + version_map[engine_name][version_name] = spec_url + + return version_map + + def write_version_map(self, base_url: str = "") -> Path: + """Write the version map to vmap.json in the output root. + + Args: + base_url: Base URL to prepend to spec URLs (optional) + + Returns: + Path where the vmap.json file was written + + Raises: + PermissionError: If unable to write file + """ + version_map = self._generate_version_map(base_url) + vmap_path = self.output_dir / "vmap.json" + + try: + # Ensure output directory exists + self.output_dir.mkdir(parents=True, exist_ok=True) + + # Write the version map to the file + with open(vmap_path, "w", encoding="utf-8") as f: + json.dump(version_map, f, indent=2, ensure_ascii=False) + + return vmap_path + + except Exception as e: + raise PermissionError( + f"Failed to write version map to {vmap_path}: {e}" + ) from e diff --git a/database_schema_spec/logger/__init__.py b/database_schema_spec/logger/__init__.py new file mode 100644 index 0000000..998d173 --- /dev/null +++ b/database_schema_spec/logger/__init__.py @@ -0,0 +1,17 @@ +"""Centralized logging configuration for the database schema specification tool. + +This module provides a configured logger instance that can be imported and used +throughout the application. The logger is configured with both console and file +handlers using settings from logging_config.json. 
+ +Usage: + from database_schema_spec.logger import logger + + logger.info("This is an info message") + logger.error("This is an error message") + logger.debug("This is a debug message") +""" + +from .logger import logger, setup_logger + +__all__ = ["logger", "setup_logger"] diff --git a/database_schema_spec/logger/logger.py b/database_schema_spec/logger/logger.py new file mode 100644 index 0000000..ef60176 --- /dev/null +++ b/database_schema_spec/logger/logger.py @@ -0,0 +1,31 @@ +import atexit +import json +import logging +import logging.config +from pathlib import Path + +# Configure logging +logger = logging.getLogger("SchemaGenerator") + + +def setup_logger(): + config_file = Path(__file__).parent / "logging_config.json" + with open(config_file) as f: + config = json.load(f) + logging.config.dictConfig(config) + queue_handler = logging.getHandlerByName("queue_handler") + if queue_handler is not None and hasattr(queue_handler, "listener"): + # Type checker doesn't understand hasattr, so we access listener safely + listener = getattr(queue_handler, "listener", None) + if listener is not None: + listener.start() + atexit.register(listener.stop) + + +if __name__ == "__main__": + setup_logger() + # If this module is run directly, set up the logger + logger.debug("This is a debug message.") + logger.info("This is an info message.") + logger.warning("This is a warning message.") + logger.error("This is an error message.") diff --git a/database_schema_spec/logger/logging_config.json b/database_schema_spec/logger/logging_config.json new file mode 100644 index 0000000..7885643 --- /dev/null +++ b/database_schema_spec/logger/logging_config.json @@ -0,0 +1,34 @@ +{ + "version": 1, + "disable_existing_loggers": false, + + "formatters": { + "simple": { + "format": "%(levelname)s - %(message)s" + }, + "detailed": { + "format": "[%(levelname)s - %(module)s - L%(lineno)d] %(asctime)s: %(message)s", + "datefmt": "%Y-%m-%dT%H:%M:%S%z" + } + }, + + "handlers": { + "stderr": { + "class": "logging.StreamHandler", + "level": "INFO", + "formatter": "detailed", + "stream": "ext://sys.stderr" + }, + "queue_handler": { + "class": "logging.handlers.QueueHandler", + "handlers": ["stderr"], + "respect_handler_level": true + } + }, + "loggers": { + "root": { + "level": "INFO", + "handlers": ["queue_handler"] + } + } +} diff --git a/database_schema_spec/resolution/__init__.py b/database_schema_spec/resolution/__init__.py new file mode 100644 index 0000000..9b2ad6c --- /dev/null +++ b/database_schema_spec/resolution/__init__.py @@ -0,0 +1,7 @@ +"""JSON Schema reference resolution components.""" + +from database_schema_spec.resolution.conditional_merger import ConditionalMerger +from database_schema_spec.resolution.resolver import JSONRefResolver +from database_schema_spec.resolution.variant_extractor import VariantExtractor + +__all__ = ["JSONRefResolver", "VariantExtractor", "ConditionalMerger"] diff --git a/database_schema_spec/resolution/conditional_merger.py b/database_schema_spec/resolution/conditional_merger.py new file mode 100644 index 0000000..bd27fc4 --- /dev/null +++ b/database_schema_spec/resolution/conditional_merger.py @@ -0,0 +1,363 @@ +"""Conditional oneOf logic and engine-specific merging with validation.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from database_schema_spec.core.config import config +from database_schema_spec.core.exceptions import ValidationError +from database_schema_spec.core.schemas import DatabaseVariantSpec + +if TYPE_CHECKING: + from 
database_schema_spec.resolution.interfaces import IJSONRefResolver
+
+
+class ConditionalMerger:
+    """Handles conditional oneOf logic and engine-specific merging with validation.
+
+    This class processes oneOf conditional blocks in JSON schemas to resolve
+    them to database-specific configurations. It supports two formats:
+    1. if/then conditional structure (specs.json style)
+    2. Direct property constraints (database.json style)
+    """
+
+    def __init__(self, resolver: "IJSONRefResolver") -> None:
+        """Initialize the conditional merger.
+
+        Args:
+            resolver: JSON reference resolver for handling nested references
+        """
+        self.resolver = resolver
+
+    def apply_conditional_logic(
+        self, base_schema: dict[str, Any], variant: DatabaseVariantSpec
+    ) -> dict[str, Any]:
+        """Apply conditional oneOf logic for a specific database variant.
+
+        Args:
+            base_schema: The resolved base schema
+            variant: Database variant to apply conditions for
+
+        Returns:
+            Schema with oneOf conditions resolved for the variant
+
+        Raises:
+            ValidationError: If no matching condition found or multiple matches
+        """
+        # Make a copy of the schema to avoid modifying the original
+        result_schema = dict(base_schema)
+
+        # Check if this schema has oneOf conditions to process
+        oneof_data = result_schema.get(config.json_schema_fields.oneof_field)
+        if not oneof_data or not isinstance(oneof_data, list):
+            return result_schema
+
+        # Track matching conditions
+        matching_conditions: list[dict[str, Any]] = []
+
+        # First pass: find all matching conditions
+        for condition in oneof_data:
+            if not isinstance(condition, dict):
+                continue
+
+            # Check if this condition matches our variant
+            if self._matches_variant_condition(condition, variant):
+                matching_conditions.append(condition)
+
+        # Validate that exactly one condition matched
+        if len(matching_conditions) == 0:
+            supported_variants = self._get_supported_variants(base_schema)
+            raise ValidationError(
+                [
+                    f"No matching oneOf condition found for {variant.engine} {variant.version}. "
+                    f"Supported variants: {', '.join(supported_variants)}"
+                ]
+            )
+        elif len(matching_conditions) > 1:
+            raise ValidationError(
+                [
+                    f"Multiple matching conditions found for {variant.engine} {variant.version}. "
+                    "oneOf conditions should be mutually exclusive."
+ ] + ) + + # Apply the single matching condition + condition = matching_conditions[0] + merged_schema = self._merge_condition_schema(result_schema, condition, variant) + result_schema = merged_schema + + # Remove the oneOf block since we've resolved it + if config.json_schema_fields.oneof_field in result_schema: + del result_schema[config.json_schema_fields.oneof_field] + + return result_schema + + def _matches_variant_condition( + self, condition: dict[str, Any], variant: DatabaseVariantSpec + ) -> bool: + """Check if a oneOf condition matches the given database variant.""" + if self._is_if_then_condition(condition): + return self._matches_if_then_condition(condition, variant) + if self._is_direct_properties_condition(condition): + return self._matches_direct_properties_condition(condition, variant) + return False + + def _is_if_then_condition(self, condition: dict[str, Any]) -> bool: + return "if" in condition and isinstance(condition["if"], dict) + + def _matches_if_then_condition( + self, condition: dict[str, Any], variant: DatabaseVariantSpec + ) -> bool: + if_condition = condition["if"] + return self._check_if_condition_match(if_condition, variant) + + def _is_direct_properties_condition(self, condition: dict[str, Any]) -> bool: + return "properties" in condition and isinstance(condition["properties"], dict) + + def _matches_direct_properties_condition( + self, condition: dict[str, Any], variant: DatabaseVariantSpec + ) -> bool: + properties = condition["properties"] + return self._check_properties_match(properties, variant) + + def _check_if_condition_match( + self, if_condition: dict[str, Any], variant: DatabaseVariantSpec + ) -> bool: + """Check if an 'if' condition matches the variant. + + Args: + if_condition: The if condition to evaluate + variant: Database variant to match against + + Returns: + True if condition matches, False otherwise + """ + # Look for database properties in the if condition + if "properties" not in if_condition: + return False + + properties = if_condition["properties"] + if not isinstance(properties, dict): + return False + + # Check for database property constraints (nested style) + if "database" in properties: + db_props = properties["database"] + if not isinstance(db_props, dict): + return False + + if "properties" in db_props: + db_properties = db_props["properties"] + if isinstance(db_properties, dict): + return self._check_properties_match(db_properties, variant) + + # Check for direct property constraints (direct style) + elif "engine" in properties or "version" in properties: + return self._check_properties_match(properties, variant) + + return False + + def _check_properties_match( + self, properties: dict[str, Any], variant: DatabaseVariantSpec + ) -> bool: + """Check if property constraints match the variant.""" + if not self._engine_matches(properties, variant.engine): + return False + if not self._version_matches(properties, variant.version): + return False + return True + + def _engine_matches(self, properties: dict[str, Any], engine: str) -> bool: + if "engine" not in properties: + return True + engine_constraint = properties["engine"] + if isinstance(engine_constraint, dict) and "const" in engine_constraint: + const_val = engine_constraint["const"] + return isinstance(const_val, str) and const_val == engine + if isinstance(engine_constraint, str): + return engine_constraint == engine + return True + + def _version_matches(self, properties: dict[str, Any], version: str) -> bool: + if "version" not in properties: + return True + 
version_constraint = properties["version"] + if isinstance(version_constraint, dict) and "const" in version_constraint: + const_val = version_constraint["const"] + return isinstance(const_val, str) and const_val == version + if isinstance(version_constraint, str): + return version_constraint == version + return True + + def _merge_condition_schema( + self, + base_schema: dict[str, Any], + condition: dict[str, Any], + variant: DatabaseVariantSpec, + ) -> dict[str, Any]: + """Merge a matching condition into the base schema. + + Args: + base_schema: The base schema to merge into + condition: The matching oneOf condition + variant: Database variant being processed + + Returns: + Merged schema with condition applied + + Raises: + ValidationError: If merging fails + """ + result_schema = dict(base_schema) + + # Handle if/then conditional structure + if "then" in condition: + then_clause = condition["then"] + if not isinstance(then_clause, dict): + return result_schema + + # If then clause has a $ref, resolve it first + if config.json_schema_fields.ref_field in then_clause: + try: + resolved_then = self.resolver.resolve_references(then_clause) + result_schema = self._deep_merge_schemas( + result_schema, resolved_then + ) + except Exception as e: + raise ValidationError( + [f"Failed to resolve then clause reference: {e}"] + ) from e + else: + result_schema = self._deep_merge_schemas(result_schema, then_clause) + + # Handle direct property structure (database.json style) + elif "properties" in condition: + # For direct properties, just merge them in + if "properties" not in result_schema: + result_schema["properties"] = {} + + condition_props = condition["properties"] + if isinstance(condition_props, dict): + if isinstance(result_schema["properties"], dict): + result_schema["properties"] = self._deep_merge_schemas( + result_schema["properties"], condition_props + ) + + return result_schema + + def _deep_merge_schemas( + self, base: dict[str, Any], overlay: dict[str, Any] + ) -> dict[str, Any]: + """Deep merge two schema dictionaries. + + Args: + base: Base schema dictionary + overlay: Schema dictionary to overlay + + Returns: + Merged schema with overlay taking precedence + """ + result = dict(base) + + for key, value in overlay.items(): + if key in result: + if isinstance(result[key], dict) and isinstance(value, dict): + result[key] = self._deep_merge_schemas(result[key], value) + else: + result[key] = value + else: + result[key] = value + + return result + + def _get_supported_variants(self, schema: dict[str, Any]) -> list[str]: + """Extract supported variants from oneOf conditions for error reporting. 
+ + Args: + schema: Schema containing oneOf conditions + + Returns: + List of supported variant strings in "Engine Version" format + """ + variants: list[str] = [] + oneof_data = schema.get(config.json_schema_fields.oneof_field, []) + + if not isinstance(oneof_data, list): + return variants + + for condition in oneof_data: + if not isinstance(condition, dict): + continue + + # Try to extract variant info from different formats + variant_info = self._extract_variant_from_condition(condition) + if variant_info: + variants.append(variant_info) + + return variants + + def _extract_variant_from_condition(self, condition: dict[str, Any]) -> str | None: + """Extract variant string from a oneOf condition.""" + variant = self._extract_variant_from_if_then(condition) + if variant is not None: + return variant + return self._extract_variant_from_direct_properties(condition) + + def _extract_variant_from_if_then(self, condition: dict[str, Any]) -> str | None: + """Extract variant from if/then format.""" + if "if" in condition: + if_condition = condition["if"] + if isinstance(if_condition, dict) and "properties" in if_condition: + if_props = if_condition["properties"] + if isinstance(if_props, dict) and "database" in if_props: + db_props = if_props["database"] + if isinstance(db_props, dict) and "properties" in db_props: + db_properties = db_props["properties"] + if isinstance(db_properties, dict): + return self._extract_variant_from_properties(db_properties) + return None + + def _extract_variant_from_direct_properties( + self, condition: dict[str, Any] + ) -> str | None: + """Extract variant from direct properties format.""" + if "properties" in condition: + props = condition["properties"] + if isinstance(props, dict): + return self._extract_variant_from_properties(props) + return None + + def _extract_variant_from_properties( + self, properties: dict[str, Any] + ) -> str | None: + """Extract variant from property constraints. + + Args: + properties: Property constraints + + Returns: + Variant string or None if not extractable + """ + engine = None + version = None + + # Extract engine + if "engine" in properties: + engine_prop = properties["engine"] + if isinstance(engine_prop, dict) and "const" in engine_prop: + const_val = engine_prop["const"] + if isinstance(const_val, str): + engine = const_val + + # Extract version + if "version" in properties: + version_prop = properties["version"] + if isinstance(version_prop, dict) and "const" in version_prop: + const_val = version_prop["const"] + if isinstance(const_val, str): + version = const_val + + if engine and version: + return f"{engine} {version}" + + return None diff --git a/database_schema_spec/resolution/interfaces.py b/database_schema_spec/resolution/interfaces.py new file mode 100644 index 0000000..4533138 --- /dev/null +++ b/database_schema_spec/resolution/interfaces.py @@ -0,0 +1,7 @@ +from typing import Any, Protocol + + +class IJSONRefResolver(Protocol): + def resolve_references( + self, schema: dict[str, Any], current_file: str | None = None + ) -> dict[str, Any]: ... 
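The `IJSONRefResolver` protocol above is structural: any object exposing a compatible `resolve_references` method satisfies it, so `ConditionalMerger` stays decoupled from the concrete `JSONRefResolver`. A minimal sketch of exercising that duck typing, e.g. in a test — the `StubResolver` name and the sample schema are illustrative only, and `BASE_URL` must be set in the environment before the package is imported, since `config` is instantiated at import time:

```python
from typing import Any

from database_schema_spec.core.schemas import DatabaseVariantSpec
from database_schema_spec.resolution.conditional_merger import ConditionalMerger


class StubResolver:
    """Hypothetical resolver satisfying IJSONRefResolver without inheriting from it."""

    def resolve_references(
        self, schema: dict[str, Any], current_file: str | None = None
    ) -> dict[str, Any]:
        return schema  # pass-through: no $ref resolution needed for this sketch


merger = ConditionalMerger(StubResolver())  # accepted via structural typing
variant = DatabaseVariantSpec(engine="postgresql", version="15.0")

# A direct-properties oneOf condition (database.json style) for this variant.
schema = {
    "oneOf": [
        {
            "properties": {
                "engine": {"const": "postgresql"},
                "version": {"const": "15.0"},
            }
        }
    ]
}

resolved = merger.apply_conditional_logic(schema, variant)
assert "oneOf" not in resolved  # the single matching branch was merged in
```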
diff --git a/database_schema_spec/resolution/resolver.py b/database_schema_spec/resolution/resolver.py new file mode 100644 index 0000000..428d37e --- /dev/null +++ b/database_schema_spec/resolution/resolver.py @@ -0,0 +1,233 @@ +"""JSON Schema reference resolver with variant-aware oneOf resolution.""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import Any + +from database_schema_spec.core.config import config +from database_schema_spec.core.exceptions import ( + CircularReferenceError, + ReferenceResolutionError, + ValidationError, +) +from database_schema_spec.core.schemas import DatabaseVariantSpec +from database_schema_spec.logger import logger +from database_schema_spec.resolution.conditional_merger import ConditionalMerger + + +class JSONRefResolver: + """Handles JSON Schema $ref reference resolution with variant-aware oneOf resolution. + + This resolver processes JSON Schema files and resolves all $ref references, + with special handling for oneOf conditional blocks based on database variants. + """ + + def __init__( + self, + base_path: Path = config.docs_dir, + current_variant: DatabaseVariantSpec | None = None, + ) -> None: + """Initialize the JSON reference resolver. + + Args: + base_path: Base directory for resolving relative references + current_variant: Database variant for conditional oneOf resolution + """ + self.base_path = base_path + self.current_variant = current_variant + self.resolution_stack: list[str] = [] + + def resolve_references( + self, schema: dict[str, Any], current_file: str | None = None + ) -> dict[str, Any]: + """Recursively resolve all $ref references in a schema.""" + if not isinstance(schema, dict): + return schema + + if config.json_schema_fields.ref_field in schema: + return self._resolve_ref(schema, current_file) + return self._resolve_nested(schema, current_file) + + def _resolve_ref( + self, schema: dict[str, Any], current_file: str | None + ) -> dict[str, Any]: + ref_path = schema[config.json_schema_fields.ref_field] + if self.detect_circular_reference(ref_path): + raise CircularReferenceError(self.resolution_stack + [ref_path]) + self.resolution_stack.append(ref_path) + try: + referenced_content = self.load_referenced_file(ref_path, current_file) + referenced_content = self.resolve_oneof_for_variant( + referenced_content, ref_path + ) + new_current_file = self._get_new_current_file(current_file, ref_path) + resolved_content = self.resolve_references( + referenced_content, new_current_file + ) + return self._merge_schema_with_ref(schema, resolved_content) + finally: + self.resolution_stack.pop() + + def _get_new_current_file(self, current_file: str | None, ref_path: str) -> str: + if current_file: + current_dir = (self.base_path / current_file).parent + return str((current_dir / ref_path).relative_to(self.base_path)) + return ref_path + + def _merge_schema_with_ref( + self, schema: dict[str, Any], resolved_content: dict[str, Any] + ) -> dict[str, Any]: + result = dict(resolved_content) + for key, value in schema.items(): + if key == config.json_schema_fields.ref_field: + continue + if key in result: + if isinstance(result[key], dict) and isinstance(value, dict): + result[key] = {**result[key], **value} + else: + result[key] = value + else: + result[key] = value + return result + + def _resolve_nested( + self, schema: dict[str, Any], current_file: str | None + ) -> dict[str, Any]: + result: dict[str, Any] = {} + for key, value in schema.items(): + if isinstance(value, dict): + result[key] = 
self.resolve_references(value, current_file) + elif isinstance(value, list): + result[key] = [ + self.resolve_references(item, current_file) + if isinstance(item, dict) + else item + for item in value + ] + else: + result[key] = value + return result + + def detect_circular_reference(self, ref_path: str) -> bool: + """Check if adding this reference would create a circular dependency. + + Args: + ref_path: The reference path to check + + Returns: + True if this would create a circular reference, False otherwise + """ + return ref_path in self.resolution_stack + + def load_referenced_file( + self, ref_path: str, current_file: str | None = None + ) -> dict[str, Any]: + """Load a JSON file from a reference path. + + Args: + ref_path: Relative path to the referenced file + current_file: Path of the file making the reference (for relative resolution) + + Returns: + Parsed JSON content + + Raises: + ReferenceResolutionError: If file doesn't exist or invalid JSON + """ + try: + # If we have a current file path, resolve relative to it + if current_file: + current_dir = (self.base_path / current_file).parent + full_path = current_dir / ref_path + else: + # Resolve relative to base_path + full_path = self.base_path / ref_path + + # Normalize the path + full_path = full_path.resolve() + + # Check if file exists + if not full_path.exists(): + raise FileNotFoundError(f"Referenced file not found: {full_path}") + + # Read and parse JSON + with open(full_path, "r", encoding="utf-8") as f: + content: dict[str, Any] = json.load(f) + + return content + + except FileNotFoundError as e: + raise ReferenceResolutionError(ref_path, e) + except json.JSONDecodeError as e: + raise ReferenceResolutionError(ref_path, e) + except Exception as e: + raise ReferenceResolutionError(ref_path, e) + + def resolve_oneof_for_variant( + self, schema: dict[str, Any], schema_path: str + ) -> dict[str, Any]: + """Resolve oneOf blocks for target variant if applicable. + + This method applies conditional logic to oneOf blocks found in referenced files + to resolve them to the appropriate variant-specific schema. + + Args: + schema: Schema to process + schema_path: Path to the schema file + + Returns: + Processed schema with oneOf blocks resolved for the target variant + """ + # If no target variant is set, return schema unchanged + if not self.current_variant: + return schema + + # If schema doesn't have oneOf blocks, return unchanged + if config.json_schema_fields.oneof_field not in schema: + return schema + + # Create a conditional merger to resolve oneOf blocks + merger: ConditionalMerger = ConditionalMerger(self) + + # Apply conditional logic to resolve oneOf block for our target variant + try: + resolved_schema = merger.apply_conditional_logic( + schema, self.current_variant + ) + return resolved_schema + except ValidationError: + # If no matching condition found, just return original schema + # This allows oneOf blocks that don't apply to this variant to pass through + return schema + + def resolve_file(self, file_path: str) -> dict[str, Any]: + """Load and resolve a JSON file with all $ref references resolved. 
+ + Args: + file_path: Path to the JSON file to load + + Returns: + Fully resolved JSON schema + + Raises: + ReferenceResolutionError: If file cannot be loaded or resolved + """ + try: + # Load the file + full_path = self.base_path / file_path + if not full_path.exists(): + logger.error("File not found: %s", full_path) + raise FileNotFoundError(f"File not found: {full_path}") + + with open(full_path, "r", encoding="utf-8") as f: + schema: dict[str, Any] = json.load(f) + + # Resolve all references + resolved_schema = self.resolve_references(schema, file_path) + return resolved_schema + + except Exception as e: + logger.exception("Error resolving file '%s': %s", file_path, e) + raise ReferenceResolutionError(file_path, e) from e diff --git a/database_schema_spec/resolution/variant_extractor.py b/database_schema_spec/resolution/variant_extractor.py new file mode 100644 index 0000000..a467d82 --- /dev/null +++ b/database_schema_spec/resolution/variant_extractor.py @@ -0,0 +1,124 @@ +"""Database variant extraction from oneOf blocks.""" + +from __future__ import annotations + +from typing import Any + +from pydantic import ValidationError + +from database_schema_spec.core.config import config +from database_schema_spec.core.exceptions import VariantExtractionError +from database_schema_spec.core.schemas import DatabaseVariantSpec +from database_schema_spec.resolution.resolver import JSONRefResolver + + +class VariantExtractor: + """Extracts database variants from oneOf blocks in database schema files. + + This class parses oneOf conditional blocks to identify all supported + database engine and version combinations. + """ + + def __init__(self, resolver: JSONRefResolver) -> None: + """Initialize the variant extractor. + + Args: + resolver: JSON reference resolver for loading schema files + """ + self.resolver = resolver + + def extract_variants(self) -> list[DatabaseVariantSpec]: + """Extract all database variants from the database schema file. + + Returns: + List of DatabaseVariantSpec objects representing all variants + + Raises: + VariantExtractionError: If variants cannot be extracted + """ + try: + # Load the database schema file + database_schema = self.resolver.resolve_file( + config.file_names.database_schema_file + ) + + # Extract oneOf items + oneof_items = database_schema.get(config.json_schema_fields.oneof_field, []) + if not isinstance(oneof_items, list): + raise VariantExtractionError( + f"Invalid oneOf structure in {config.file_names.database_schema_file}" + ) + + # Parse each oneOf item to extract variants + variants = self.parse_oneof_block(oneof_items) + + if not variants: + raise VariantExtractionError( + f"No variants found in {config.file_names.database_schema_file}" + ) + + return variants + + except Exception as e: + if isinstance(e, VariantExtractionError): + raise + raise VariantExtractionError( + f"Failed to extract variants from {config.file_names.database_schema_file}: {e}" + ) from e + + def parse_oneof_block(self, oneof_items: list[Any]) -> list[DatabaseVariantSpec]: + """Parse oneOf items to extract database variants. 
+ + Args: + oneof_items: List of oneOf condition objects + + Returns: + List of extracted DatabaseVariantSpec objects + + Raises: + VariantExtractionError: If parsing fails + """ + variants: list[DatabaseVariantSpec] = [] + + for item in oneof_items: + if not isinstance(item, dict): + continue + + # Extract properties from the oneOf item + properties = item.get("properties") + if not isinstance(properties, dict): + continue + + # Extract engine and version from properties + engine = None + version = None + + # Get engine constraint + if "engine" in properties: + engine_prop = properties["engine"] + if isinstance(engine_prop, dict) and "const" in engine_prop: + const_value = engine_prop["const"] + if isinstance(const_value, str): + engine = const_value + + # Get version constraint + if "version" in properties: + version_prop = properties["version"] + if isinstance(version_prop, dict) and "const" in version_prop: + const_value = version_prop["const"] + if isinstance(const_value, str): + version = const_value + + # Create variant if we have both engine and version + if engine and version: + try: + variant = DatabaseVariantSpec( + engine=engine, version=version, engine_spec_path=None + ) + variants.append(variant) + except ValidationError as e: + raise VariantExtractionError( + f"Invalid variant data - engine: {engine}, version: {version}: {e}" + ) from e + + return variants diff --git a/database_schema_spec/validation/__init__.py b/database_schema_spec/validation/__init__.py new file mode 100644 index 0000000..0ff0713 --- /dev/null +++ b/database_schema_spec/validation/__init__.py @@ -0,0 +1,5 @@ +"""Schema validation components.""" + +from database_schema_spec.validation.schema_validator import SchemaValidator + +__all__ = ["SchemaValidator"] diff --git a/database_schema_spec/validation/schema_validator.py b/database_schema_spec/validation/schema_validator.py new file mode 100644 index 0000000..080116a --- /dev/null +++ b/database_schema_spec/validation/schema_validator.py @@ -0,0 +1,143 @@ +"""Schema validation against JSON Schema standards.""" + +from __future__ import annotations + +from typing import Any + +import jsonschema +from jsonschema import Draft7Validator + +from database_schema_spec.core.config import config +from database_schema_spec.core.schemas import ValidationResult + + +class SchemaValidator: + """Validates generated schemas against JSON Schema standards. + + This validator checks that generated schemas conform to JSON Schema Draft 7 + standard and includes additional checks for project-specific requirements. + """ + + def validate_schema(self, schema: dict[str, Any]) -> ValidationResult: + """Validate a schema against JSON Schema Draft 7 standard. + + Args: + schema: Schema to validate + + Returns: + ValidationResult with validation status and any errors/warnings + """ + errors: list[str] = [] + warnings: list[str] = [] + + # Validate against JSON Schema Draft 7 + try: + Draft7Validator.check_schema(schema) + except jsonschema.SchemaError as e: + errors.append(f"JSON Schema validation failed: {e.message}") + + # Perform additional custom validations + self._validate_required_fields(schema, errors) + self._validate_schema_structure(schema, warnings) + + return ValidationResult( + is_valid=len(errors) == 0, errors=errors, warnings=warnings + ) + + def _validate_required_fields( + self, schema: dict[str, Any], errors: list[str] + ) -> None: + """Validate that required fields are present in the schema. 
+
+        Args:
+            schema: Schema to validate
+            errors: List to append errors to
+        """
+        # For a JSON Schema, we should validate it has the basic structure
+        # Check that it's a proper JSON Schema with properties (if it's an object schema)
+        if schema.get("type") == "object" and "properties" not in schema:
+            errors.append(
+                "Missing 'properties' field - object type schemas should have properties"
+            )
+            return
+
+        # Only validate project-specific requirements for root schemas that have both database and schema refs
+        properties = schema.get("properties", {})
+        if not isinstance(properties, dict):
+            if "properties" in schema:
+                errors.append("'properties' field must be an object")
+            return
+
+        # Only require database/schema properties if this looks like a root project schema
+        # (has $schema field and references both database and schema)
+        is_root_schema = (
+            "$schema" in schema
+            and isinstance(properties, dict)
+            and any(
+                prop.get("$ref") in ["database.json", "schema.json"]
+                for prop in properties.values()
+                if isinstance(prop, dict)
+            )
+        )
+
+        if is_root_schema:
+            # Check for database property definition
+            if "database" not in properties:
+                errors.append("Missing 'database' property definition in schema")
+            elif not isinstance(properties["database"], dict):
+                errors.append("'database' property definition must be an object")
+
+            # Check for schema property definition
+            if "schema" not in properties:
+                errors.append("Missing 'schema' property definition in schema")
+            elif not isinstance(properties["schema"], dict):
+                errors.append("'schema' property definition must be an object")
+
+    def _validate_schema_structure(
+        self, schema: dict[str, Any], warnings: list[str]
+    ) -> None:
+        """Validate the overall structure of the schema.
+
+        Args:
+            schema: Schema to validate
+            warnings: List to append warnings to
+        """
+        # Check for recommended fields
+        if config.json_schema_fields.schema_field not in schema:
+            warnings.append(
+                "Missing '$schema' field - recommended for schema validation"
+            )
+
+        if config.json_schema_fields.id_field not in schema:
+            warnings.append(
+                "Missing '$id' field - recommended for schema identification"
+            )
+
+        if "title" not in schema:
+            warnings.append("Missing 'title' field - recommended for documentation")
+
+        # Check for unresolved references
+        self._check_unresolved_refs(schema, warnings, "")
+
+    def _check_unresolved_refs(self, obj: Any, warnings: list[str], path: str) -> None:
+        """Recursively check for unresolved $ref references.
+
+        Args:
+            obj: Object to check
+            warnings: List to append warnings to
+            path: Current path in the object tree for error reporting
+        """
+        if isinstance(obj, dict):
+            for key, value in obj.items():
+                current_path = f"{path}.{key}" if path else str(key)
+                if key == config.json_schema_fields.ref_field and isinstance(
+                    value, str
+                ):
+                    warnings.append(
+                        f"Unresolved reference found at {current_path}: {value}"
+                    )
+                else:
+                    self._check_unresolved_refs(value, warnings, current_path)
+        elif isinstance(obj, list):
+            for i, item in enumerate(obj):
+                current_path = f"{path}[{i}]" if path else f"[{i}]"
+                self._check_unresolved_refs(item, warnings, current_path)
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 0000000..ba4bde1
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,27 @@
+# Database Schema Specification
+
+🏢 **Standardized, modular JSON Schema specification** for database structure definition and validation. Designed for scalability, maintainability, and seamless integration with AI systems.
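+
+## 🚀 Quick Example
+
+A minimal sketch of how a generated spec can be consumed with the `jsonschema` package (already a project dependency). The output path and the example payload below are illustrative assumptions, not files shipped in this repo:
+
+```python
+import json
+
+from jsonschema import Draft7Validator
+
+# Load a generated, fully resolved spec (path assumes the default output layout)
+with open("output/postgresql/15.0/spec.json", encoding="utf-8") as f:
+    spec = json.load(f)
+
+# Hypothetical database definition to validate against the spec
+document = {
+    "database": {"engine": "postgresql", "version": "15.0"},
+    "schema": {"name": "public"},
+}
+
+# Raises jsonschema.exceptions.ValidationError if the document violates the spec
+Draft7Validator(spec).validate(document)
+```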
+
+## 🏗️ Architecture
+
+Our modular architecture prevents code duplication and enables effortless database version management:
+
+```
+specs.json                      # 🎯 Main orchestrator with $schema/$id
+schemas/
+├── base/
+│   ├── database.json           # Database engine definitions
+│   └── schema.json             # Core schema structure
+└── engines/
+    └── postgresql/
+        └── v15.0/              # Version-specific isolation
+            ├── spec.json       # PostgreSQL 15.0 rules
+            └── components/     # Version-specific components
+                ├── table.json       # Table definitions for v15.0
+                ├── column.json      # Column types for v15.0
+                └── constraint.json  # Constraints for v15.0
+```
+
+## FSD
+
+- **FSD**: [Full Specification Document](https://www.notion.so/Database-Engines-Support-237bed96279c80ee85c1e69cf2abc42f) - Comprehensive guide to the database schema specification.
diff --git a/examples/example_1.json b/docs/examples/example_1.json
similarity index 100%
rename from examples/example_1.json
rename to docs/examples/example_1.json
diff --git a/examples/example_2.json b/docs/examples/example_2.json
similarity index 100%
rename from examples/example_2.json
rename to docs/examples/example_2.json
diff --git a/examples/example_3.json b/docs/examples/example_3.json
similarity index 100%
rename from examples/example_3.json
rename to docs/examples/example_3.json
diff --git a/schemas/base/database.json b/docs/schemas/base/database.json
similarity index 73%
rename from schemas/base/database.json
rename to docs/schemas/base/database.json
index a9ba64b..a80c3a7 100644
--- a/schemas/base/database.json
+++ b/docs/schemas/base/database.json
@@ -1,6 +1,4 @@
 {
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "$id": "https://schemas.bfloo.com/database-schema-spec/v1/schemas/base/database.json",
   "title": "Database Configuration",
   "type": "object",
   "required": ["engine", "version"],
@@ -16,7 +14,7 @@
     "version": {
       "type": "string",
       "description": "The version of the PostgreSQL database engine",
-      "enum": ["15.0"]
+      "const": "15.0"
     }
   }
 }
diff --git a/schemas/base/schema.json b/docs/schemas/base/schema.json
similarity index 93%
rename from schemas/base/schema.json
rename to docs/schemas/base/schema.json
index 7c67736..8c8e534 100644
--- a/schemas/base/schema.json
+++ b/docs/schemas/base/schema.json
@@ -1,6 +1,4 @@
 {
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "$id": "https://schemas.bfloo.com/database-schema-spec/v1/schemas/base/schema.json",
   "title": "Schema Base Definition",
   "type": "object",
   "properties": {
diff --git a/schemas/engines/postgresql/v15.0/components/column.json b/docs/schemas/engines/postgresql/v15.0/components/column.json
similarity index 98%
rename from schemas/engines/postgresql/v15.0/components/column.json
rename to docs/schemas/engines/postgresql/v15.0/components/column.json
index 373b418..7c29105 100644
--- a/schemas/engines/postgresql/v15.0/components/column.json
+++ b/docs/schemas/engines/postgresql/v15.0/components/column.json
@@ -1,6 +1,4 @@
 {
-  "$schema": "http://json-schema.org/draft-07/schema#",
-  "$id": "https://schemas.bfloo.com/database-schema-spec/v1/schemas/engines/postgresql/v15.0/components/column.json",
   "title": "PostgreSQL Column Definition",
   "type": "object",
   "properties": {
diff --git a/schemas/engines/postgresql/v15.0/components/constraint.json b/docs/schemas/engines/postgresql/v15.0/components/constraint.json
similarity index 95%
rename from schemas/engines/postgresql/v15.0/components/constraint.json
rename to
docs/schemas/engines/postgresql/v15.0/components/constraint.json index 4fe74d7..977a266 100644 --- a/schemas/engines/postgresql/v15.0/components/constraint.json +++ b/docs/schemas/engines/postgresql/v15.0/components/constraint.json @@ -1,6 +1,4 @@ { - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://schemas.bfloo.com/database-schema-spec/v1/schemas/engines/postgresql/v15.0/components/constraint.json", "title": "PostgreSQL Constraint Definition", "type": "object", "properties": { diff --git a/schemas/engines/postgresql/v15.0/components/table.json b/docs/schemas/engines/postgresql/v15.0/components/table.json similarity index 89% rename from schemas/engines/postgresql/v15.0/components/table.json rename to docs/schemas/engines/postgresql/v15.0/components/table.json index c552bc9..044af70 100644 --- a/schemas/engines/postgresql/v15.0/components/table.json +++ b/docs/schemas/engines/postgresql/v15.0/components/table.json @@ -1,6 +1,4 @@ { - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://schemas.bfloo.com/database-schema-spec/v1/schemas/engines/postgresql/v15.0/components/table.json", "title": "PostgreSQL Table Definition", "type": "object", "properties": { diff --git a/schemas/engines/postgresql/v15.0/spec.json b/docs/schemas/engines/postgresql/v15.0/spec.json similarity index 65% rename from schemas/engines/postgresql/v15.0/spec.json rename to docs/schemas/engines/postgresql/v15.0/spec.json index 1a52723..f93cf53 100644 --- a/schemas/engines/postgresql/v15.0/spec.json +++ b/docs/schemas/engines/postgresql/v15.0/spec.json @@ -1,7 +1,6 @@ { - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://schemas.bfloo.com/database-schema-spec/v1/schemas/engines/postgresql/v15.0/schema.json", "title": "PostgreSQL 15.0 Schema Rules", + "$id": "https://schemas.bfloo.com/postgresql/15.0/spec.json", "properties": { "schema": { "properties": { diff --git a/specs.json b/docs/specs.json similarity index 93% rename from specs.json rename to docs/specs.json index 8b4f38e..438f1de 100644 --- a/specs.json +++ b/docs/specs.json @@ -27,7 +27,7 @@ } }, "then": { - "$ref": "schemas/engines/postgresql/v15.0/schema.json" + "$ref": "schemas/engines/postgresql/v15.0/spec.json" } } ] diff --git a/main.py b/main.py new file mode 100644 index 0000000..09742e1 --- /dev/null +++ b/main.py @@ -0,0 +1,21 @@ +""" +Database Schema Spec Generator + +Entry point for the schema generator script. +""" + +from database_schema_spec import SchemaGenerator + + +def main() -> None: + """ + Entry point for the schema generator script. + + Creates SchemaGenerator instance and runs generation process. + """ + generator = SchemaGenerator() + generator.run() + + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..04268c8 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,128 @@ +[project] +name = "database-schema-spec" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.13" +dependencies = [ + "jsonschema>=4.25.0", + "pydantic>=2.11.7", + "pydantic-settings>=2.10.1", + "python-dotenv>=1.1.1", +] + +[dependency-groups] +dev = [ + "pre-commit>=4.2.0", + "pyright>=1.1.403", + "pytest>=8.4.1", + "pytest-cov>=6.2.1", + "pytest-mock>=3.14.1", + "ruff>=0.12.7", +] + +[tool.ruff] +# Exclude a variety of commonly ignored directories. 
+exclude = [
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".git-rewrite",
+    ".hg",
+    ".ipynb_checkpoints",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".pyenv",
+    ".pytest_cache",
+    ".pytype",
+    ".ruff_cache",
+    ".svn",
+    ".tox",
+    ".venv",
+    ".vscode",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "site-packages",
+    "venv",
+]
+
+# Same as Black.
+line-length = 88
+indent-width = 4
+
+# Assume Python 3.13
+target-version = "py313"
+
+[tool.ruff.lint]
+# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["E4", "E7", "E9", "F", "I"]
+ignore = []
+
+# Allow fix for all enabled rules (when `--fix` is provided).
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+[tool.ruff.format]
+# Like Black, use double quotes for strings.
+quote-style = "double"
+
+# Like Black, indent with spaces, rather than tabs.
+indent-style = "space"
+
+# Like Black, respect magic trailing commas.
+skip-magic-trailing-comma = false
+
+# Like Black, automatically detect the appropriate line ending.
+line-ending = "auto"
+
+# Enable auto-formatting of code examples in docstrings. Markdown,
+# reStructuredText code/literal blocks and doctests are all supported.
+#
+# This is currently disabled by default, but it is planned for this
+# to be opt-out in the future.
+docstring-code-format = false
+
+# Set the line length limit used when formatting code snippets in
+# docstrings.
+#
+# This only has an effect when the `docstring-code-format` setting is
+# enabled.
+docstring-code-line-length = "dynamic"
+
+[tool.pyright]
+typeCheckingMode = "standard"
+include = ["database_schema_spec", "main.py", "tests"]
+exclude = ["**/__pycache__/**", "build", "dist", ".venv"]
+# The following lines are for pre-commit configuration
+venvPath = "."
+venv = ".venv"
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+testpaths = ["tests"]
+python_files = "test_*.py"
+pythonpath = ["."]
+addopts = [
+    "--strict-config",
+    "--verbose",
+    "-ra",
+    "--cov",
+    "--durations=0"
+]
+
+[tool.coverage.run]
+omit = [
+    "tests/*",
+    "*/__init__.py"
+]
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..e2d905f
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,283 @@
+"""Production-quality test fixtures and configuration."""
+
+# Set test environment variables BEFORE any imports that might trigger config loading
+import os
+
+os.environ.setdefault("BASE_URL", "https://test.example.com/schemas")
+
+# These imports intentionally come after the environment setup, so E402 is suppressed.
+import json  # noqa: E402
+import shutil  # noqa: E402
+import tempfile  # noqa: E402
+from pathlib import Path  # noqa: E402
+from typing import Any, Dict  # noqa: E402
+from unittest.mock import Mock  # noqa: E402
+
+import pytest  # noqa: E402
+
+from database_schema_spec.core.schemas import DatabaseVariantSpec  # noqa: E402
+from database_schema_spec.resolution.interfaces import IJSONRefResolver  # noqa: E402
+
+
+def pytest_configure(config):
+    """Configure pytest environment before any imports happen."""
+    # Environment variable is already set at module level above
+    pass
+
+
+@pytest.fixture(scope="session", autouse=True)
+def mock_config():
+    """Ensure config uses test environment variables."""
+    # At this point, the config should already be loaded with our test env vars
+    # This fixture just serves as a placeholder for future config customization
+    yield
+
+
+@pytest.fixture
+def temp_docs_dir():
+    """Create a temporary docs directory with realistic test data."""
+    temp_dir = Path(tempfile.mkdtemp())
+
+    # Create directory structure
+    (temp_dir / "schemas" / "base").mkdir(parents=True)
+    (temp_dir / "schemas" / "engines" / "postgresql" / "v15.0").mkdir(parents=True)
+    (temp_dir / "schemas" / "engines" / "mysql" / "v8.0").mkdir(parents=True)
+
+    # Create database.json with oneOf conditions (this is what the variant extractor expects)
+    database_schema = {
+        "title": "Database Configuration",
+        "type": "object",
+        "required": ["engine", "version"],
+        "additionalProperties": False,
+        "oneOf": [
+            {
+                "properties": {
+                    "engine": {
+                        "type": "string",
+                        "description": "The type of database engine used",
+                        "const": "postgresql",
+                    },
+                    "version": {
+                        "type": "string",
+                        "description": "The version of the PostgreSQL database engine",
+                        "const": "15.0",
+                    },
+                }
+            },
+            {
+                "properties": {
+                    "engine": {
+                        "type": "string",
+                        "description": "The type of database engine used",
+                        "const": "mysql",
+                    },
+                    "version": {
+                        "type": "string",
+                        "description": "The version of the MySQL database engine",
+                        "const": "8.0",
+                    },
+                }
+            },
+        ],
+    }
+
+    # Create a simple schema.json for references
+    schema_schema = {
+        "type": "object",
+        "properties": {"name": {"type": "string"}, "description": {"type": "string"}},
+        "required": ["name"],
+    }
+
+    # Write test files - database.json should contain the oneOf schema
+    with open(temp_dir / "schemas" / "base" / "database.json", "w") as f:
+        json.dump(database_schema, f, indent=2)
+
+    with open(temp_dir / "schemas" / "base" / "schema.json", "w") as f:
+        json.dump(schema_schema, f, indent=2)
+
+    # Create engine-specific schema directories and files
+    # PostgreSQL v15.0
+    postgresql_dir = temp_dir / "schemas" / "engines" / "postgresql" / "v15.0"
+
+    with open(postgresql_dir / "spec.json", "w") as f:
+        json.dump(
+            {
+                "title": "PostgreSQL 15.0 Schema Rules",
+                "properties": {
+                    "postgres_features": {
+                        "type": "object",
+                        "description": "PostgreSQL-specific features",
+                    }
+                },
+            },
+            f,
+            indent=2,
+        )
+
+    #
MySQL v8.0 + mysql_dir = temp_dir / "schemas" / "engines" / "mysql" / "v8.0" + + with open(mysql_dir / "spec.json", "w") as f: + json.dump( + { + "title": "MySQL 8.0 Schema Rules", + "properties": { + "mysql_features": { + "type": "object", + "description": "MySQL-specific features", + } + }, + }, + f, + indent=2, + ) + + with open(temp_dir / "specs.json", "w") as f: + json.dump( + { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Database Schema Specification Test", + "description": "Test schema specification", + "type": "object", + "properties": { + "database": {"$ref": "schemas/base/database.json"}, + "schema": {"$ref": "schemas/base/schema.json"}, + }, + "required": ["database", "schema"], + "additionalProperties": False, + "variants": [ + {"engine": "postgresql", "version": "15.0"}, + {"engine": "mysql", "version": "8.0"}, + ], + }, + f, + indent=2, + ) + + yield temp_dir + + # Cleanup + shutil.rmtree(temp_dir) + + +@pytest.fixture +def temp_output_dir(): + """Create a temporary output directory.""" + temp_dir = Path(tempfile.mkdtemp()) + yield temp_dir + shutil.rmtree(temp_dir) + + +@pytest.fixture +def postgresql_variant(): + """Standard PostgreSQL variant for testing.""" + return DatabaseVariantSpec( + engine="postgresql", + version="15.0", + engine_spec_path="schemas/engines/postgresql/v15.0", + ) + + +@pytest.fixture +def mysql_variant(): + """MySQL variant for testing.""" + return DatabaseVariantSpec( + engine="mysql", version="8.0", engine_spec_path="schemas/engines/mysql/v8.0" + ) + + +@pytest.fixture +def mock_resolver(): + """Mock resolver that implements the interface properly.""" + mock = Mock(spec=IJSONRefResolver) + mock.resolve_references.return_value = {} + return mock + + +class SchemaTestHelper: + """Helper class for creating test schemas.""" + + @staticmethod + def create_conditional_schema( + engine: str, version: str, then_properties: Dict[str, Any] + ) -> Dict[str, Any]: + """Create a conditional schema for testing.""" + return { + "oneOf": [ + { + "if": { + "properties": { + "database": { + "properties": { + "engine": {"const": engine}, + "version": {"const": version}, + } + } + } + }, + "then": {"properties": then_properties}, + } + ] + } + + @staticmethod + def create_direct_conditional_schema( + engine: str, version: str, then_properties: Dict[str, Any] + ) -> Dict[str, Any]: + """Create a direct conditional schema (database.json style).""" + return { + "oneOf": [ + { + "if": { + "properties": { + "engine": {"const": engine}, + "version": {"const": version}, + } + }, + "then": {"properties": then_properties}, + } + ] + } + + @staticmethod + def create_invalid_schema() -> Dict[str, Any]: + """Create an invalid schema for error testing.""" + return { + "oneOf": [ + { + "if": "invalid_structure", # Should be object + "then": {"type": "object"}, + } + ] + } + + @staticmethod + def create_multiple_match_schema() -> Dict[str, Any]: + """Create a schema that would match multiple conditions.""" + return { + "oneOf": [ + { + "if": { + "properties": { + "database": { + "properties": {"engine": {"const": "postgresql"}} + } + } + }, + "then": {"properties": {"feature1": {"type": "string"}}}, + }, + { + "if": { + "properties": { + "database": {"properties": {"version": {"const": "15.0"}}} + } + }, + "then": {"properties": {"feature2": {"type": "integer"}}}, + }, + ] + } + + +@pytest.fixture +def schema_helper(): + """Provide schema helper for tests.""" + return SchemaTestHelper() diff --git a/tests/test_conditional_merger.py 
b/tests/test_conditional_merger.py
new file mode 100644
index 0000000..8b86a37
--- /dev/null
+++ b/tests/test_conditional_merger.py
@@ -0,0 +1,51 @@
+from database_schema_spec.core.schemas import DatabaseVariantSpec
+from database_schema_spec.resolution.conditional_merger import ConditionalMerger
+
+
+class DummyResolver:
+    def resolve_references(self, schema, current_file=None):
+        return schema
+
+
+def make_variant(engine="postgresql", version="15.0"):
+    return DatabaseVariantSpec(engine=engine, version=version, engine_spec_path=None)
+
+
+def test_matches_variant_condition_if_then():
+    variant = make_variant()
+    merger = ConditionalMerger(DummyResolver())
+    condition = {
+        "if": {
+            "properties": {
+                "database": {
+                    "properties": {
+                        "engine": {"const": "postgresql"},
+                        "version": {"const": "15.0"},
+                    }
+                }
+            }
+        }
+    }
+    assert merger._matches_variant_condition(condition, variant)
+
+
+def test_matches_variant_condition_direct_properties():
+    variant = make_variant()
+    merger = ConditionalMerger(DummyResolver())
+    condition = {
+        "properties": {
+            "engine": {"const": "postgresql"},
+            "version": {"const": "15.0"},
+        }
+    }
+    assert merger._matches_variant_condition(condition, variant)
+
+
+def test_check_properties_match_false():
+    variant = make_variant()
+    merger = ConditionalMerger(DummyResolver())
+    properties = {
+        "engine": {"const": "mysql"},
+        "version": {"const": "15.0"},
+    }
+    assert not merger._check_properties_match(properties, variant)
diff --git a/tests/test_conditional_merger_improved.py b/tests/test_conditional_merger_improved.py
new file mode 100644
index 0000000..1f9e021
--- /dev/null
+++ b/tests/test_conditional_merger_improved.py
@@ -0,0 +1,56 @@
+"""Production-quality tests for ConditionalMerger."""
+
+import pytest
+
+from database_schema_spec.core.exceptions import ValidationError
+from database_schema_spec.resolution.conditional_merger import ConditionalMerger
+
+
+class TestConditionalMerger:
+    """Test ConditionalMerger public interface with comprehensive coverage."""
+
+    def test_apply_conditional_logic_matching_postgresql(
+        self, mock_resolver, postgresql_variant, schema_helper
+    ):
+        """Should apply conditions for matching PostgreSQL variant."""
+        merger = ConditionalMerger(mock_resolver)
+
+        base_schema = schema_helper.create_conditional_schema(
+            engine="postgresql",
+            version="15.0",
+            then_properties={"id": {"type": "integer", "format": "int4"}},
+        )
+
+        result = merger.apply_conditional_logic(base_schema, postgresql_variant)
+
+        # Should contain the then properties
+        assert "properties" in result
+        assert "id" in result["properties"]
+        assert result["properties"]["id"]["format"] == "int4"
+        # Should not contain oneOf anymore (resolved)
+        assert "oneOf" not in result
+
+    def test_apply_conditional_logic_non_matching_variant(
+        self, mock_resolver, mysql_variant, schema_helper
+    ):
+        """Should raise ValidationError for non-matching variant."""
+        merger = ConditionalMerger(mock_resolver)
+
+        base_schema = schema_helper.create_conditional_schema(
+            engine="postgresql",
+            version="15.0",
+            then_properties={"postgres_column": {"type": "string"}},
+        )
+
+        with pytest.raises(ValidationError, match="No matching oneOf condition found"):
+            merger.apply_conditional_logic(base_schema, mysql_variant)
+
+    def test_apply_conditional_logic_no_oneof(self, mock_resolver, postgresql_variant):
+        """Should return schema unchanged when no oneOf present."""
+        merger = ConditionalMerger(mock_resolver)
+
+        base_schema = {"type": "object", "properties": {"name": {"type": "string"}}}
+ + result = merger.apply_conditional_logic(base_schema, postgresql_variant) + + assert result == base_schema diff --git a/tests/test_conditional_merger_production.py b/tests/test_conditional_merger_production.py new file mode 100644 index 0000000..a3a10d4 --- /dev/null +++ b/tests/test_conditional_merger_production.py @@ -0,0 +1,235 @@ +"""Production-quality tests for ConditionalMerger.""" + +import pytest + +from database_schema_spec.core.exceptions import ValidationError +from database_schema_spec.resolution.conditional_merger import ConditionalMerger + + +class TestConditionalMerger: + """Test ConditionalMerger public interface with comprehensive coverage.""" + + def test_apply_conditional_logic_matching_postgresql( + self, mock_resolver, postgresql_variant, schema_helper + ): + """Should apply conditions for matching PostgreSQL variant.""" + merger = ConditionalMerger(mock_resolver) + + base_schema = schema_helper.create_conditional_schema( + engine="postgresql", + version="15.0", + then_properties={"id": {"type": "integer", "format": "int4"}}, + ) + + result = merger.apply_conditional_logic(base_schema, postgresql_variant) + + # Should contain the then properties + assert "properties" in result + assert "id" in result["properties"] + assert result["properties"]["id"]["format"] == "int4" + # Should not contain oneOf anymore (resolved) + assert "oneOf" not in result + + def test_apply_conditional_logic_non_matching_variant( + self, mock_resolver, mysql_variant, schema_helper + ): + """Should raise ValidationError for non-matching variant.""" + merger = ConditionalMerger(mock_resolver) + + base_schema = schema_helper.create_conditional_schema( + engine="postgresql", + version="15.0", + then_properties={"postgres_column": {"type": "string"}}, + ) + + with pytest.raises(ValidationError, match="No matching oneOf condition found"): + merger.apply_conditional_logic(base_schema, mysql_variant) + + def test_apply_conditional_logic_no_oneof(self, mock_resolver, postgresql_variant): + """Should return schema unchanged when no oneOf present.""" + merger = ConditionalMerger(mock_resolver) + + base_schema = {"type": "object", "properties": {"name": {"type": "string"}}} + + result = merger.apply_conditional_logic(base_schema, postgresql_variant) + + assert result == base_schema + + def test_apply_conditional_logic_empty_oneof( + self, mock_resolver, postgresql_variant + ): + """Should handle empty oneOf arrays.""" + merger = ConditionalMerger(mock_resolver) + + base_schema = {"oneOf": []} + + result = merger.apply_conditional_logic(base_schema, postgresql_variant) + + assert result == base_schema + + def test_apply_conditional_logic_invalid_condition_structure( + self, mock_resolver, postgresql_variant, schema_helper + ): + """Should raise ValidationError for invalid condition structure.""" + merger = ConditionalMerger(mock_resolver) + + invalid_schema = schema_helper.create_invalid_schema() + + with pytest.raises(ValidationError): + merger.apply_conditional_logic(invalid_schema, postgresql_variant) + + def test_apply_conditional_logic_multiple_matches_error( + self, mock_resolver, postgresql_variant, schema_helper + ): + """Should raise ValidationError when multiple conditions match.""" + merger = ConditionalMerger(mock_resolver) + + schema_with_multiple_matches = schema_helper.create_multiple_match_schema() + + with pytest.raises(ValidationError, match="Multiple matching conditions"): + merger.apply_conditional_logic( + schema_with_multiple_matches, postgresql_variant + ) + + 
@pytest.mark.parametrize( + "engine,version,expected_property", + [ + ("postgresql", "15.0", "bigint_support"), + ("postgresql", "14.0", "json_support"), + ("mysql", "8.0", "mysql_specific"), + ], + ) + def test_apply_conditional_logic_multiple_variants( + self, mock_resolver, engine, version, expected_property, schema_helper + ): + """Should correctly resolve conditions for different variants.""" + from database_schema_spec.core.schemas import DatabaseVariantSpec + + variant = DatabaseVariantSpec( + engine=engine, version=version, engine_spec_path=None + ) + merger = ConditionalMerger(mock_resolver) + + multi_variant_schema = { + "oneOf": [ + { + "if": { + "properties": { + "database": { + "properties": { + "engine": {"const": "postgresql"}, + "version": {"const": "15.0"}, + } + } + } + }, + "then": {"properties": {"bigint_support": {"type": "boolean"}}}, + }, + { + "if": { + "properties": { + "database": { + "properties": { + "engine": {"const": "postgresql"}, + "version": {"const": "14.0"}, + } + } + } + }, + "then": {"properties": {"json_support": {"type": "boolean"}}}, + }, + { + "if": { + "properties": { + "database": {"properties": {"engine": {"const": "mysql"}}} + } + }, + "then": {"properties": {"mysql_specific": {"type": "string"}}}, + }, + ] + } + + result = merger.apply_conditional_logic(multi_variant_schema, variant) + + assert expected_property in result["properties"] + + def test_apply_conditional_logic_preserves_original_properties( + self, mock_resolver, postgresql_variant + ): + """Should preserve original schema properties when merging conditions.""" + merger = ConditionalMerger(mock_resolver) + + base_schema = { + "type": "object", + "properties": {"existing_prop": {"type": "string"}}, + "oneOf": [ + { + "if": { + "properties": { + "database": { + "properties": {"engine": {"const": "postgresql"}} + } + } + }, + "then": {"properties": {"new_prop": {"type": "integer"}}}, + } + ], + } + + result = merger.apply_conditional_logic(base_schema, postgresql_variant) + + # Should have both original and new properties + assert "existing_prop" in result["properties"] + assert "new_prop" in result["properties"] + assert result["type"] == "object" + + def test_apply_conditional_logic_with_nested_refs(self, postgresql_variant): + """Should handle schemas with nested $ref resolution.""" + from unittest.mock import Mock + + mock_resolver = Mock() + mock_resolver.resolve_references.return_value = { + "properties": {"resolved_prop": {"type": "string"}} + } + + merger = ConditionalMerger(mock_resolver) + + schema_with_ref = { + "oneOf": [ + { + "if": { + "properties": { + "database": { + "properties": {"engine": {"const": "postgresql"}} + } + } + }, + "then": {"$ref": "nested_schema.json"}, + } + ] + } + + result = merger.apply_conditional_logic(schema_with_ref, postgresql_variant) + + # Should have called resolver for the $ref + mock_resolver.resolve_references.assert_called_once() + assert "resolved_prop" in result["properties"] + + def test_apply_conditional_logic_direct_properties_style( + self, mock_resolver, postgresql_variant, schema_helper + ): + """Should handle direct properties style conditions (database.json style).""" + merger = ConditionalMerger(mock_resolver) + + # This is the style used in database.json where if/then is at the top level + direct_schema = schema_helper.create_direct_conditional_schema( + engine="postgresql", + version="15.0", + then_properties={"postgres_column": {"type": "bigint"}}, + ) + + result = merger.apply_conditional_logic(direct_schema, 
postgresql_variant) + + assert "properties" in result + assert "postgres_column" in result["properties"] + assert result["properties"]["postgres_column"]["type"] == "bigint" diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 0000000..475bf6e --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,30 @@ +"""Test custom configuration error handling.""" + +import os +from unittest.mock import patch + +import pytest + +from database_schema_spec.core.config import Config +from database_schema_spec.core.exceptions import ConfigurationError + + +def test_missing_base_url_raises_configuration_error(): + """Test that missing BASE_URL raises ConfigurationError instead of ValidationError.""" + # Remove BASE_URL from environment for this test + with patch.dict(os.environ, {}, clear=True): + with pytest.raises(ConfigurationError) as exc_info: + Config() + + error = exc_info.value + assert error.variable_name == "BASE_URL" + assert "Required configuration variable 'BASE_URL' is not set" in str(error) + + +def test_config_with_valid_base_url(): + """Test that Config works correctly when BASE_URL is provided.""" + with patch.dict(os.environ, {"BASE_URL": "https://example.com/api"}): + config = Config() + assert config.base_url == "https://example.com/api" + assert config.docs_dir.name == "docs" + assert config.output_dir.name == "output" diff --git a/tests/test_integration.py b/tests/test_integration.py new file mode 100644 index 0000000..b807f77 --- /dev/null +++ b/tests/test_integration.py @@ -0,0 +1,20 @@ +from pathlib import Path + +from database_schema_spec.cli.generator import SchemaGenerator + + +def test_schema_generation(tmp_path): + # Use a temporary output directory + docs_path = Path("docs") + output_path = tmp_path / "output" + generator = SchemaGenerator(docs_path=docs_path, output_path=output_path) + try: + generator.run() + except SystemExit as e: + # Accept exit code 0 (success) + assert e.code == 0 + # Check that output directory was created + assert output_path.exists() + # Optionally, check for expected files + files = list(output_path.rglob("*.json")) + assert files, "No schema files generated" diff --git a/tests/test_integration_production.py b/tests/test_integration_production.py new file mode 100644 index 0000000..daf5f78 --- /dev/null +++ b/tests/test_integration_production.py @@ -0,0 +1,259 @@ +"""Production-quality integration tests.""" + +import json +from pathlib import Path + +import pytest + +from database_schema_spec.cli.generator import SchemaGenerator +from database_schema_spec.core.exceptions import SchemaGenerationError + + +class TestSchemaGenerationIntegration: + """Test complete schema generation workflow.""" + + def test_schema_generation_with_real_data(self, temp_docs_dir, temp_output_dir): + """Should generate valid schemas from realistic test data.""" + generator = SchemaGenerator( + docs_path=temp_docs_dir, output_path=temp_output_dir + ) + + # This should not raise any exceptions + generator.run() + + # Verify output structure + assert temp_output_dir.exists() + + # Check for generated schema files + postgres_output = temp_output_dir / "postgresql" / "15.0" / "spec.json" + mysql_output = temp_output_dir / "mysql" / "8.0" / "spec.json" + + assert postgres_output.exists(), "PostgreSQL schema should be generated" + assert mysql_output.exists(), "MySQL schema should be generated" + + # Verify content quality + with open(postgres_output) as f: + postgres_schema = json.load(f) + + with open(mysql_output) as f: + mysql_schema = 
json.load(f) + + # Basic schema validation + assert postgres_schema.get("type") == "object", ( + f"Expected 'object', got {postgres_schema.get('type')}" + ) + assert mysql_schema.get("type") == "object", ( + f"Expected 'object', got {mysql_schema.get('type')}" + ) + + # Verify basic structure + assert "database" in postgres_schema.get("properties", {}) + assert "schema" in postgres_schema.get("properties", {}) + assert "database" in mysql_schema.get("properties", {}) + assert "schema" in mysql_schema.get("properties", {}) + + # Verify conditional logic was resolved (no oneOf should remain) + assert "oneOf" not in postgres_schema + assert "oneOf" not in mysql_schema + + def test_schema_generation_with_missing_docs(self, temp_output_dir): + """Should handle missing documentation directory gracefully.""" + non_existent_path = Path("/non/existent/path") + generator = SchemaGenerator( + docs_path=non_existent_path, output_path=temp_output_dir + ) + + with pytest.raises(SchemaGenerationError): + generator.run_for_testing() + + def test_schema_generation_with_invalid_specs_file( + self, temp_docs_dir, temp_output_dir + ): + """Should handle invalid specs.json file.""" + # Corrupt the specs.json file + with open(temp_docs_dir / "specs.json", "w") as f: + f.write("invalid json content") + + generator = SchemaGenerator( + docs_path=temp_docs_dir, output_path=temp_output_dir + ) + + with pytest.raises(SchemaGenerationError): + generator.run_for_testing() + + def test_schema_generation_preserves_file_structure( + self, temp_docs_dir, temp_output_dir + ): + """Should create proper output file structure.""" + generator = SchemaGenerator( + docs_path=temp_docs_dir, output_path=temp_output_dir + ) + generator.run() + + # Verify directory structure matches expected pattern + expected_structure = [ + temp_output_dir / "postgresql" / "15.0", + temp_output_dir / "mysql" / "8.0", + ] + + for path in expected_structure: + assert path.exists(), f"Expected directory {path} should exist" + assert (path / "spec.json").exists(), f"spec.json should exist in {path}" + + def test_schema_generation_output_content_validity( + self, temp_docs_dir, temp_output_dir + ): + """Should generate valid JSON Schema compliant output.""" + import jsonschema + + generator = SchemaGenerator( + docs_path=temp_docs_dir, output_path=temp_output_dir + ) + generator.run() + + # Check all generated files are valid JSON Schema + for schema_file in temp_output_dir.rglob("*.json"): + with open(schema_file) as f: + schema_content = json.load(f) + + # Should be valid JSON Schema Draft 7 + try: + jsonschema.Draft7Validator.check_schema(schema_content) + except jsonschema.SchemaError as e: + pytest.fail( + f"Generated schema {schema_file} is not valid JSON Schema: {e}" + ) + + def test_schema_generation_handles_multiple_variants( + self, temp_docs_dir, temp_output_dir + ): + """Should correctly handle multiple database variants.""" + import json + + # Add another variant to database.json oneOf (this is what the variant extractor reads) + database_file = temp_docs_dir / "schemas" / "base" / "database.json" + with open(database_file) as f: + database_schema = json.load(f) + + # Add PostgreSQL 14.0 variant to oneOf + database_schema["oneOf"].append( + { + "properties": { + "engine": { + "type": "string", + "description": "The type of database engine used", + "const": "postgresql", + }, + "version": { + "type": "string", + "description": "The version of the PostgreSQL database engine", + "const": "14.0", + }, + } + } + ) + + with open(database_file, 
"w") as f: + json.dump(database_schema, f, indent=2) + + # Create directory for the new variant + postgresql_14_dir = ( + temp_docs_dir / "schemas" / "engines" / "postgresql" / "v14.0" + ) + postgresql_14_dir.mkdir(parents=True) + + # Create spec.json for the new variant + with open(postgresql_14_dir / "spec.json", "w") as f: + json.dump( + { + "title": "PostgreSQL 14.0 Schema Rules", + "properties": { + "postgres_14_features": { + "type": "object", + "description": "PostgreSQL 14.0-specific features", + } + }, + }, + f, + indent=2, + ) + + generator = SchemaGenerator( + docs_path=temp_docs_dir, output_path=temp_output_dir + ) + generator.run() + + # Should create output for all variants + assert (temp_output_dir / "postgresql" / "15.0" / "spec.json").exists() + assert (temp_output_dir / "postgresql" / "14.0" / "spec.json").exists() + assert (temp_output_dir / "mysql" / "8.0" / "spec.json").exists() + + def test_schema_generation_with_circular_references( + self, temp_docs_dir, temp_output_dir + ): + """Should detect and handle circular references.""" + # Create a circular reference scenario + circular_schema = {"$ref": "circular_ref.json"} + + with open(temp_docs_dir / "schemas" / "base" / "circular_ref.json", "w") as f: + json.dump({"$ref": "database.json"}, f) + + # Modify database.json to reference the circular file + with open(temp_docs_dir / "schemas" / "base" / "database.json", "w") as f: + json.dump(circular_schema, f) + + generator = SchemaGenerator( + docs_path=temp_docs_dir, output_path=temp_output_dir + ) + + # Should handle circular references gracefully + with pytest.raises(SchemaGenerationError, match="circular"): + generator.run_for_testing() + + def test_schema_generation_performance_with_large_schema( + self, temp_docs_dir, temp_output_dir + ): + """Should handle large schemas efficiently.""" + # Create a large schema with many properties + large_schema = {"type": "object", "properties": {}} + + # Add 100 properties to simulate a large schema + for i in range(100): + large_schema["properties"][f"property_{i}"] = { + "type": "string", + "description": f"Property {i}", + } + + # Add the large schema to the base schemas + with open(temp_docs_dir / "schemas" / "base" / "large_schema.json", "w") as f: + json.dump(large_schema, f, indent=2) + + # Read existing database.json (don't overwrite the oneOf structure) + with open(temp_docs_dir / "schemas" / "base" / "database.json") as f: + database_schema = json.load(f) + + # Add a reference to the large schema in properties if not already there + if "properties" not in database_schema: + database_schema["properties"] = {} + database_schema["properties"]["large_schema"] = {"$ref": "large_schema.json"} + + # Write back the modified database schema + with open(temp_docs_dir / "schemas" / "base" / "database.json", "w") as f: + json.dump(database_schema, f, indent=2) + + import time + + start_time = time.time() + + generator = SchemaGenerator( + docs_path=temp_docs_dir, output_path=temp_output_dir + ) + generator.run_for_testing() # Use test-friendly version + + end_time = time.time() + processing_time = end_time - start_time + + # Should complete within reasonable time (adjust as needed) + assert processing_time < 5.0, ( + f"Schema generation took too long: {processing_time}s" + ) diff --git a/tests/test_output_manager.py b/tests/test_output_manager.py new file mode 100644 index 0000000..4e03b31 --- /dev/null +++ b/tests/test_output_manager.py @@ -0,0 +1,342 @@ +"""Tests for the OutputManager class.""" + +import json +import tempfile 
+from pathlib import Path +from unittest.mock import patch + +import pytest + +from database_schema_spec.io.output_manager import OutputManager + + +@pytest.fixture +def temp_output_dir(): + """Create a temporary output directory with test data.""" + temp_dir = Path(tempfile.mkdtemp()) + + # Create test directory structure + postgresql_15_dir = temp_dir / "postgresql" / "15.0" + postgresql_16_dir = temp_dir / "postgresql" / "16.0" + mysql_8_dir = temp_dir / "mysql" / "8.0" + + postgresql_15_dir.mkdir(parents=True) + postgresql_16_dir.mkdir(parents=True) + mysql_8_dir.mkdir(parents=True) + + # Create test spec.json files + test_schema = { + "title": "Test Schema", + "type": "object", + "properties": {"test": {"type": "string"}}, + } + + with open(postgresql_15_dir / "spec.json", "w") as f: + json.dump(test_schema, f) + + with open(postgresql_16_dir / "spec.json", "w") as f: + json.dump(test_schema, f) + + with open(mysql_8_dir / "spec.json", "w") as f: + json.dump(test_schema, f) + + yield temp_dir + + # Cleanup + import shutil + + shutil.rmtree(temp_dir) + + +@pytest.fixture +def output_manager(temp_output_dir): + """Create an OutputManager instance with a temporary directory.""" + return OutputManager(temp_output_dir) + + +class TestOutputManager: + """Test suite for OutputManager class.""" + + def test_init_with_default_output_dir(self): + """Test OutputManager initialization with default output directory.""" + from database_schema_spec.core.config import config + + manager = OutputManager() + assert manager.output_dir == config.output_dir + + def test_init_with_custom_output_dir(self, temp_output_dir): + """Test OutputManager initialization with custom output directory.""" + manager = OutputManager(temp_output_dir) + assert manager.output_dir == temp_output_dir + + def test_create_output_structure_success(self, tmp_path): + """Test successful creation of output directory structure.""" + output_dir = tmp_path / "test_output" + manager = OutputManager(output_dir) + + manager.create_output_structure() + + assert output_dir.exists() + assert output_dir.is_dir() + + def test_create_output_structure_permission_error(self): + """Test handling of permission errors during directory creation.""" + # Use a path that would cause permission error (like root-only directory) + with patch("pathlib.Path.mkdir", side_effect=PermissionError("Access denied")): + manager = OutputManager(Path("/root/test")) + + with pytest.raises( + PermissionError, match="Failed to create output directory" + ): + manager.create_output_structure() + + def test_get_output_path(self, output_manager): + """Test _get_output_path method.""" + path = output_manager._get_output_path("postgresql", "15.0") + + expected = output_manager.output_dir / "postgresql" / "15.0" / "spec.json" + assert path == expected + + def test_get_output_path_lowercase_engine(self, output_manager): + """Test _get_output_path converts engine name to lowercase.""" + path = output_manager._get_output_path("PostgreSQL", "15.0") + + expected = output_manager.output_dir / "postgresql" / "15.0" / "spec.json" + assert path == expected + + def test_write_schema_success(self, output_manager): + """Test successful schema writing.""" + schema = { + "title": "Test Schema", + "type": "object", + "properties": {"name": {"type": "string"}}, + } + + result_path = output_manager.write_schema(schema, "postgresql", "15.0") + + # Check that file was created + assert result_path.exists() + assert result_path.is_file() + + # Check file contents + with open(result_path, "r") as 
f: + written_schema = json.load(f) + + assert written_schema == schema + + def test_write_schema_creates_directories(self, tmp_path): + """Test that write_schema creates necessary directories.""" + output_dir = tmp_path / "new_output" + manager = OutputManager(output_dir) + + schema = {"test": "data"} + + result_path = manager.write_schema(schema, "mysql", "8.0") + + # Check that directories were created + assert (output_dir / "mysql" / "8.0").exists() + assert result_path.exists() + + def test_write_schema_permission_error(self, output_manager): + """Test handling of permission errors during schema writing.""" + schema = {"test": "data"} + + with patch("builtins.open", side_effect=PermissionError("Access denied")): + with pytest.raises(PermissionError, match="Failed to write schema"): + output_manager.write_schema(schema, "postgresql", "15.0") + + def test_get_spec_url_relative_path(self, output_manager): + """Test _get_spec_url with no base URL (relative path).""" + url = output_manager._get_spec_url("postgresql", "15.0") + + assert url == "postgresql/15.0/spec.json" + + def test_get_spec_url_with_base_url(self, output_manager): + """Test _get_spec_url with base URL.""" + base_url = "https://api.example.com/schemas" + url = output_manager._get_spec_url("postgresql", "15.0", base_url) + + assert url == "https://api.example.com/schemas/postgresql/15.0/spec.json" + + def test_get_spec_url_with_trailing_slash_base_url(self, output_manager): + """Test _get_spec_url strips trailing slash from base URL.""" + base_url = "https://api.example.com/schemas/" + url = output_manager._get_spec_url("postgresql", "15.0", base_url) + + assert url == "https://api.example.com/schemas/postgresql/15.0/spec.json" + + def test_get_spec_url_engine_lowercase(self, output_manager): + """Test _get_spec_url converts engine to lowercase.""" + url = output_manager._get_spec_url("PostgreSQL", "15.0") + + assert url == "postgresql/15.0/spec.json" + + def test_generate_version_map_empty_directory(self, tmp_path): + """Test _generate_version_map with empty output directory.""" + empty_dir = tmp_path / "empty" + manager = OutputManager(empty_dir) + + version_map = manager._generate_version_map() + + assert version_map == {} + + def test_generate_version_map_nonexistent_directory(self, tmp_path): + """Test _generate_version_map with nonexistent output directory.""" + nonexistent_dir = tmp_path / "nonexistent" + manager = OutputManager(nonexistent_dir) + + version_map = manager._generate_version_map() + + assert version_map == {} + + def test_generate_version_map_multiple_engines(self, output_manager): + """Test _generate_version_map with multiple engines and versions.""" + version_map = output_manager._generate_version_map() + + expected = { + "postgresql": { + "15.0": "postgresql/15.0/spec.json", + "16.0": "postgresql/16.0/spec.json", + }, + "mysql": {"8.0": "mysql/8.0/spec.json"}, + } + + assert version_map == expected + + def test_generate_version_map_with_base_url(self, output_manager): + """Test _generate_version_map with base URL.""" + base_url = "https://api.example.com/schemas" + version_map = output_manager._generate_version_map(base_url) + + expected = { + "postgresql": { + "15.0": "https://api.example.com/schemas/postgresql/15.0/spec.json", + "16.0": "https://api.example.com/schemas/postgresql/16.0/spec.json", + }, + "mysql": {"8.0": "https://api.example.com/schemas/mysql/8.0/spec.json"}, + } + + assert version_map == expected + + def test_generate_version_map_ignores_files_in_engine_dir(self, temp_output_dir): + 
"""Test _generate_version_map ignores files in engine directories.""" + # Create a file in the postgresql directory (not a version directory) + postgresql_dir = temp_output_dir / "postgresql" + (postgresql_dir / "readme.txt").write_text("Some file") + + manager = OutputManager(temp_output_dir) + version_map = manager._generate_version_map() + + # Should still have the version directories, but ignore the file + assert "postgresql" in version_map + assert "15.0" in version_map["postgresql"] + assert "16.0" in version_map["postgresql"] + + def test_generate_version_map_ignores_version_dirs_without_spec( + self, temp_output_dir + ): + """Test _generate_version_map ignores version directories without spec.json.""" + # Create a version directory without spec.json + empty_version_dir = temp_output_dir / "postgresql" / "17.0" + empty_version_dir.mkdir() + + manager = OutputManager(temp_output_dir) + version_map = manager._generate_version_map() + + # Should not include the empty version directory + assert "17.0" not in version_map["postgresql"] + assert "15.0" in version_map["postgresql"] + assert "16.0" in version_map["postgresql"] + + def test_write_version_map_success(self, output_manager): + """Test successful version map writing.""" + result_path = output_manager.write_version_map() + + # Check that file was created + assert result_path.exists() + assert result_path.name == "vmap.json" + + # Check file contents + with open(result_path, "r") as f: + written_map = json.load(f) + + expected = { + "postgresql": { + "15.0": "postgresql/15.0/spec.json", + "16.0": "postgresql/16.0/spec.json", + }, + "mysql": {"8.0": "mysql/8.0/spec.json"}, + } + + assert written_map == expected + + def test_write_version_map_with_base_url(self, output_manager): + """Test version map writing with base URL.""" + base_url = "https://api.example.com/schemas" + result_path = output_manager.write_version_map(base_url) + + # Check file contents + with open(result_path, "r") as f: + written_map = json.load(f) + + # All URLs should include the base URL + for engine_versions in written_map.values(): + for url in engine_versions.values(): + assert url.startswith(base_url) + + def test_write_version_map_creates_output_directory(self, tmp_path): + """Test that write_version_map creates output directory if it doesn't exist.""" + output_dir = tmp_path / "new_output" + manager = OutputManager(output_dir) + + result_path = manager.write_version_map() + + # Directory should be created + assert output_dir.exists() + assert result_path.exists() + + def test_write_version_map_permission_error(self, output_manager): + """Test handling of permission errors during version map writing.""" + with patch("builtins.open", side_effect=PermissionError("Access denied")): + with pytest.raises(PermissionError, match="Failed to write version map"): + output_manager.write_version_map() + + def test_write_version_map_json_formatting(self, output_manager): + """Test that version map is written with proper JSON formatting.""" + result_path = output_manager.write_version_map() + + # Read the raw file content to check formatting + with open(result_path, "r") as f: + content = f.read() + + # Should be pretty-printed (contains newlines and indentation) + assert "\n" in content + assert " " in content # Check for indentation + + # Should be valid JSON + json.loads(content) # This will raise if invalid + + def test_integration_with_schema_generation(self, tmp_path): + """Test integration of version map with actual schema generation.""" + output_dir = tmp_path 
/ "integration_test" + manager = OutputManager(output_dir) + + # Write some test schemas + test_schema = {"test": "data"} + manager.write_schema(test_schema, "postgresql", "15.0") + manager.write_schema(test_schema, "mysql", "8.0") + + # Generate version map + vmap_path = manager.write_version_map("https://example.com") + + # Verify version map contains the schemas we just wrote + with open(vmap_path, "r") as f: + version_map = json.load(f) + + assert "postgresql" in version_map + assert "mysql" in version_map + assert ( + version_map["postgresql"]["15.0"] + == "https://example.com/postgresql/15.0/spec.json" + ) + assert version_map["mysql"]["8.0"] == "https://example.com/mysql/8.0/spec.json" diff --git a/tests/test_resolver.py b/tests/test_resolver.py new file mode 100644 index 0000000..300564b --- /dev/null +++ b/tests/test_resolver.py @@ -0,0 +1,31 @@ +from database_schema_spec.resolution.resolver import JSONRefResolver + + +def test_circular_reference_detection(): + resolver = JSONRefResolver() + resolver.resolution_stack = ["a.json", "b.json"] + assert resolver.detect_circular_reference("a.json") + assert not resolver.detect_circular_reference("c.json") + + +def test_resolve_references_no_ref(): + resolver = JSONRefResolver() + schema = {"properties": {"foo": {"type": "string"}}} + result = resolver.resolve_references(schema) + assert result == schema + + +def test_resolve_references_with_ref(monkeypatch): + resolver = JSONRefResolver() + schema = {"$ref": "other.json", "extra": 1} + referenced = {"properties": {"bar": {"type": "number"}}} + + def fake_load_referenced_file(ref_path, current_file=None): + assert ref_path == "other.json" + return referenced + + resolver.load_referenced_file = fake_load_referenced_file + resolver.resolve_oneof_for_variant = lambda schema, schema_path: schema + result = resolver.resolve_references(schema) + assert "bar" in result["properties"] + assert result["extra"] == 1 diff --git a/tests/test_validation.py b/tests/test_validation.py new file mode 100644 index 0000000..8caeded --- /dev/null +++ b/tests/test_validation.py @@ -0,0 +1,24 @@ +from database_schema_spec.core.schemas import ValidationResult +from database_schema_spec.validation.schema_validator import SchemaValidator + + +def test_valid_schema(): + schema = { + "properties": { + "database": {"type": "object"}, + "schema": {"type": "object"}, + } + } + validator = SchemaValidator() + result: ValidationResult = validator.validate_schema(schema) + assert result.is_valid + assert not result.errors + + +def test_missing_properties(): + # Test a schema that explicitly declares type object but is missing properties + schema = {"type": "object"} + validator = SchemaValidator() + result: ValidationResult = validator.validate_schema(schema) + assert not result.is_valid + assert "Missing 'properties' field" in result.errors[0] diff --git a/tests/test_validation_production.py b/tests/test_validation_production.py new file mode 100644 index 0000000..2e328af --- /dev/null +++ b/tests/test_validation_production.py @@ -0,0 +1,232 @@ +"""Production-quality validation tests.""" + +import pytest + +from database_schema_spec.core.schemas import ValidationResult +from database_schema_spec.validation.schema_validator import SchemaValidator + + +class TestSchemaValidator: + """Test SchemaValidator with comprehensive coverage.""" + + def test_validate_valid_basic_schema(self): + """Should validate a basic valid schema successfully.""" + schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": 
"object", + "properties": { + "database": {"type": "object"}, + "schema": {"type": "object"}, + }, + "required": ["database", "schema"], + } + + validator = SchemaValidator() + result: ValidationResult = validator.validate_schema(schema) + + assert result.is_valid + assert len(result.errors) == 0 + assert isinstance(result.warnings, list) + + def test_validate_schema_missing_required_properties(self): + """Should detect missing required properties.""" + schema = { + "type": "object" + # Missing properties field + } + + validator = SchemaValidator() + result: ValidationResult = validator.validate_schema(schema) + + assert not result.is_valid + assert len(result.errors) > 0 + assert any("properties" in error.lower() for error in result.errors) + + def test_validate_schema_invalid_json_schema(self): + """Should detect invalid JSON Schema structure.""" + invalid_schema = { + "type": "invalid_type", # Not a valid JSON Schema type + "properties": {"test": {"type": "another_invalid_type"}}, + } + + validator = SchemaValidator() + result: ValidationResult = validator.validate_schema(invalid_schema) + + assert not result.is_valid + assert len(result.errors) > 0 + + def test_validate_schema_with_oneOf_conditions(self): + """Should validate schemas with complex oneOf conditions.""" + schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": {"database": {"type": "object"}}, + "oneOf": [ + { + "if": { + "properties": { + "database": { + "properties": {"engine": {"const": "postgresql"}} + } + } + }, + "then": {"properties": {"postgres_features": {"type": "object"}}}, + } + ], + } + + validator = SchemaValidator() + result: ValidationResult = validator.validate_schema(schema) + + assert result.is_valid + assert len(result.errors) == 0 + + def test_validate_schema_with_refs(self): + """Should handle schemas with $ref references.""" + schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "database": {"$ref": "database.json"}, + "schema": {"$ref": "schema.json"}, + }, + } + + validator = SchemaValidator() + result: ValidationResult = validator.validate_schema(schema) + + assert result.is_valid + # Should not error on $ref - that's resolved elsewhere + + def test_validate_schema_circular_structure(self): + """Should handle potential circular references in validation.""" + # Create a schema that references itself (though not a real $ref) + schema = { + "type": "object", + "properties": { + "self": { + "type": "object", + "properties": { + "nested": { + "type": "object", + # This would be a deep nesting scenario + "properties": {"database": {"type": "string"}}, + } + }, + } + }, + } + + validator = SchemaValidator() + result: ValidationResult = validator.validate_schema(schema) + + assert result.is_valid + + def test_validate_schema_performance_large_schema(self): + """Should validate large schemas efficiently.""" + # Create a large schema with many properties + large_schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": {}, + } + + # Add many properties + for i in range(1000): + large_schema["properties"][f"prop_{i}"] = { + "type": "string", + "description": f"Property {i}", + "minLength": 1, + "maxLength": 100, + } + + validator = SchemaValidator() + + import time + + start_time = time.time() + result: ValidationResult = validator.validate_schema(large_schema) + end_time = time.time() + + validation_time = end_time - start_time + + assert result.is_valid + assert 
diff --git a/tests/test_validation_production.py b/tests/test_validation_production.py
new file mode 100644
index 0000000..2e328af
--- /dev/null
+++ b/tests/test_validation_production.py
@@ -0,0 +1,232 @@
+"""Production-quality validation tests."""
+
+import pytest
+
+from database_schema_spec.core.schemas import ValidationResult
+from database_schema_spec.validation.schema_validator import SchemaValidator
+
+
+class TestSchemaValidator:
+    """Test SchemaValidator with comprehensive coverage."""
+
+    def test_validate_valid_basic_schema(self):
+        """Should validate a basic valid schema successfully."""
+        schema = {
+            "$schema": "http://json-schema.org/draft-07/schema#",
+            "type": "object",
+            "properties": {
+                "database": {"type": "object"},
+                "schema": {"type": "object"},
+            },
+            "required": ["database", "schema"],
+        }
+
+        validator = SchemaValidator()
+        result: ValidationResult = validator.validate_schema(schema)
+
+        assert result.is_valid
+        assert len(result.errors) == 0
+        assert isinstance(result.warnings, list)
+
+    def test_validate_schema_missing_required_properties(self):
+        """Should detect missing required properties."""
+        schema = {
+            "type": "object"
+            # Missing properties field
+        }
+
+        validator = SchemaValidator()
+        result: ValidationResult = validator.validate_schema(schema)
+
+        assert not result.is_valid
+        assert len(result.errors) > 0
+        assert any("properties" in error.lower() for error in result.errors)
+
+    def test_validate_schema_invalid_json_schema(self):
+        """Should detect invalid JSON Schema structure."""
+        invalid_schema = {
+            "type": "invalid_type",  # Not a valid JSON Schema type
+            "properties": {"test": {"type": "another_invalid_type"}},
+        }
+
+        validator = SchemaValidator()
+        result: ValidationResult = validator.validate_schema(invalid_schema)
+
+        assert not result.is_valid
+        assert len(result.errors) > 0
+
+    def test_validate_schema_with_oneOf_conditions(self):
+        """Should validate schemas with complex oneOf conditions."""
+        schema = {
+            "$schema": "http://json-schema.org/draft-07/schema#",
+            "type": "object",
+            "properties": {"database": {"type": "object"}},
+            "oneOf": [
+                {
+                    "if": {
+                        "properties": {
+                            "database": {
+                                "properties": {"engine": {"const": "postgresql"}}
+                            }
+                        }
+                    },
+                    "then": {"properties": {"postgres_features": {"type": "object"}}},
+                }
+            ],
+        }
+
+        validator = SchemaValidator()
+        result: ValidationResult = validator.validate_schema(schema)
+
+        assert result.is_valid
+        assert len(result.errors) == 0
+
+    def test_validate_schema_with_refs(self):
+        """Should handle schemas with $ref references."""
+        schema = {
+            "$schema": "http://json-schema.org/draft-07/schema#",
+            "type": "object",
+            "properties": {
+                "database": {"$ref": "database.json"},
+                "schema": {"$ref": "schema.json"},
+            },
+        }
+
+        validator = SchemaValidator()
+        result: ValidationResult = validator.validate_schema(schema)
+
+        assert result.is_valid
+        # Should not error on $ref - that's resolved elsewhere
+
+    def test_validate_schema_circular_structure(self):
+        """Should handle potential circular references in validation."""
+        # Create a schema that references itself (though not a real $ref)
+        schema = {
+            "type": "object",
+            "properties": {
+                "self": {
+                    "type": "object",
+                    "properties": {
+                        "nested": {
+                            "type": "object",
+                            # This would be a deep nesting scenario
+                            "properties": {"database": {"type": "string"}},
+                        }
+                    },
+                }
+            },
+        }
+
+        validator = SchemaValidator()
+        result: ValidationResult = validator.validate_schema(schema)
+
+        assert result.is_valid
+
+    def test_validate_schema_performance_large_schema(self):
+        """Should validate large schemas efficiently."""
+        # Create a large schema with many properties
+        large_schema = {
+            "$schema": "http://json-schema.org/draft-07/schema#",
+            "type": "object",
+            "properties": {},
+        }
+
+        # Add many properties
+        for i in range(1000):
+            large_schema["properties"][f"prop_{i}"] = {
+                "type": "string",
+                "description": f"Property {i}",
+                "minLength": 1,
+                "maxLength": 100,
+            }
+
+        validator = SchemaValidator()
+
+        import time
+
+        start_time = time.time()
+        result: ValidationResult = validator.validate_schema(large_schema)
+        end_time = time.time()
+
+        validation_time = end_time - start_time
+
+        assert result.is_valid
+        assert validation_time < 2.0, f"Validation took too long: {validation_time}s"
+
+    def test_validate_schema_with_warnings(self):
+        """Should generate warnings for problematic but valid schemas."""
+        # Schema that might generate warnings (structure-specific)
+        schema_with_warnings = {
+            "$schema": "http://json-schema.org/draft-07/schema#",
+            "type": "object",
+            "properties": {
+                "database": {"type": "object"}
+                # Might generate warnings about missing required fields
+            },
+        }
+
+        validator = SchemaValidator()
+        result: ValidationResult = validator.validate_schema(schema_with_warnings)
+
+        # Should be valid but might have warnings
+        assert result.is_valid
+        assert isinstance(result.warnings, list)
+
+    @pytest.mark.parametrize(
+        "invalid_type",
+        [
+            "invalid_type",
+            123,  # Not a string
+            [],  # Array instead of string
+            None,  # None type
+        ],
+    )
+    def test_validate_schema_invalid_types(self, invalid_type):
+        """Should reject schemas with invalid type values."""
+        invalid_schema = {"type": invalid_type, "properties": {}}
+
+        validator = SchemaValidator()
+        result: ValidationResult = validator.validate_schema(invalid_schema)
+
+        assert not result.is_valid
+        assert len(result.errors) > 0
+
+    def test_validate_schema_empty_input(self):
+        """Test validation of empty schema - should be valid but with warnings."""
+        validator = SchemaValidator()
+        result = validator.validate_schema({})
+
+        # Empty schema is technically valid JSON Schema but will have warnings
+        assert result.is_valid
+        assert (
+            len(result.warnings) > 0
+        )  # Should have warnings about missing recommended fields
+        assert "Missing '$schema' field" in result.warnings[0]
+
+    def test_validate_schema_malformed_properties(self):
+        """Should detect malformed properties structures."""
+        malformed_schema = {
+            "type": "object",
+            "properties": "this should be an object",  # Invalid - should be dict
+        }
+
+        validator = SchemaValidator()
+        result: ValidationResult = validator.validate_schema(malformed_schema)
+
+        assert not result.is_valid
+        assert len(result.errors) > 0
+
+    def test_validation_result_immutability(self):
+        """Should expose the expected ValidationResult structure."""
+        schema = {"type": "object", "properties": {"test": {"type": "string"}}}
+
+        validator = SchemaValidator()
+        result = validator.validate_schema(schema)
+
+        # Test the result object structure
+        assert hasattr(result, "is_valid")
+        assert hasattr(result, "errors")
+        assert hasattr(result, "warnings")
+        assert isinstance(result.errors, list)
+        assert isinstance(result.warnings, list)
+        assert isinstance(result.is_valid, bool)
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000..0b2c531
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,528 @@
+version = 1
+revision = 2
+requires-python = ">=3.13"
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
+]
+
+[[package]]
+name = "attrs"
+version = "25.3.0"
+source = { registry =
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/0e/66dbd4c6a7f0758a8d18044c048779ba21fb94856e1edcf764bd5403e710/coverage-7.10.1.tar.gz", hash = "sha256:ae2b4856f29ddfe827106794f3589949a57da6f0d38ab01e24ec35107979ba57", size = 819938, upload-time = "2025-07-27T14:13:39.045Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/72/135ff5fef09b1ffe78dbe6fcf1e16b2e564cd35faeacf3d63d60d887f12d/coverage-7.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ebb08d0867c5a25dffa4823377292a0ffd7aaafb218b5d4e2e106378b1061e39", size = 214960, upload-time = "2025-07-27T14:11:55.959Z" }, + { url = "https://files.pythonhosted.org/packages/b1/aa/73a5d1a6fc08ca709a8177825616aa95ee6bf34d522517c2595484a3e6c9/coverage-7.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f32a95a83c2e17422f67af922a89422cd24c6fa94041f083dd0bb4f6057d0bc7", size = 215220, upload-time = "2025-07-27T14:11:57.899Z" }, + { url = "https://files.pythonhosted.org/packages/8d/40/3124fdd45ed3772a42fc73ca41c091699b38a2c3bd4f9cb564162378e8b6/coverage-7.10.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c4c746d11c8aba4b9f58ca8bfc6fbfd0da4efe7960ae5540d1a1b13655ee8892", size = 245772, upload-time = "2025-07-27T14:12:00.422Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/62/a77b254822efa8c12ad59e8039f2bc3df56dc162ebda55e1943e35ba31a5/coverage-7.10.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7f39edd52c23e5c7ed94e0e4bf088928029edf86ef10b95413e5ea670c5e92d7", size = 248116, upload-time = "2025-07-27T14:12:03.099Z" }, + { url = "https://files.pythonhosted.org/packages/1d/01/8101f062f472a3a6205b458d18ef0444a63ae5d36a8a5ed5dd0f6167f4db/coverage-7.10.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab6e19b684981d0cd968906e293d5628e89faacb27977c92f3600b201926b994", size = 249554, upload-time = "2025-07-27T14:12:04.668Z" }, + { url = "https://files.pythonhosted.org/packages/8f/7b/e51bc61573e71ff7275a4f167aecbd16cb010aefdf54bcd8b0a133391263/coverage-7.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5121d8cf0eacb16133501455d216bb5f99899ae2f52d394fe45d59229e6611d0", size = 247766, upload-time = "2025-07-27T14:12:06.234Z" }, + { url = "https://files.pythonhosted.org/packages/4b/71/1c96d66a51d4204a9d6d12df53c4071d87e110941a2a1fe94693192262f5/coverage-7.10.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df1c742ca6f46a6f6cbcaef9ac694dc2cb1260d30a6a2f5c68c5f5bcfee1cfd7", size = 245735, upload-time = "2025-07-27T14:12:08.305Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/efbc2ac4d35ae2f22ef6df2ca084c60e13bd9378be68655e3268c80349ab/coverage-7.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:40f9a38676f9c073bf4b9194707aa1eb97dca0e22cc3766d83879d72500132c7", size = 247118, upload-time = "2025-07-27T14:12:09.903Z" }, + { url = "https://files.pythonhosted.org/packages/d1/22/073848352bec28ca65f2b6816b892fcf9a31abbef07b868487ad15dd55f1/coverage-7.10.1-cp313-cp313-win32.whl", hash = "sha256:2348631f049e884839553b9974f0821d39241c6ffb01a418efce434f7eba0fe7", size = 217381, upload-time = "2025-07-27T14:12:11.535Z" }, + { url = "https://files.pythonhosted.org/packages/b7/df/df6a0ff33b042f000089bd11b6bb034bab073e2ab64a56e78ed882cba55d/coverage-7.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:4072b31361b0d6d23f750c524f694e1a417c1220a30d3ef02741eed28520c48e", size = 218152, upload-time = "2025-07-27T14:12:13.182Z" }, + { url = "https://files.pythonhosted.org/packages/30/e3/5085ca849a40ed6b47cdb8f65471c2f754e19390b5a12fa8abd25cbfaa8f/coverage-7.10.1-cp313-cp313-win_arm64.whl", hash = "sha256:3e31dfb8271937cab9425f19259b1b1d1f556790e98eb266009e7a61d337b6d4", size = 216559, upload-time = "2025-07-27T14:12:14.807Z" }, + { url = "https://files.pythonhosted.org/packages/cc/93/58714efbfdeb547909feaabe1d67b2bdd59f0597060271b9c548d5efb529/coverage-7.10.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1c4f679c6b573a5257af6012f167a45be4c749c9925fd44d5178fd641ad8bf72", size = 215677, upload-time = "2025-07-27T14:12:16.68Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0c/18eaa5897e7e8cb3f8c45e563e23e8a85686b4585e29d53cacb6bc9cb340/coverage-7.10.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:871ebe8143da284bd77b84a9136200bd638be253618765d21a1fce71006d94af", size = 215899, upload-time = "2025-07-27T14:12:18.758Z" }, + { url = "https://files.pythonhosted.org/packages/84/c1/9d1affacc3c75b5a184c140377701bbf14fc94619367f07a269cd9e4fed6/coverage-7.10.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:998c4751dabf7d29b30594af416e4bf5091f11f92a8d88eb1512c7ba136d1ed7", size = 257140, upload-time = "2025-07-27T14:12:20.357Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/0f/339bc6b8fa968c346df346068cca1f24bdea2ddfa93bb3dc2e7749730962/coverage-7.10.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:780f750a25e7749d0af6b3631759c2c14f45de209f3faaa2398312d1c7a22759", size = 259005, upload-time = "2025-07-27T14:12:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/c8/22/89390864b92ea7c909079939b71baba7e5b42a76bf327c1d615bd829ba57/coverage-7.10.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:590bdba9445df4763bdbebc928d8182f094c1f3947a8dc0fc82ef014dbdd8324", size = 261143, upload-time = "2025-07-27T14:12:23.746Z" }, + { url = "https://files.pythonhosted.org/packages/2c/56/3d04d89017c0c41c7a71bd69b29699d919b6bbf2649b8b2091240b97dd6a/coverage-7.10.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b2df80cb6a2af86d300e70acb82e9b79dab2c1e6971e44b78dbfc1a1e736b53", size = 258735, upload-time = "2025-07-27T14:12:25.73Z" }, + { url = "https://files.pythonhosted.org/packages/cb/40/312252c8afa5ca781063a09d931f4b9409dc91526cd0b5a2b84143ffafa2/coverage-7.10.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d6a558c2725bfb6337bf57c1cd366c13798bfd3bfc9e3dd1f4a6f6fc95a4605f", size = 256871, upload-time = "2025-07-27T14:12:27.767Z" }, + { url = "https://files.pythonhosted.org/packages/1f/2b/564947d5dede068215aaddb9e05638aeac079685101462218229ddea9113/coverage-7.10.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e6150d167f32f2a54690e572e0a4c90296fb000a18e9b26ab81a6489e24e78dd", size = 257692, upload-time = "2025-07-27T14:12:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/93/1b/c8a867ade85cb26d802aea2209b9c2c80613b9c122baa8c8ecea6799648f/coverage-7.10.1-cp313-cp313t-win32.whl", hash = "sha256:d946a0c067aa88be4a593aad1236493313bafaa27e2a2080bfe88db827972f3c", size = 218059, upload-time = "2025-07-27T14:12:31.076Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fe/cd4ab40570ae83a516bf5e754ea4388aeedd48e660e40c50b7713ed4f930/coverage-7.10.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e37c72eaccdd5ed1130c67a92ad38f5b2af66eeff7b0abe29534225db2ef7b18", size = 219150, upload-time = "2025-07-27T14:12:32.746Z" }, + { url = "https://files.pythonhosted.org/packages/8d/16/6e5ed5854be6d70d0c39e9cb9dd2449f2c8c34455534c32c1a508c7dbdb5/coverage-7.10.1-cp313-cp313t-win_arm64.whl", hash = "sha256:89ec0ffc215c590c732918c95cd02b55c7d0f569d76b90bb1a5e78aa340618e4", size = 217014, upload-time = "2025-07-27T14:12:34.406Z" }, + { url = "https://files.pythonhosted.org/packages/54/8e/6d0bfe9c3d7121cf936c5f8b03e8c3da1484fb801703127dba20fb8bd3c7/coverage-7.10.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:166d89c57e877e93d8827dac32cedae6b0277ca684c6511497311249f35a280c", size = 214951, upload-time = "2025-07-27T14:12:36.069Z" }, + { url = "https://files.pythonhosted.org/packages/f2/29/e3e51a8c653cf2174c60532aafeb5065cea0911403fa144c9abe39790308/coverage-7.10.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:bed4a2341b33cd1a7d9ffc47df4a78ee61d3416d43b4adc9e18b7d266650b83e", size = 215229, upload-time = "2025-07-27T14:12:37.759Z" }, + { url = "https://files.pythonhosted.org/packages/e0/59/3c972080b2fa18b6c4510201f6d4dc87159d450627d062cd9ad051134062/coverage-7.10.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ddca1e4f5f4c67980533df01430184c19b5359900e080248bbf4ed6789584d8b", size = 245738, upload-time = "2025-07-27T14:12:39.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/04/fc0d99d3f809452654e958e1788454f6e27b34e43f8f8598191c8ad13537/coverage-7.10.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:37b69226001d8b7de7126cad7366b0778d36777e4d788c66991455ba817c5b41", size = 248045, upload-time = "2025-07-27T14:12:41.387Z" }, + { url = "https://files.pythonhosted.org/packages/5e/2e/afcbf599e77e0dfbf4c97197747250d13d397d27e185b93987d9eaac053d/coverage-7.10.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2f22102197bcb1722691296f9e589f02b616f874e54a209284dd7b9294b0b7f", size = 249666, upload-time = "2025-07-27T14:12:43.056Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ae/bc47f7f8ecb7a06cbae2bf86a6fa20f479dd902bc80f57cff7730438059d/coverage-7.10.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1e0c768b0f9ac5839dac5cf88992a4bb459e488ee8a1f8489af4cb33b1af00f1", size = 247692, upload-time = "2025-07-27T14:12:44.83Z" }, + { url = "https://files.pythonhosted.org/packages/b6/26/cbfa3092d31ccba8ba7647e4d25753263e818b4547eba446b113d7d1efdf/coverage-7.10.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:991196702d5e0b120a8fef2664e1b9c333a81d36d5f6bcf6b225c0cf8b0451a2", size = 245536, upload-time = "2025-07-27T14:12:46.527Z" }, + { url = "https://files.pythonhosted.org/packages/56/77/9c68e92500e6a1c83d024a70eadcc9a173f21aadd73c4675fe64c9c43fdf/coverage-7.10.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ae8e59e5f4fd85d6ad34c2bb9d74037b5b11be072b8b7e9986beb11f957573d4", size = 246954, upload-time = "2025-07-27T14:12:49.279Z" }, + { url = "https://files.pythonhosted.org/packages/7f/a5/ba96671c5a669672aacd9877a5987c8551501b602827b4e84256da2a30a7/coverage-7.10.1-cp314-cp314-win32.whl", hash = "sha256:042125c89cf74a074984002e165d61fe0e31c7bd40ebb4bbebf07939b5924613", size = 217616, upload-time = "2025-07-27T14:12:51.214Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3c/e1e1eb95fc1585f15a410208c4795db24a948e04d9bde818fe4eb893bc85/coverage-7.10.1-cp314-cp314-win_amd64.whl", hash = "sha256:a22c3bfe09f7a530e2c94c87ff7af867259c91bef87ed2089cd69b783af7b84e", size = 218412, upload-time = "2025-07-27T14:12:53.429Z" }, + { url = "https://files.pythonhosted.org/packages/b0/85/7e1e5be2cb966cba95566ba702b13a572ca744fbb3779df9888213762d67/coverage-7.10.1-cp314-cp314-win_arm64.whl", hash = "sha256:ee6be07af68d9c4fca4027c70cea0c31a0f1bc9cb464ff3c84a1f916bf82e652", size = 216776, upload-time = "2025-07-27T14:12:55.482Z" }, + { url = "https://files.pythonhosted.org/packages/62/0f/5bb8f29923141cca8560fe2217679caf4e0db643872c1945ac7d8748c2a7/coverage-7.10.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d24fb3c0c8ff0d517c5ca5de7cf3994a4cd559cde0315201511dbfa7ab528894", size = 215698, upload-time = "2025-07-27T14:12:57.225Z" }, + { url = "https://files.pythonhosted.org/packages/80/29/547038ffa4e8e4d9e82f7dfc6d152f75fcdc0af146913f0ba03875211f03/coverage-7.10.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1217a54cfd79be20512a67ca81c7da3f2163f51bbfd188aab91054df012154f5", size = 215902, upload-time = "2025-07-27T14:12:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/e1/8a/7aaa8fbfaed900147987a424e112af2e7790e1ac9cd92601e5bd4e1ba60a/coverage-7.10.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:51f30da7a52c009667e02f125737229d7d8044ad84b79db454308033a7808ab2", size = 257230, upload-time = "2025-07-27T14:13:01.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/1d/c252b5ffac44294e23a0d79dd5acf51749b39795ccc898faeabf7bee903f/coverage-7.10.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ed3718c757c82d920f1c94089066225ca2ad7f00bb904cb72b1c39ebdd906ccb", size = 259194, upload-time = "2025-07-27T14:13:03.247Z" }, + { url = "https://files.pythonhosted.org/packages/16/ad/6c8d9f83d08f3bac2e7507534d0c48d1a4f52c18e6f94919d364edbdfa8f/coverage-7.10.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc452481e124a819ced0c25412ea2e144269ef2f2534b862d9f6a9dae4bda17b", size = 261316, upload-time = "2025-07-27T14:13:04.957Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4e/f9bbf3a36c061e2e0e0f78369c006d66416561a33d2bee63345aee8ee65e/coverage-7.10.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9d6f494c307e5cb9b1e052ec1a471060f1dea092c8116e642e7a23e79d9388ea", size = 258794, upload-time = "2025-07-27T14:13:06.715Z" }, + { url = "https://files.pythonhosted.org/packages/87/82/e600bbe78eb2cb0541751d03cef9314bcd0897e8eea156219c39b685f869/coverage-7.10.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:fc0e46d86905ddd16b85991f1f4919028092b4e511689bbdaff0876bd8aab3dd", size = 256869, upload-time = "2025-07-27T14:13:08.933Z" }, + { url = "https://files.pythonhosted.org/packages/ce/5d/2fc9a9236c5268f68ac011d97cd3a5ad16cc420535369bedbda659fdd9b7/coverage-7.10.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80b9ccd82e30038b61fc9a692a8dc4801504689651b281ed9109f10cc9fe8b4d", size = 257765, upload-time = "2025-07-27T14:13:10.778Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/b4e00b2bd48a2dc8e1c7d2aea7455f40af2e36484ab2ef06deb85883e9fe/coverage-7.10.1-cp314-cp314t-win32.whl", hash = "sha256:e58991a2b213417285ec866d3cd32db17a6a88061a985dbb7e8e8f13af429c47", size = 218420, upload-time = "2025-07-27T14:13:12.882Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d21d05f33ea27ece327422240e69654b5932b0b29e7fbc40fbab3cf199bf/coverage-7.10.1-cp314-cp314t-win_amd64.whl", hash = "sha256:e88dd71e4ecbc49d9d57d064117462c43f40a21a1383507811cf834a4a620651", size = 219536, upload-time = "2025-07-27T14:13:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/a6/68/7fea94b141281ed8be3d1d5c4319a97f2befc3e487ce33657fc64db2c45e/coverage-7.10.1-cp314-cp314t-win_arm64.whl", hash = "sha256:1aadfb06a30c62c2eb82322171fe1f7c288c80ca4156d46af0ca039052814bab", size = 217190, upload-time = "2025-07-27T14:13:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/0f/64/922899cff2c0fd3496be83fa8b81230f5a8d82a2ad30f98370b133c2c83b/coverage-7.10.1-py3-none-any.whl", hash = "sha256:fa2a258aa6bf188eb9a8948f7102a83da7c430a0dce918dbd8b60ef8fcb772d7", size = 206597, upload-time = "2025-07-27T14:13:37.221Z" }, +] + +[[package]] +name = "database-schema-spec" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-dotenv" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pre-commit" }, + { name = "pyright" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "pytest-mock" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "jsonschema", specifier = ">=4.25.0" }, + { name = "pydantic", specifier = ">=2.11.7" }, + { name = "pydantic-settings", specifier = ">=2.10.1" }, + { name = "python-dotenv", specifier = ">=1.1.1" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pre-commit", specifier = ">=4.2.0" }, + { name = "pyright", specifier = ">=1.1.403" }, + { name = "pytest", specifier = ">=8.4.1" }, + { name = "pytest-cov", specifier = ">=6.2.1" }, + { name = "pytest-mock", specifier = ">=3.14.1" }, + { name = "ruff", specifier = ">=0.12.7" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "filelock" +version = "3.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, +] + +[[package]] +name = "identify" +version = "2.6.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/00/a297a868e9d0784450faa7365c2172a7d6110c763e30ba861867c32ae6a9/jsonschema-4.25.0.tar.gz", hash = "sha256:e63acf5c11762c0e6672ffb61482bdf57f0876684d8d249c0fe2d730d48bc55f", size = 356830, upload-time = "2025-07-18T15:39:45.11Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/54/c86cd8e011fe98803d7e382fd67c0df5ceab8d2b7ad8c5a81524f791551c/jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716", size = 89184, upload-time = "2025-07-18T15:39:42.956Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload-time = "2025-03-18T21:35:20.987Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, +] + +[[package]] +name = "pydantic" +version = "2.11.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = 
"2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.403" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/f6/35f885264ff08c960b23d1542038d8da86971c5d8c955cfab195a4f672d7/pyright-1.1.403.tar.gz", hash = "sha256:3ab69b9f41c67fb5bbb4d7a36243256f0d549ed3608678d381d5f51863921104", size = 3913526, upload-time = "2025-07-09T07:15:52.882Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/b6/b04e5c2f41a5ccad74a1a4759da41adb20b4bc9d59a5e08d29ba60084d07/pyright-1.1.403-py3-none-any.whl", hash = "sha256:c0eeca5aa76cbef3fcc271259bbd785753c7ad7bcac99a9162b4c4c7daed23b3", size = 5684504, upload-time = "2025-07-09T07:15:50.958Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, +] + +[[package]] +name = "pytest-cov" +version = "6.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload-time = "2025-05-26T13:58:45.167Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = 
"sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385, upload-time = "2025-07-01T15:57:13.958Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/67/bb62d0109493b12b1c6ab00de7a5566aa84c0e44217c2d94bee1bd370da9/rpds_py-0.26.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:696764a5be111b036256c0b18cd29783fab22154690fc698062fc1b0084b511d", size = 363917, upload-time = "2025-07-01T15:54:34.755Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f3/34e6ae1925a5706c0f002a8d2d7f172373b855768149796af87bd65dcdb9/rpds_py-0.26.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e6c15d2080a63aaed876e228efe4f814bc7889c63b1e112ad46fdc8b368b9e1", size = 350073, upload-time = "2025-07-01T15:54:36.292Z" }, + { url = "https://files.pythonhosted.org/packages/75/83/1953a9d4f4e4de7fd0533733e041c28135f3c21485faaef56a8aadbd96b5/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390e3170babf42462739a93321e657444f0862c6d722a291accc46f9d21ed04e", size = 384214, upload-time = "2025-07-01T15:54:37.469Z" }, + { url = "https://files.pythonhosted.org/packages/48/0e/983ed1b792b3322ea1d065e67f4b230f3b96025f5ce3878cc40af09b7533/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7da84c2c74c0f5bc97d853d9e17bb83e2dcafcff0dc48286916001cc114379a1", size = 400113, upload-time = "2025-07-01T15:54:38.954Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/36c0925fff6f660a80be259c5b4f5e53a16851f946eb080351d057698528/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c5fe114a6dd480a510b6d3661d09d67d1622c4bf20660a474507aaee7eeeee9", size = 515189, upload-time = "2025-07-01T15:54:40.57Z" }, + { url = "https://files.pythonhosted.org/packages/13/45/cbf07fc03ba7a9b54662c9badb58294ecfb24f828b9732970bd1a431ed5c/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3100b3090269f3a7ea727b06a6080d4eb7439dca4c0e91a07c5d133bb1727ea7", size = 406998, upload-time = "2025-07-01T15:54:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/6c/b0/8fa5e36e58657997873fd6a1cf621285ca822ca75b4b3434ead047daa307/rpds_py-0.26.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2c03c9b0c64afd0320ae57de4c982801271c0c211aa2d37f3003ff5feb75bb04", size = 385903, upload-time = "2025-07-01T15:54:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f7/b25437772f9f57d7a9fbd73ed86d0dcd76b4c7c6998348c070d90f23e315/rpds_py-0.26.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5963b72ccd199ade6ee493723d18a3f21ba7d5b957017607f815788cef50eaf1", size = 419785, upload-time = "2025-07-01T15:54:46.043Z" }, + { url = "https://files.pythonhosted.org/packages/a7/6b/63ffa55743dfcb4baf2e9e77a0b11f7f97ed96a54558fcb5717a4b2cd732/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9da4e873860ad5bab3291438525cae80169daecbfafe5657f7f5fb4d6b3f96b9", size = 561329, upload-time = "2025-07-01T15:54:47.64Z" }, + { url = "https://files.pythonhosted.org/packages/2f/07/1f4f5e2886c480a2346b1e6759c00278b8a69e697ae952d82ae2e6ee5db0/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5afaddaa8e8c7f1f7b4c5c725c0070b6eed0228f705b90a1732a48e84350f4e9", size = 590875, upload-time = "2025-07-01T15:54:48.9Z" }, + { url = "https://files.pythonhosted.org/packages/cc/bc/e6639f1b91c3a55f8c41b47d73e6307051b6e246254a827ede730624c0f8/rpds_py-0.26.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4916dc96489616a6f9667e7526af8fa693c0fdb4f3acb0e5d9f4400eb06a47ba", size = 556636, upload-time = "2025-07-01T15:54:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/05/4c/b3917c45566f9f9a209d38d9b54a1833f2bb1032a3e04c66f75726f28876/rpds_py-0.26.0-cp313-cp313-win32.whl", hash = "sha256:2a343f91b17097c546b93f7999976fd6c9d5900617aa848c81d794e062ab302b", size = 222663, upload-time = "2025-07-01T15:54:52.023Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0b/0851bdd6025775aaa2365bb8de0697ee2558184c800bfef8d7aef5ccde58/rpds_py-0.26.0-cp313-cp313-win_amd64.whl", hash = "sha256:0a0b60701f2300c81b2ac88a5fb893ccfa408e1c4a555a77f908a2596eb875a5", size = 234428, upload-time = "2025-07-01T15:54:53.692Z" }, + { url = "https://files.pythonhosted.org/packages/ed/e8/a47c64ed53149c75fb581e14a237b7b7cd18217e969c30d474d335105622/rpds_py-0.26.0-cp313-cp313-win_arm64.whl", hash = "sha256:257d011919f133a4746958257f2c75238e3ff54255acd5e3e11f3ff41fd14256", size = 222571, upload-time = "2025-07-01T15:54:54.822Z" }, + { url = "https://files.pythonhosted.org/packages/89/bf/3d970ba2e2bcd17d2912cb42874107390f72873e38e79267224110de5e61/rpds_py-0.26.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:529c8156d7506fba5740e05da8795688f87119cce330c244519cf706a4a3d618", size = 360475, upload-time = "2025-07-01T15:54:56.228Z" }, + { url = "https://files.pythonhosted.org/packages/82/9f/283e7e2979fc4ec2d8ecee506d5a3675fce5ed9b4b7cb387ea5d37c2f18d/rpds_py-0.26.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f53ec51f9d24e9638a40cabb95078ade8c99251945dad8d57bf4aabe86ecee35", size = 346692, upload-time = "2025-07-01T15:54:58.561Z" }, + { url = "https://files.pythonhosted.org/packages/e3/03/7e50423c04d78daf391da3cc4330bdb97042fc192a58b186f2d5deb7befd/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab504c4d654e4a29558eaa5bb8cea5fdc1703ea60a8099ffd9c758472cf913f", size = 379415, upload-time = "2025-07-01T15:54:59.751Z" }, + { url = "https://files.pythonhosted.org/packages/57/00/d11ee60d4d3b16808432417951c63df803afb0e0fc672b5e8d07e9edaaae/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd0641abca296bc1a00183fe44f7fced8807ed49d501f188faa642d0e4975b83", size = 391783, 
upload-time = "2025-07-01T15:55:00.898Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/1069c394d9c0d6d23c5b522e1f6546b65793a22950f6e0210adcc6f97c3e/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b312fecc1d017b5327afa81d4da1480f51c68810963a7336d92203dbb3d4f1", size = 512844, upload-time = "2025-07-01T15:55:02.201Z" }, + { url = "https://files.pythonhosted.org/packages/08/3b/c4fbf0926800ed70b2c245ceca99c49f066456755f5d6eb8863c2c51e6d0/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c741107203954f6fc34d3066d213d0a0c40f7bb5aafd698fb39888af277c70d8", size = 402105, upload-time = "2025-07-01T15:55:03.698Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/db69b52ca07413e568dae9dc674627a22297abb144c4d6022c6d78f1e5cc/rpds_py-0.26.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3e55a7db08dc9a6ed5fb7103019d2c1a38a349ac41901f9f66d7f95750942f", size = 383440, upload-time = "2025-07-01T15:55:05.398Z" }, + { url = "https://files.pythonhosted.org/packages/4c/e1/c65255ad5b63903e56b3bb3ff9dcc3f4f5c3badde5d08c741ee03903e951/rpds_py-0.26.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e851920caab2dbcae311fd28f4313c6953993893eb5c1bb367ec69d9a39e7ed", size = 412759, upload-time = "2025-07-01T15:55:08.316Z" }, + { url = "https://files.pythonhosted.org/packages/e4/22/bb731077872377a93c6e93b8a9487d0406c70208985831034ccdeed39c8e/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dfbf280da5f876d0b00c81f26bedce274e72a678c28845453885a9b3c22ae632", size = 556032, upload-time = "2025-07-01T15:55:09.52Z" }, + { url = "https://files.pythonhosted.org/packages/e0/8b/393322ce7bac5c4530fb96fc79cc9ea2f83e968ff5f6e873f905c493e1c4/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1cc81d14ddfa53d7f3906694d35d54d9d3f850ef8e4e99ee68bc0d1e5fed9a9c", size = 585416, upload-time = "2025-07-01T15:55:11.216Z" }, + { url = "https://files.pythonhosted.org/packages/49/ae/769dc372211835bf759319a7aae70525c6eb523e3371842c65b7ef41c9c6/rpds_py-0.26.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dca83c498b4650a91efcf7b88d669b170256bf8017a5db6f3e06c2bf031f57e0", size = 554049, upload-time = "2025-07-01T15:55:13.004Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f9/4c43f9cc203d6ba44ce3146246cdc38619d92c7bd7bad4946a3491bd5b70/rpds_py-0.26.0-cp313-cp313t-win32.whl", hash = "sha256:4d11382bcaf12f80b51d790dee295c56a159633a8e81e6323b16e55d81ae37e9", size = 218428, upload-time = "2025-07-01T15:55:14.486Z" }, + { url = "https://files.pythonhosted.org/packages/7e/8b/9286b7e822036a4a977f2f1e851c7345c20528dbd56b687bb67ed68a8ede/rpds_py-0.26.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff110acded3c22c033e637dd8896e411c7d3a11289b2edf041f86663dbc791e9", size = 231524, upload-time = "2025-07-01T15:55:15.745Z" }, + { url = "https://files.pythonhosted.org/packages/55/07/029b7c45db910c74e182de626dfdae0ad489a949d84a468465cd0ca36355/rpds_py-0.26.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:da619979df60a940cd434084355c514c25cf8eb4cf9a508510682f6c851a4f7a", size = 364292, upload-time = "2025-07-01T15:55:17.001Z" }, + { url = "https://files.pythonhosted.org/packages/13/d1/9b3d3f986216b4d1f584878dca15ce4797aaf5d372d738974ba737bf68d6/rpds_py-0.26.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea89a2458a1a75f87caabefe789c87539ea4e43b40f18cff526052e35bbb4fdf", size = 350334, upload-time = "2025-07-01T15:55:18.922Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/98/16d5e7bc9ec715fa9668731d0cf97f6b032724e61696e2db3d47aeb89214/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feac1045b3327a45944e7dcbeb57530339f6b17baff154df51ef8b0da34c8c12", size = 384875, upload-time = "2025-07-01T15:55:20.399Z" }, + { url = "https://files.pythonhosted.org/packages/f9/13/aa5e2b1ec5ab0e86a5c464d53514c0467bec6ba2507027d35fc81818358e/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b818a592bd69bfe437ee8368603d4a2d928c34cffcdf77c2e761a759ffd17d20", size = 399993, upload-time = "2025-07-01T15:55:21.729Z" }, + { url = "https://files.pythonhosted.org/packages/17/03/8021810b0e97923abdbab6474c8b77c69bcb4b2c58330777df9ff69dc559/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a8b0dd8648709b62d9372fc00a57466f5fdeefed666afe3fea5a6c9539a0331", size = 516683, upload-time = "2025-07-01T15:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b1/da8e61c87c2f3d836954239fdbbfb477bb7b54d74974d8f6fcb34342d166/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d3498ad0df07d81112aa6ec6c95a7e7b1ae00929fb73e7ebee0f3faaeabad2f", size = 408825, upload-time = "2025-07-01T15:55:24.207Z" }, + { url = "https://files.pythonhosted.org/packages/38/bc/1fc173edaaa0e52c94b02a655db20697cb5fa954ad5a8e15a2c784c5cbdd/rpds_py-0.26.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24a4146ccb15be237fdef10f331c568e1b0e505f8c8c9ed5d67759dac58ac246", size = 387292, upload-time = "2025-07-01T15:55:25.554Z" }, + { url = "https://files.pythonhosted.org/packages/7c/eb/3a9bb4bd90867d21916f253caf4f0d0be7098671b6715ad1cead9fe7bab9/rpds_py-0.26.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9a63785467b2d73635957d32a4f6e73d5e4df497a16a6392fa066b753e87387", size = 420435, upload-time = "2025-07-01T15:55:27.798Z" }, + { url = "https://files.pythonhosted.org/packages/cd/16/e066dcdb56f5632713445271a3f8d3d0b426d51ae9c0cca387799df58b02/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de4ed93a8c91debfd5a047be327b7cc8b0cc6afe32a716bbbc4aedca9e2a83af", size = 562410, upload-time = "2025-07-01T15:55:29.057Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/ddbdec7eb82a0dc2e455be44c97c71c232983e21349836ce9f272e8a3c29/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:caf51943715b12af827696ec395bfa68f090a4c1a1d2509eb4e2cb69abbbdb33", size = 590724, upload-time = "2025-07-01T15:55:30.719Z" }, + { url = "https://files.pythonhosted.org/packages/2c/b4/95744085e65b7187d83f2fcb0bef70716a1ea0a9e5d8f7f39a86e5d83424/rpds_py-0.26.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4a59e5bc386de021f56337f757301b337d7ab58baa40174fb150accd480bc953", size = 558285, upload-time = "2025-07-01T15:55:31.981Z" }, + { url = "https://files.pythonhosted.org/packages/37/37/6309a75e464d1da2559446f9c811aa4d16343cebe3dbb73701e63f760caa/rpds_py-0.26.0-cp314-cp314-win32.whl", hash = "sha256:92c8db839367ef16a662478f0a2fe13e15f2227da3c1430a782ad0f6ee009ec9", size = 223459, upload-time = "2025-07-01T15:55:33.312Z" }, + { url = "https://files.pythonhosted.org/packages/d9/6f/8e9c11214c46098b1d1391b7e02b70bb689ab963db3b19540cba17315291/rpds_py-0.26.0-cp314-cp314-win_amd64.whl", hash = "sha256:b0afb8cdd034150d4d9f53926226ed27ad15b7f465e93d7468caaf5eafae0d37", size = 236083, upload-time = "2025-07-01T15:55:34.933Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/af/9c4638994dd623d51c39892edd9d08e8be8220a4b7e874fa02c2d6e91955/rpds_py-0.26.0-cp314-cp314-win_arm64.whl", hash = "sha256:ca3f059f4ba485d90c8dc75cb5ca897e15325e4e609812ce57f896607c1c0867", size = 223291, upload-time = "2025-07-01T15:55:36.202Z" }, + { url = "https://files.pythonhosted.org/packages/4d/db/669a241144460474aab03e254326b32c42def83eb23458a10d163cb9b5ce/rpds_py-0.26.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5afea17ab3a126006dc2f293b14ffc7ef3c85336cf451564a0515ed7648033da", size = 361445, upload-time = "2025-07-01T15:55:37.483Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2d/133f61cc5807c6c2fd086a46df0eb8f63a23f5df8306ff9f6d0fd168fecc/rpds_py-0.26.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:69f0c0a3df7fd3a7eec50a00396104bb9a843ea6d45fcc31c2d5243446ffd7a7", size = 347206, upload-time = "2025-07-01T15:55:38.828Z" }, + { url = "https://files.pythonhosted.org/packages/05/bf/0e8fb4c05f70273469eecf82f6ccf37248558526a45321644826555db31b/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:801a71f70f9813e82d2513c9a96532551fce1e278ec0c64610992c49c04c2dad", size = 380330, upload-time = "2025-07-01T15:55:40.175Z" }, + { url = "https://files.pythonhosted.org/packages/d4/a8/060d24185d8b24d3923322f8d0ede16df4ade226a74e747b8c7c978e3dd3/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df52098cde6d5e02fa75c1f6244f07971773adb4a26625edd5c18fee906fa84d", size = 392254, upload-time = "2025-07-01T15:55:42.015Z" }, + { url = "https://files.pythonhosted.org/packages/b9/7b/7c2e8a9ee3e6bc0bae26bf29f5219955ca2fbb761dca996a83f5d2f773fe/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bc596b30f86dc6f0929499c9e574601679d0341a0108c25b9b358a042f51bca", size = 516094, upload-time = "2025-07-01T15:55:43.603Z" }, + { url = "https://files.pythonhosted.org/packages/75/d6/f61cafbed8ba1499b9af9f1777a2a199cd888f74a96133d8833ce5eaa9c5/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dfbe56b299cf5875b68eb6f0ebaadc9cac520a1989cac0db0765abfb3709c19", size = 402889, upload-time = "2025-07-01T15:55:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/92/19/c8ac0a8a8df2dd30cdec27f69298a5c13e9029500d6d76718130f5e5be10/rpds_py-0.26.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac64f4b2bdb4ea622175c9ab7cf09444e412e22c0e02e906978b3b488af5fde8", size = 384301, upload-time = "2025-07-01T15:55:47.098Z" }, + { url = "https://files.pythonhosted.org/packages/41/e1/6b1859898bc292a9ce5776016c7312b672da00e25cec74d7beced1027286/rpds_py-0.26.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ef9b6bbf9845a264f9aa45c31836e9f3c1f13be565d0d010e964c661d1e2b", size = 412891, upload-time = "2025-07-01T15:55:48.412Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b9/ceb39af29913c07966a61367b3c08b4f71fad841e32c6b59a129d5974698/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:49028aa684c144ea502a8e847d23aed5e4c2ef7cadfa7d5eaafcb40864844b7a", size = 557044, upload-time = "2025-07-01T15:55:49.816Z" }, + { url = "https://files.pythonhosted.org/packages/2f/27/35637b98380731a521f8ec4f3fd94e477964f04f6b2f8f7af8a2d889a4af/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e5d524d68a474a9688336045bbf76cb0def88549c1b2ad9dbfec1fb7cfbe9170", size = 585774, upload-time = "2025-07-01T15:55:51.192Z" }, + 
{ url = "https://files.pythonhosted.org/packages/52/d9/3f0f105420fecd18551b678c9a6ce60bd23986098b252a56d35781b3e7e9/rpds_py-0.26.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c1851f429b822831bd2edcbe0cfd12ee9ea77868f8d3daf267b189371671c80e", size = 554886, upload-time = "2025-07-01T15:55:52.541Z" }, + { url = "https://files.pythonhosted.org/packages/6b/c5/347c056a90dc8dd9bc240a08c527315008e1b5042e7a4cf4ac027be9d38a/rpds_py-0.26.0-cp314-cp314t-win32.whl", hash = "sha256:7bdb17009696214c3b66bb3590c6d62e14ac5935e53e929bcdbc5a495987a84f", size = 219027, upload-time = "2025-07-01T15:55:53.874Z" }, + { url = "https://files.pythonhosted.org/packages/75/04/5302cea1aa26d886d34cadbf2dc77d90d7737e576c0065f357b96dc7a1a6/rpds_py-0.26.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f14440b9573a6f76b4ee4770c13f0b5921f71dde3b6fcb8dabbefd13b7fe05d7", size = 232821, upload-time = "2025-07-01T15:55:55.167Z" }, +] + +[[package]] +name = "ruff" +version = "0.12.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/81/0bd3594fa0f690466e41bd033bdcdf86cba8288345ac77ad4afbe5ec743a/ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71", size = 5197814, upload-time = "2025-07-29T22:32:35.877Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/d2/6cb35e9c85e7a91e8d22ab32ae07ac39cc34a71f1009a6f9e4a2a019e602/ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303", size = 11852189, upload-time = "2025-07-29T22:31:41.281Z" }, + { url = "https://files.pythonhosted.org/packages/63/5b/a4136b9921aa84638f1a6be7fb086f8cad0fde538ba76bda3682f2599a2f/ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb", size = 12519389, upload-time = "2025-07-29T22:31:54.265Z" }, + { url = "https://files.pythonhosted.org/packages/a8/c9/3e24a8472484269b6b1821794141f879c54645a111ded4b6f58f9ab0705f/ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3", size = 11743384, upload-time = "2025-07-29T22:31:59.575Z" }, + { url = "https://files.pythonhosted.org/packages/26/7c/458dd25deeb3452c43eaee853c0b17a1e84169f8021a26d500ead77964fd/ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860", size = 11943759, upload-time = "2025-07-29T22:32:01.95Z" }, + { url = "https://files.pythonhosted.org/packages/7f/8b/658798472ef260ca050e400ab96ef7e85c366c39cf3dfbef4d0a46a528b6/ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c", size = 11654028, upload-time = "2025-07-29T22:32:04.367Z" }, + { url = "https://files.pythonhosted.org/packages/a8/86/9c2336f13b2a3326d06d39178fd3448dcc7025f82514d1b15816fe42bfe8/ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423", size = 13225209, upload-time = "2025-07-29T22:32:06.952Z" }, + { url = "https://files.pythonhosted.org/packages/76/69/df73f65f53d6c463b19b6b312fd2391dc36425d926ec237a7ed028a90fc1/ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb", size = 14182353, upload-time = "2025-07-29T22:32:10.053Z" }, + { 
url = "https://files.pythonhosted.org/packages/58/1e/de6cda406d99fea84b66811c189b5ea139814b98125b052424b55d28a41c/ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd", size = 13631555, upload-time = "2025-07-29T22:32:12.644Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ae/625d46d5164a6cc9261945a5e89df24457dc8262539ace3ac36c40f0b51e/ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e", size = 12667556, upload-time = "2025-07-29T22:32:15.312Z" }, + { url = "https://files.pythonhosted.org/packages/55/bf/9cb1ea5e3066779e42ade8d0cd3d3b0582a5720a814ae1586f85014656b6/ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606", size = 12939784, upload-time = "2025-07-29T22:32:17.69Z" }, + { url = "https://files.pythonhosted.org/packages/55/7f/7ead2663be5627c04be83754c4f3096603bf5e99ed856c7cd29618c691bd/ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8", size = 11771356, upload-time = "2025-07-29T22:32:20.134Z" }, + { url = "https://files.pythonhosted.org/packages/17/40/a95352ea16edf78cd3a938085dccc55df692a4d8ba1b3af7accbe2c806b0/ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa", size = 11612124, upload-time = "2025-07-29T22:32:22.645Z" }, + { url = "https://files.pythonhosted.org/packages/4d/74/633b04871c669e23b8917877e812376827c06df866e1677f15abfadc95cb/ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5", size = 12479945, upload-time = "2025-07-29T22:32:24.765Z" }, + { url = "https://files.pythonhosted.org/packages/be/34/c3ef2d7799c9778b835a76189c6f53c179d3bdebc8c65288c29032e03613/ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4", size = 12998677, upload-time = "2025-07-29T22:32:27.022Z" }, + { url = "https://files.pythonhosted.org/packages/77/ab/aca2e756ad7b09b3d662a41773f3edcbd262872a4fc81f920dc1ffa44541/ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77", size = 11756687, upload-time = "2025-07-29T22:32:29.381Z" }, + { url = "https://files.pythonhosted.org/packages/b4/71/26d45a5042bc71db22ddd8252ca9d01e9ca454f230e2996bb04f16d72799/ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f", size = 12912365, upload-time = "2025-07-29T22:32:31.517Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9b/0b8aa09817b63e78d94b4977f18b1fcaead3165a5ee49251c5d5c245bb2d/ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69", size = 11982083, upload-time = "2025-07-29T22:32:33.881Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.32.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/96/0834f30fa08dca3738614e6a9d42752b6420ee94e58971d702118f7cfd30/virtualenv-20.32.0.tar.gz", hash = "sha256:886bf75cadfdc964674e6e33eb74d787dff31ca314ceace03ca5810620f4ecf0", size = 6076970, upload-time = "2025-07-21T04:09:50.985Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/c6/f8f28009920a736d0df434b52e9feebfb4d702ba942f15338cb4a83eafc1/virtualenv-20.32.0-py3-none-any.whl", hash = "sha256:2c310aecb62e5aa1b06103ed7c2977b81e042695de2697d01017ff0f1034af56", size = 6057761, upload-time = "2025-07-21T04:09:48.059Z" }, +]