diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dbd7fa7..3e339fb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,43 +16,29 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Cache pip packages - uses: actions/cache@v4 + - name: Install uv + uses: astral-sh/setup-uv@v5 with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt', '**/requirements-dev.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Install production dependencies - run: | - python -m pip install --upgrade pip - pip install --no-cache-dir --prefer-binary -r requirements.txt + enable-cache: true + cache-dependency-glob: "uv.lock" - - name: Install development dependencies - run: | - pip install --no-cache-dir --prefer-binary -r requirements-dev.txt + - name: Set up Python ${{ matrix.python-version }} + run: uv python install ${{ matrix.python-version }} - - name: Install project in editable mode - run: | - pip install -e . + - name: Install dependencies + run: uv sync --all-extras - name: Run pre-commit hooks run: | - pip install pre-commit - pre-commit run --all-files + uv run pre-commit install + uv run pre-commit run --all-files timeout-minutes: 10 - name: Run Pytest - run: pytest + run: uv run pytest - name: Run Ruff (linter) - run: ruff check . + run: uv run ruff check . - name: Run Mypy (type checking) - run: mypy . \ No newline at end of file + run: uv run mypy . 
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..ce05d25 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,64 @@ +name: Release + +on: + push: + tags: + - "v*" + workflow_dispatch: + +jobs: + publish: + name: Build and publish to PyPI + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/academic-paperweight + permissions: + id-token: write + contents: write + + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: Set up Python + run: uv python install 3.11 + + - name: Install dependencies + run: uv sync --all-extras + + - name: Validate tag matches project version + if: startsWith(github.ref, 'refs/tags/') + run: | + TAG="${GITHUB_REF_NAME}" + VERSION="$(uv run python -c "import tomllib; print(tomllib.load(open('pyproject.toml','rb'))['project']['version'])")" + if [ "v${VERSION}" != "${TAG}" ]; then + echo "Tag ${TAG} does not match project version v${VERSION}" + exit 1 + fi + + - name: Run lint + run: uv run ruff check src tests + + - name: Run tests + run: uv run pytest + + - name: Build distribution + run: uv build + + - name: Create GitHub Release + uses: softprops/action-gh-release@v2 + if: startsWith(github.ref, 'refs/tags/') + with: + files: | + dist/*.whl + dist/*.tar.gz + generate_release_notes: true + + - name: Publish to PyPI + run: uv publish diff --git a/.gitignore b/.gitignore index 5a5d605..680ca4b 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,10 @@ *.log config.yaml last_processed_date.txt +data/ +artifacts/ +.DS_Store +.coverage # Build artifacts build/ @@ -14,4 +18,4 @@ dist/ **/__pycache__ .pytest_cache .ruff_cache -.mypy_cache \ No newline at end of file +.mypy_cache diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2542c46..ba919c0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,4 +9,5 
@@ repos: rev: v1.11.2 hooks: - id: mypy - additional_dependencies: [types-PyYAML, types-requests] \ No newline at end of file + additional_dependencies: [types-PyYAML, types-requests] + exclude: ^src/mocks/ diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..cdf6459 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,79 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +## [0.2.0] - 2026-02-14 + +### Added +- Database support with PostgreSQL integration for paper storage +- Local arXiv mirror for offline integration testing +- Comprehensive test suite with integration and unit tests +- Deterministic text digest rendering for stdout/file workflows +- Atom feed rendering for secondary delivery workflows +- AI-first title/abstract triage stage with rationale/score annotations +- Content hydration helper to fetch full text only for shortlisted papers +- Minimalist CLI subcommands: `run`, `init`, and `doctor` +- Dedicated CLI ergonomics reference (`docs/CLI.md`) +- Small CLI workflow integration tests (`run` stdout/atom + `doctor`) +- JSON delivery mode for script-friendly output +- Output capping via `--max-items` +- Strict doctor mode (`doctor --strict`) for release/CI gating +- Trusted publishing workflow for release tags (`.github/workflows/release.yml`) + +### Changed +- Migrated project to uv for dependency management +- Refactored scraper to use the official `arxiv` Python library +- Restructured test suite with separate integration and unit test directories +- Updated core logic and notifier components +- CLI delivery modes now support `stdout` (default), `atom`, and optional `email` +- Configuration validation now treats notifier/email as optional unless email delivery is used +- Roadmap and docs rewritten around a 
simplified v0.2 direction +- Main pipeline now runs metadata triage before expensive content extraction +- `paperweight` now defaults to `run` for backward-compatible invocation +- README/CLI/FAQ docs audited and aligned to current behavior +- Roadmap rewritten with quantifiable usefulness and release metrics + +### Fixed +- Improved error handling throughout the codebase + +## [0.1.2] - 2025-03-23 + +### Added +- GitHub Actions CI pipeline for automated testing +- Comprehensive docstrings across the codebase +- Expanded README with detailed background and architecture documentation +- Detailed roadmap documentation + +### Changed +- Improved test resilience across different environments +- Updated pre-commit hook handling + +### Fixed +- pytest configuration for proper import resolution +- Config tests to be more environment agnostic + +## [0.1.1] - 2024-09-12 + +### Changed +- Increased minimum Python version to 3.10 + +## [0.1.0] - 2024-XX-XX + +### Added +- Initial release +- arXiv paper fetching and filtering +- Keyword-based relevance scoring +- LLM-powered summarization (OpenAI and Gemini) +- Email notification system +- YAML-based configuration + +[Unreleased]: https://github.com/seanbrar/paperweight/compare/v0.2.0...HEAD +[0.2.0]: https://github.com/seanbrar/paperweight/compare/v0.1.2...v0.2.0 +[0.1.2]: https://github.com/seanbrar/paperweight/compare/v0.1.1...v0.1.2 +[0.1.1]: https://github.com/seanbrar/paperweight/compare/v0.1.0...v0.1.1 +[0.1.0]: https://github.com/seanbrar/paperweight/releases/tag/v0.1.0 diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..0463ae4 --- /dev/null +++ b/Makefile @@ -0,0 +1,38 @@ +.PHONY: install test test-cov lint typecheck format clean build all + +# Development setup +install: + uv sync --all-extras + +# Run tests +test: + uv run pytest + +# Run tests with coverage +test-cov: + uv run pytest --cov=paperweight --cov-report=term-missing + +# Lint code +lint: + uv run ruff check src tests + +# Type check +typecheck: 
+ uv run mypy src/paperweight + +# Format code +format: + uv run ruff format src tests + uv run ruff check --fix src tests + +# Clean build artifacts +clean: + rm -rf build/ dist/ *.egg-info/ .pytest_cache/ .mypy_cache/ .ruff_cache/ + find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true + +# Build distribution +build: clean + uv build + +# Run all checks (lint, typecheck, test) +all: lint typecheck test diff --git a/README.md b/README.md index 59a8fe2..16a573f 100644 --- a/README.md +++ b/README.md @@ -1,182 +1,104 @@ # paperweight -A scalable system for retrieving, filtering, and summarizing academic papers from arXiv based on user preferences, with customizable notifications. +[![PyPI](https://img.shields.io/pypi/v/academic-paperweight)](https://pypi.org/project/academic-paperweight/) +[![GitHub License](https://img.shields.io/badge/license-MIT-green)](LICENSE) +[![Python 3.11+](https://img.shields.io/badge/python-3.11+-blue.svg)](https://www.python.org/downloads/) -## Features +paperweight is an arXiv triage CLI. +It fetches recent papers, filters for relevance, and produces a digest you can read in minutes. -- **ArXiv Integration**: Fetches recent papers from arXiv using their API, ensuring up-to-date access to the latest research. -- **Customizable Filtering**: Filters papers based on user-defined preferences, including keywords, categories, and exclusion criteria. -- **Intelligent Summarization** (BETA): Generates concise summaries or extracts abstracts, providing quick insights into paper content. -- **Flexible Notification System**: Notifies users via email, with potential for expansion to other notification methods. -- **Configurable Settings**: Allows users to fine-tune the application's behavior through a YAML configuration file. +## Why this exists -## System Architecture +Checking arXiv directly is great for discovery. 
paperweight is for a different job: -``` -┌───────────────┐ ┌───────────────┐ ┌───────────────┐ ┌───────────────┐ -│ SCRAPER │────▶│ PROCESSOR │────▶│ ANALYZER │────▶│ NOTIFIER │ -└───────────────┘ └───────────────┘ └───────────────┘ └───────────────┘ - │ │ │ │ - ▼ ▼ ▼ ▼ -┌───────────────┐ ┌───────────────┐ ┌───────────────┐ ┌───────────────┐ -│ arXiv API & │ │ Scoring & │ │ Abstract │ │ Email & │ -│ PDF Processing│ │ Filtering │ │ Extraction │ │ Templating │ -└───────────────┘ └───────────────┘ └───────────────┘ └───────────────┘ -``` +- keep your daily list short +- rank by your interests +- make output scriptable (`stdout`, `json`, `atom`) +- run the same way every day -## Table of Contents -- [Getting Started](#getting-started) -- [Installation](#installation) -- [Quick Start](#quick-start) -- [Usage](#usage) -- [Configuration](#configuration) -- [FAQ and Troubleshooting](#faq-and-troubleshooting) -- [Technical Details](#technical-details) -- [Roadmap](#roadmap) -- [Glossary](#glossary) -- [License](#license) -- [Contributing](#contributing) -- [Acknowledgments](#acknowledgments) - -## Getting Started - -### Prerequisites - -- Python 3.10 or higher -- Required Python packages: - - pypdf - For PDF document processing - - python-dotenv - For environment variable management - - PyYAML - For configuration parsing - - requests - For API communication - - simplerllm - For LLM integration - - tenacity - For resilient API interactions - - tiktoken - For token counting - -## Installation - -1. Clone the repository: - ``` - git clone https://github.com/seanbrar/paperweight.git - cd paperweight - ``` - -2. Install the package: - ``` - pip install . - ``` - -## Quick Start - -1. Copy `config-base.yaml` to `config.yaml` and edit it with your preferences. -2. 
Create a `.env` file in the project root and add your API keys (if using the summarization functionality): - ``` - OPENAI_API_KEY=your_openai_api_key_here - GEMINI_API_KEY=your_gemini_api_key_here - ``` - Note: While .env files are supported for local development, using system environment variables is recommended for enhanced security in production environments. -3. **Important:** Configure valid SMTP settings in `config.yaml` for email notifications. -4. Run the application: - ``` - paperweight - ``` - -Note: paperweight uses a `last_processed_date.txt` file to track when it last processed papers, ensuring efficient updates on subsequent runs. - -## Usage - -After installation, you can run paperweight from anywhere using: +## Install +```bash +pip install academic-paperweight ``` -paperweight -``` - -Recommended usage: Run paperweight daily for optimal paper tracking. Automatic scheduling is not currently built-in. - -Note: Runtime may vary based on the number of categories, papers, and whether summarization is enabled. Check the log file for progress updates during execution. -### Command-line Arguments +From source: -- `--force-refresh`: Forces paperweight to fetch and process papers regardless of the last processed date. - -## Configuration - -For detailed information on configuration options, please see the [configuration guide](docs/CONFIGURATION.md). +```bash +git clone https://github.com/seanbrar/paperweight.git +cd paperweight +uv sync --all-extras +source .venv/bin/activate +``` -For details on environment variables and handling sensitive information, refer to the [environment variables guide](docs/ENVIRONMENT_VARIABLES.md). 
+## Quick start (works without API keys) -## FAQ and Troubleshooting +```bash +paperweight init +paperweight doctor +paperweight run --force-refresh +``` -For quick solutions to common issues: +Notes: -- **Email Notifications Not Sending**: Ensure your email configuration is correct and that you've allowed less secure app access if using Gmail. -- **Paper Content Not Downloading**: Check your internet connection and verify that the arXiv API is accessible from your network. +- `init` writes `config.yaml` with safe defaults. +- default analyzer mode is `abstract` (no summarization API key required). +- triage can run with heuristic fallback if no key is present. -For a comprehensive list of frequently asked questions, including setup instructions, usage details, and troubleshooting steps, please refer to the [FAQ](docs/FAQ.md). +## CLI -If you can't find an answer to your question or solution to your problem in the FAQ, please [open an issue](https://github.com/seanbrar/paperweight/issues) on GitHub. +```bash +paperweight [run-options] +paperweight run [run-options] +paperweight init [--config PATH] [--force] +paperweight doctor [--config PATH] [--strict] +``` -## Technical Details +Examples: -### Processing Pipeline +```bash +# default plain-text digest to stdout +paperweight -paperweight processes papers through four main stages: +# JSON output for scripts +paperweight run --delivery json --output ./paperweight.json --max-items 20 -1. **Scraping** (`scraper.py`): Fetches recent papers from arXiv's API based on user-defined categories and processes the PDF/LaTeX content. +# Atom feed output +paperweight run --delivery atom --output ./paperweight.xml -2. **Processing** (`processor.py`): Calculates relevance scores based on keyword matching, with weights for title, abstract, and content matches, plus handling of exclusion keywords. +# optional email delivery (requires notifier.email config) +paperweight run --delivery email -3. 
**Analysis** (`analyzer.py`): Either extracts the abstract or generates a summary using an LLM (OpenAI or Gemini), with configurable options. +# strict checks for CI/release gates +paperweight doctor --strict +``` -4. **Notification** (`notifier.py`): Formats the filtered papers and sends them via email, with options for sorting by relevance, date, or title. +Detailed command behavior: `docs/CLI.md` -### Resilience Features +## Configuration -- **Retry Logic**: Uses the `tenacity` library to implement exponential backoff for API calls -- **Error Handling**: Comprehensive error catching and logging throughout the codebase -- **State Persistence**: Maintains processing state between runs using the `last_processed_date.txt` file +Core sections: -### Performance Considerations +- `arxiv`: categories and max results +- `triage`: shortlist gate (title + abstract) +- `processor`: scoring config +- `analyzer`: `abstract` or `summary` +- `logging` +- `notifier` (optional, only for email) -- **Token Counting**: Uses `tiktoken` to accurately count tokens for LLM context management -- **Configurable Limits**: Allows setting maximum papers per category to control processing time -- **Incremental Processing**: Only fetches papers published since the last run +See: `docs/CONFIGURATION.md` ## Roadmap -Key upcoming features: -- Implement machine learning-based paper recommendations -- Add support for additional academic paper sources -- Expand notification methods -- Enhance batch processing capabilities +See `docs/ROADMAP.md` for quantified release goals and forward plan. -For a full list of proposed features and planned enhancements, see the detailed [roadmap](docs/ROADMAP.md). +## Development -## Glossary - -- **arXiv**: An open-access repository of electronic preprints for scientific papers. -- **API**: Application Programming Interface; a way for different software to communicate. -- **YAML**: A human-readable data serialization format used for configuration files. 
-- **SMTP**: Simple Mail Transfer Protocol; used for sending emails. -- **LLM**: Large Language Model; an AI model used for text generation and analysis. -- **Embedding**: A numerical representation of text that captures semantic meaning. -- **Token**: A unit of text processed by language models, roughly corresponding to 4 characters. +```bash +make lint +make test +``` ## License -This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. - -## Contributing - -Contributions are welcome! If you're interested in contributing to paperweight, please refer to the [contributing guide](docs/CONTRIBUTING.md) for detailed information on: - -- Setting up the development environment -- Running tests -- Our coding standards -- The pull request process - -We appreciate all forms of contribution, from code to documentation to bug reports. Thank you for helping to improve paperweight! - -## Acknowledgments - -- arXiv for providing the API -- [simplerllm](https://github.com/hassancs91/SimplerLLM) for the LLM interface \ No newline at end of file +MIT. See `LICENSE`. diff --git a/TESTING.md b/TESTING.md new file mode 100644 index 0000000..8de9866 --- /dev/null +++ b/TESTING.md @@ -0,0 +1,349 @@ +# paperweight Testing Guide + +**Based on Minimal Tests, Maximum Trust v0.1.0** + +A testing standard that prioritizes signal over coverage, architecture over assertion, and clarity over volume. + +--- + +## About This Document + +This guide adopts the Minimal Tests, Maximum Trust specification for paperweight. It is divided into two parts: + +- **Part I: Testing Philosophy** — Core principles for deciding what to test +- **Part II: Testing Structure** — Recommended organization for test files (optional) + +For extended rationale behind these principles, see the [Minimal Tests, Maximum Trust Manifesto](https://github.com/seanbrar/minimal-tests-maximum-trust/blob/main/MANIFESTO.md) (optional background reading). 
+ +--- + +## Table of Contents + +- [About This Document](#about-this-document) +- [License](#license) +- [Part I: Testing Philosophy](#part-i-testing-philosophy) + - [Purpose](#purpose) + - [Principles](#principles) + - [What This Is Not](#what-this-is-not) + - [Test Categories](#test-categories) + - [Evaluating Existing Tests](#evaluating-existing-tests) + - [On Coverage Metrics](#on-coverage-metrics) +- [Part II: Testing Structure (Recommended)](#part-ii-testing-structure-recommended) + - [Tenets](#tenets) + - [Defining Boundaries](#defining-boundaries) + - [Test Placement](#test-placement) + - [Project Conventions (Optional)](#project-conventions-optional) + - [Known Limitations](#known-limitations) + - [Relationship Between Parts](#relationship-between-parts) +- [Adapting This Specification](#adapting-this-specification) +- [Changelog](#changelog) + +--- + +## License + +This specification is released under [CC0 1.0](https://creativecommons.org/publicdomain/zero/1.0/). Copy, adapt, and redistribute without restriction. + +--- + +## Part I: Testing Philosophy + +### Purpose + +A test suite should produce meaningful information about system correctness. Tests that fail should indicate genuine problems. Tests that pass should inspire confidence. Tests that do neither are overhead. + +These principles guide decisions about what to test, what to leave untested, and how to evaluate the usefulness of existing tests. + +--- + +### Principles + +#### 1. Signal over coverage + +Coverage measures what tests *execute*, not what *requires* testing. A codebase at 95% coverage with hollow tests is in worse shape than one at 70% coverage with tests that catch real bugs. + +Prioritize tests that catch real problems over tests that merely touch code. + +#### 2. Trust the architecture + +Well-designed systems prevent invalid states through structural means: type constraints, immutable data structures, factory methods with validation, and compile-time or static-analysis checks. 
+ +> Examples in this section use Python constructs. The underlying principles apply to any language with equivalent mechanisms—record types, sealed classes, builder patterns with validation, etc. + +When the architecture prevents a class of errors, tests verifying that prevention add maintenance burden without adding safety. Examples: + +| Architectural guarantee | Why testing is unnecessary | +|------------------------|---------------------------| +| Immutable data structures | Mutation is impossible by construction | +| Factory methods with validation | Invalid objects cannot be created | +| Type constraints (e.g., literals, enums) | Static analysis rejects invalid values | +| Post-initialization validation | Invalid inputs are rejected before objects exist | + +Tests should focus on behavior that the architecture does not already guarantee. + +#### 3. Test at boundaries + +The most valuable tests operate at system boundaries: + +- Where external input enters the system +- Where output leaves the system +- Where subsystems integrate +- Where the system interacts with external dependencies + +Interior logic—when deterministic, type-safe, and exercised through boundary tests—often does not require dedicated unit tests. Bugs in interior modules surface through boundary tests; dedicated interior tests may duplicate coverage without adding signal. + +#### 4. Regression-driven growth + +Tests written in response to actual bugs have demonstrated value. They encode specific failure modes that occurred in practice. + +New code should have minimal smoke tests verifying basic functionality. Additional targeted tests should be added when bugs are discovered. Let the suite grow from experience, not speculation. + +#### 5. Complexity indicates design feedback + +When code requires extensive testing to achieve confidence, the code itself may be too complex. 
A high density of tests around a module can indicate: + +- Too many code paths +- Implicit state that is difficult to reason about +- Unclear contracts between components + +Before adding tests to manage complexity, consider whether the code could be simplified. + +#### 6. Intentional gaps + +Not all code requires dedicated tests. However, untested code should be intentional, not accidental. + +**Valid reasons to omit tests:** + +- The architecture prevents the failure mode +- The code is exercised transitively through boundary tests +- The code is trivial delegation with no logic +- The code is deterministic and tested through integration + +**Invalid reasons to omit tests:** + +- The code appears simple +- The author is confident it works +- Testing will be added later + +When code is intentionally untested, the rationale should be articulable. If it cannot be explained structurally, the code should be tested. + +--- + +### What This Is Not + +This specification is not permission to: + +- **Skip tests because the code seems simple.** Simplicity is not a structural argument. +- **Ignore coverage entirely.** Coverage is a useful signal—just not the goal. +- **Delete tests without justification.** Every removal should be explainable. +- **Avoid testing new code.** New code gets smoke tests; targeted tests follow bugs. + +The goal is intentional testing, not absent testing. + +--- + +### Test Categories + +Tests serve different purposes. 
The following categories are ordered by typical value: + +| Category | Purpose | When to write | +|----------|---------|---------------| +| **Boundary tests** | Verify behavior at system entry and exit points | Always, for each significant boundary | +| **Regression tests** | Prevent recurrence of specific bugs | After every bug caught late or reaching production | +| **Contract tests** | Verify architectural invariants spanning modules | When invariants affect correctness and cannot be verified by inspection | +| **Characterization tests** | Capture output format when stability matters | When output is consumed externally or drift is hard to detect | +| **Unit tests** | Verify isolated complex logic | When boundary tests do not adequately cover edge cases | + +Integration and boundary tests form the backbone of a well-designed suite. Unit tests are appropriate for genuinely complex logic but should be the exception, not the rule. + +--- + +### Evaluating Existing Tests + +When reviewing a test suite, ask of each test: + +1. **Does it provide signal?** If it failed, would you investigate the code or adjust the assertion? +2. **Does it duplicate coverage?** Is the same behavior verified elsewhere? +3. **Does it verify a language guarantee?** The runtime already enforces immutability, enum semantics, and similar constraints. +4. **Does it encode implementation rather than behavior?** Would a refactor that preserves behavior break this test? + +Tests answering "no" to the first question, or "yes" to the others, are candidates for removal or consolidation. + +--- + +### On Coverage Metrics + +Coverage is a diagnostic tool, not a goal. + +When reporting or evaluating coverage: + +- Distinguish between code that *should* be tested and code that is *protected by architecture* +- Use coverage exclusions with explanations for intentional gaps +- Communicate coverage in context—explain why, not just what + +High coverage with low-signal tests provides false confidence. 
Moderate coverage with high-signal tests provides genuine protection. + +--- + +## Part II: Testing Structure (Recommended) + +This section describes an organizational approach called **Boundary-First Flat Structure**. It complements the testing philosophy but is not required. Projects with different organizational needs may adopt Part I without Part II. + +--- + +### Tenets + +#### 1. One file per boundary + +A boundary is where user input enters, output leaves, or subsystems integrate. Each boundary gets one test file—not one per source module, not one per test category. + +#### 2. Flat over nested + +No directory hierarchy mirroring source structure. To find tests for a module, search by boundary responsibility, not by path. + +#### 3. Markers over directories + +Test *type* (unit, integration, contract, api) is expressed via test framework markers, not directory placement. Select tests by marker regardless of which file contains them. + +#### 4. Interior modules do not need dedicated tests + +Handlers, adapters, and internal logic are tested through their boundary. A bug in an interior module surfaces in the boundary test that exercises it. + +--- + +### Defining Boundaries + +Boundaries are specific to each project. paperweight adopts the following boundaries. + + + +| Boundary | Responsibility | +|----------|----------------| +| Configuration | YAML + environment expansion, validation, API key resolution | +| CLI & Pipeline | CLI args into end-to-end orchestration and error handling | +| arXiv Ingestion | arXiv queries, content download, source/PDF extraction | +| Processing | Relevance scoring, filtering, normalization | +| Analysis | Abstract extraction or LLM summarization | +| Notifications | Email formatting and SMTP delivery | +| Persistence | Postgres run tracking, papers/scores/summaries | +| Local Mirror | Offline arXiv dataset + mock client for deterministic runs | + + + +New boundaries should be rare. 
If you're creating a new test file, first ask whether the test belongs to an existing boundary. + +--- + +### Test Placement + +For any new test, determine placement by boundary responsibility. + + + +``` +Is this an architectural invariant that spans modules? + → Contract tests + +Does this exercise the CLI entrypoint or full pipeline flow? + → Pipeline boundary tests + +Does this validate config loading, env expansion, or required keys? + → Configuration boundary tests + +Does this fetch arXiv metadata/content or extract text? + → arXiv ingestion boundary tests + +Does this score, filter, or normalize relevance? + → Processing boundary tests + +Does this extract abstracts or summarize via LLM providers? + → Analysis boundary tests + +Does this format or send notifications? + → Notifications boundary tests + +Does this persist or query Postgres run data? + → Persistence boundary tests + +Does this use the local mirror or mock arXiv client? + → Local mirror boundary tests + +Does this require a real external service (DB, SMTP, live arXiv)? + → Separate directory, gated by marker and environment + +None of the above? + → Probably belongs in an existing boundary file. + If genuinely new, justify the new boundary. +``` + + + +--- + +### Project Conventions (Optional) + + + +- Keep live external calls opt-in; mark them and gate with environment flags. +- Prefer the local mirror dataset for deterministic scraping behavior. +- Regression tests include a brief note on the failure mode they prevent. + + + +--- + +### Known Limitations + +**Scale**: The flat structure is designed for small-to-medium projects with well-defined boundaries. Large projects with many boundaries may find that one file per boundary becomes unwieldy. Part II is optional for this reason—such projects may adopt Part I while organizing tests differently. 
+ +--- + +### Relationship Between Parts + +Part I (Philosophy) answers: *Should I write this test?* + +Part II (Structure) answers: *Where does it go?* + +The philosophy provides principles applicable to any project. The structure applies those principles through a specific organizational approach suited to projects with well-defined boundaries. + +--- + +## Adapting This Specification + +The Minimal Tests, Maximum Trust specification is designed to be adopted by projects and adapted to their specific boundaries. The following guidelines define MTMT compliance. + +### Compliance Requirements + +**Part I (Testing Philosophy)** — Preserve unchanged. These principles define what it means to follow this standard. The sections from "Purpose" through "On Coverage Metrics" are normative. + +**Part II (Testing Structure)** — If adopting Part II: +- **Tenets**: Preserve unchanged. These define the Boundary-First Flat Structure approach. +- **Defining Boundaries**: Replace the example table with your project's actual boundaries. +- **Test Placement**: Replace the example decision tree with one tailored to your boundaries. +- **Project Conventions**: Add your project-specific guidance here (optional). +- **Known Limitations** and **Relationship Between Parts**: Preserve unchanged. + +If Part II does not fit your project, you may omit it entirely while remaining Part I compliant. + +### Instructional Text + +Sentences guiding the adoption process (e.g., "replace the table below," "adapt to your system's boundaries") are scaffolding. Remove or adapt them in your project's version. Only the substantive content of each section must be preserved—not the instructions for how to customize it. + +### Version Reference + +Preserve the version number (`v0.1.0`) in your adopted document. This indicates which iteration of the standard your project follows. 
+ +If you customize the document title, include the version reference in the header: + +> # [Project Name] Testing Guide +> **Based on Minimal Tests, Maximum Trust v0.1.0** + +When the specification is updated, you may choose to update your adoption or remain on the previous version. + +--- + +## Changelog + +### v0.1.0 + +- Initial specification diff --git a/config-base.yaml b/config-base.yaml index 18c684b..f8a38be 100644 --- a/config-base.yaml +++ b/config-base.yaml @@ -1,30 +1,19 @@ -# Replace the values below with your own -# Rename this file to config.yaml +# Copy this file to config.yaml and customize values. arxiv: categories: - - cs.AI # Artificial Intelligence - - cs.CL # Computation and Language - - cs.LG # Machine Learning - # - physics.comp-ph # Computational Physics - max_results: 100 # Maximum number of papers to fetch per category (0 for no limit) + - cs.AI + - cs.CL + - cs.LG + max_results: 50 processor: keywords: - - machine learning - - natural language processing - - deep learning - - neural networks - - artificial intelligence - exclusion_keywords: - - quantum - - cryptography - - game theory - - data mining - important_words: - - novel - - innovative - - state-of-the-art + - transformer + - reasoning + - language model + exclusion_keywords: [] + important_words: [] title_keyword_weight: 3 abstract_keyword_weight: 2 content_keyword_weight: 1 @@ -33,18 +22,52 @@ processor: min_score: 10 analyzer: - type: abstract # abstract | summary - llm_provider: openai # gemini | openai - -notifier: - email: - to: "your_email@example.com" - from: "sender_email@example.com" - password: "YOUR_PASSWORD_HERE" - smtp_server: "smtp.example.com" - smtp_port: 587 # 465 | 587 - sort_order: alphabetical # alphabetical | publication_time | relevance + type: abstract # summary | abstract + llm_provider: openai # openai | gemini + # api_key: ${OPENAI_API_KEY} + max_input_tokens: 7000 + max_input_chars: 20000 + +# AI-first shortlist gate (title + abstract) +triage: + 
enabled: true + llm_provider: openai # openai | gemini + # api_key: ${OPENAI_API_KEY} + min_score: 60 + max_selected: 25 logging: level: INFO # DEBUG | INFO | WARNING | ERROR - file: paperweight.log # paperweight.log | /path/to/logfile.log \ No newline at end of file + file: paperweight.log + +# Optional metadata for --delivery atom +feed: + title: paperweight + id: https://github.com/seanbrar/paperweight + link: https://github.com/seanbrar/paperweight + +# Optional section for --delivery email +# notifier: +# email: +# to: "you@example.com" +# from: "you@example.com" +# password: "${EMAIL_PASSWORD}" +# smtp_server: "smtp.example.com" +# smtp_port: 587 +# use_tls: true +# use_auth: true +# sort_order: relevance + +# Optional Postgres persistence +db: + enabled: false + host: localhost + port: 5432 + database: paperweight + user: paperweight + password: "${DB_PASSWORD}" + sslmode: prefer + +# Optional artifact storage path +storage: + base_dir: data/artifacts diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml new file mode 100644 index 0000000..73d14ee --- /dev/null +++ b/docker-compose.dev.yml @@ -0,0 +1,17 @@ +# Development-only services for local testing. +# DO NOT use these settings in production. +version: '3.8' + +services: + mailpit: + image: axllent/mailpit + container_name: mailpit + restart: unless-stopped + ports: + - "8025:8025" # Web UI + - "1025:1025" # SMTP Port + environment: + MP_MAX_MESSAGES: 5000 + # Insecure auth settings for local testing only + MP_SMTP_AUTH_ACCEPT_ANY: 1 + MP_SMTP_AUTH_ALLOW_INSECURE: 1 diff --git a/docs/CLI.md b/docs/CLI.md new file mode 100644 index 0000000..0723fd8 --- /dev/null +++ b/docs/CLI.md @@ -0,0 +1,73 @@ +# CLI reference + +paperweight has three commands: + +1. `run`: execute the pipeline and deliver output +2. `init`: create a minimal config +3. `doctor`: validate local setup + +`paperweight` is shorthand for `paperweight run`. 
+ +## run + +```bash +paperweight run \ + [--config PATH] \ + [--force-refresh] \ + [--delivery stdout|json|atom|email] \ + [--output PATH] \ + [--sort-order relevance|alphabetical|publication_time] \ + [--max-items N] +``` + +Behavior: + +- fetches recent arXiv papers +- runs triage on title + abstract +- hydrates full text only for shortlisted papers +- scores/summarizes and delivers digest + +Delivery modes: + +- `stdout`: plain text digest (default) +- `json`: script-friendly array of objects +- `atom`: Atom feed XML +- `email`: SMTP send via `notifier.email` config + +`json` fields: + +- `title` +- `date` +- `score` +- `why` +- `link` +- `summary` + +## init + +```bash +paperweight init [--config PATH] [--force] +``` + +Behavior: + +- writes a minimal `config.yaml` template +- refuses to overwrite unless `--force` is passed + +## doctor + +```bash +paperweight doctor [--config PATH] [--strict] +``` + +Checks: + +- config file exists +- config parses and validates +- triage provider key availability +- enabled delivery adapters + +Exit codes: + +- `0`: healthy (or warnings present without `--strict`) +- `1`: hard failure, or warning in strict mode diff --git a/docs/CONFIGURATION.md b/docs/CONFIGURATION.md index 633d83d..81a62b2 100644 --- a/docs/CONFIGURATION.md +++ b/docs/CONFIGURATION.md @@ -1,206 +1,130 @@ -# paperweight configuration guide +# paperweight configuration -## Introduction +paperweight keeps config intentionally simple. -This document explains how to configure paperweight. The system uses a YAML configuration file to customize its behavior, allowing you to tailor the paper selection, processing, analysis, and notification to your specific needs. 
- -## Table of Contents -- [paperweight Configuration Guide](#paperweight-configuration-guide) - - [Introduction](#introduction) - - [Configuration Setup](#configuration-setup) - - [Security Note](#security-note) - - [Configuration Options](#configuration-options) - - [ArXiv Settings](#arxiv-settings) - - [Processor Settings](#processor-settings) - - [Analyzer Settings](#analyzer-settings) - - [Notifier Settings](#notifier-settings) - - [Logging Settings](#logging-settings) - - [Additional Notes](#additional-notes) - - [Troubleshooting](#troubleshooting) - -## Configuration Setup - -1. Locate the `config-base.yaml` file in the project directory. -2. Make a copy of this file and rename it to `config.yaml`. -3. Open `config.yaml` in a text editor and modify the values according to your preferences. - -## Security Note - -> ⚠️ **Important:** Keep your `config.yaml` file secure, especially if it contains sensitive information like email passwords or API keys. We strongly recommend using environment variables for sensitive data instead of storing them directly in the configuration file. - -For more information on securely managing passwords, API keys, and other sensitive information, please refer to the [environment variables guide](ENVIRONMENT_VARIABLES.md). - -## Configuration Options - -### ArXiv Settings +## Minimal config ```yaml arxiv: - categories: - - cs.AI - - cs.CL - - cs.LG - max_results: 100 -``` - -- `categories`: List the arXiv categories you're interested in. Replace with your desired categories. Examples: - - `cs.AI` (Artificial Intelligence) - - `cs.CL` (Computation and Language) - - `cs.LG` (Machine Learning) -- Find a full list of arXiv categories [here](https://arxiv.org/category_taxonomy). -- `max_results`: Specifies an additional limit on the number of papers to fetch per category. - - If set to a positive number (e.g., 100), it limits the papers fetched per category to that number. 
- - If set to 0, no additional limit is applied beyond the daily published papers. - - Example: If there are 15 papers in category A, 10 in B, and 5 in C, with `max_results: 10`: - - Category A: 10 papers fetched - - Category B: 10 papers fetched - - Category C: 5 papers fetched - -**Note**: Setting a lower `max_results` value can help reduce processing time, especially for popular categories with many daily submissions. + categories: ["cs.AI", "cs.CL"] + max_results: 50 -### Processor Settings - -The processor settings control how papers are evaluated and scored. These settings allow you to customize the system to focus on topics that are most relevant to your interests. - -```yaml processor: - keywords: - - machine learning - - natural language processing - - deep learning - exclusion_keywords: - - quantum computing - - blockchain - important_words: - - transformer - - attention mechanism + keywords: ["transformer", "reasoning", "agents"] + exclusion_keywords: [] + important_words: [] title_keyword_weight: 3 abstract_keyword_weight: 2 content_keyword_weight: 1 exclusion_keyword_penalty: 5 important_words_weight: 0.5 min_score: 10 -``` - -#### Understanding the Settings -1. `keywords`: - - List of terms you're interested in. - - The system searches for these words in the title, abstract, and main content of each paper. - - Papers containing these words receive higher scores. +analyzer: + type: abstract # abstract | summary + llm_provider: openai # openai | gemini -2. `exclusion_keywords`: - - Terms you want to avoid. - - If a paper contains these words, its score is reduced. +triage: + enabled: true + llm_provider: openai # openai | gemini + min_score: 60 + max_selected: 25 -3. `important_words`: - - Special terms that are particularly significant in your field of interest. - - Finding these words in a paper gives it an extra score boost. +logging: + level: INFO + file: paperweight.log +``` -4. 
`title_keyword_weight`: 3 - - Determines how much a keyword in the title contributes to the overall score. +With this config, `paperweight` outputs a digest to `stdout`. -5. `abstract_keyword_weight`: 2 - - Sets the importance of keywords found in the paper's abstract. +## How triage works -6. `content_keyword_weight`: 1 - - Defines how much keywords in the main content contribute to the score. +1. Fetch metadata from arXiv. +2. Run AI triage on title + abstract (`triage` section). +3. Fetch full content only for shortlisted papers. +4. Run processor/analyzer on that smaller set. -7. `exclusion_keyword_penalty`: 5 - - Determines how much the score is reduced when an exclusion keyword is found. +This keeps runtime lower than downloading full text for every candidate. -8. `important_words_weight`: 0.5 - - Sets how much the important words contribute to the overall score. +## Optional sections -9. `min_score`: 10 - - The minimum score a paper must achieve to be included in your notifications. +### `notifier` (only for `--delivery email`) -#### How It Works +```yaml +notifier: + email: + to: "you@example.com" + from: "you@example.com" + password: "${EMAIL_PASSWORD}" + smtp_server: "smtp.example.com" + smtp_port: 587 + use_tls: true + use_auth: true + sort_order: relevance +``` -1. For each paper, the system searches for your keywords in the title, abstract, and content. -2. It calculates a score based on how many keywords are found and where they appear, using the respective weights. -3. If exclusion keywords are found, it reduces the score. -4. It then looks for important words and adds a small boost to the score if they're present. -5. If the final score is at least equal to the `min_score`, the paper is included in your notifications. +If you do not use `--delivery email`, this section is optional. -By adjusting these settings, you can fine-tune the system to better match your specific interests and filter out less relevant papers. 
+### `feed` (metadata for `--delivery atom`) -**Note**: The system requires at least one item in each category (keywords, exclusion_keywords, important_words) to function properly. +```yaml +feed: + title: "paperweight" + id: "https://github.com/seanbrar/paperweight" + link: "https://github.com/seanbrar/paperweight" +``` -### Analyzer Settings (BETA) +### `db` (optional Postgres persistence) ```yaml -analyzer: - type: abstract # abstract | summary - llm_provider: openai # gemini | openai +db: + enabled: false + host: localhost + port: 5432 + database: paperweight + user: paperweight + password: "${DB_PASSWORD}" + sslmode: prefer ``` -- `type`: Choose between: - - `abstract`: Uses the paper's original abstract in the final email. - - `summary` (BETA): Generates a summary using the specified LLM provider. Note: This feature is currently in beta and may have limitations. -- `llm_provider`: Select the LLM provider for summarization (only applicable when `type` is set to `summary`): - - `openai`: Uses OpenAI's API for summarization. - - `gemini`: Uses Google's Gemini API for summarization. +### `storage` (artifact storage, used with DB workflows) + +```yaml +storage: + base_dir: data/artifacts +``` -**Note**: Using the `summary` option requires an API key for the chosen provider. Set this as an environment variable for security. +## Environment overrides -> **Note**: The `summary` option is currently in BETA. If you experience any issues, please revert to the `abstract` type and report the problem on our GitHub issues page. +`PAPERWEIGHT_` env vars override config values. 
-

### Notifier Settings

Preferred nested form:

```yaml
-notifier:
-  email:
-    to: "your_email@example.com"
-    from: "sender_email@example.com"
-    password: "YOUR_PASSWORD_HERE"
-    smtp_server: "smtp.example.com"
-    smtp_port: 587 # 465 | 587
-    sort_order: alphabetical # alphabetical | publication_time | relevance
+```bash
+export PAPERWEIGHT_ARXIV_MAX_RESULTS=100
```

Replace these values with your email settings:
- `to`: Your email address where you want to receive notifications.
- `from`: The email address sending the notifications (often the same as `to`).
- `password`: The password for the sender email account.
- `smtp_server` and `smtp_port`: These depend on your email provider. Common examples:
  - Gmail: `smtp.gmail.com`, port 587
  - Yahoo: `smtp.mail.yahoo.com`, port 587
  - Outlook: `smtp-mail.outlook.com`, port 587
- `sort_order`: Determines how papers are sorted in the notification email.

### Logging Settings
+Legacy leaf form is still supported:

```yaml
logging:
  level: INFO # DEBUG | INFO | WARNING | ERROR
  file: paperweight.log
+```bash
+export PAPERWEIGHT_MAX_RESULTS=100
```

- `level`: Set the logging level:
  - `DEBUG`: Detailed information, typically useful for debugging.
  - `INFO`: General information about program execution.
  - `WARNING`: Indicates potential issues that don't prevent the program from working.
  - `ERROR`: Serious issues that cause the program to fail in performing some functions.
- `file`: Specifies the log file name. This uses a relative path from the project's root directory.
+## Analyzer keys

For detailed debugging, set the level to DEBUG. For normal operation, INFO is recommended.
+When `analyzer.type: summary`, an API key is required.

## Additional Notes
+When `triage.enabled: true`, an API key is strongly recommended. Without one,
+paperweight falls back to a lightweight keyword/abstract heuristic.

- The system processes multiple arXiv categories sequentially.
-- Paper ranking in the final output is based on the specified `sort_order`: - - `alphabetical`: Papers are sorted alphabetically by title. - - `publication_time`: Papers are sorted by their publication date, most recent first. - - `relevance`: Papers are sorted by their relevance scores, highest first (default if not specified). -- When using LLM providers for summarization, ensure you have the necessary API keys set up as environment variables. +Provider keys: -## Troubleshooting +- `OPENAI_API_KEY` for OpenAI +- `GEMINI_API_KEY` for Gemini -If you encounter issues with your configuration: -1. Double-check all required fields are filled out correctly. -2. Ensure your email settings are correct, especially if using 2-factor authentication. -3. Verify your API keys are correctly set as environment variables when using LLM summarization. -4. Check the log file for any error messages. +## CLI + config interaction -For more general troubleshooting and frequently asked questions, please refer to the [FAQ](FAQ.md). \ No newline at end of file +- `--delivery stdout` ignores `notifier`. +- `--delivery json` ignores `notifier`. +- `--delivery atom` uses optional `feed` metadata. +- `--delivery email` requires valid `notifier.email` settings. diff --git a/docs/FAQ.md b/docs/FAQ.md index 4e84b63..d2a8144 100644 --- a/docs/FAQ.md +++ b/docs/FAQ.md @@ -1,195 +1,50 @@ -# paperweight FAQ +# FAQ -## Table of Contents +## Is paperweight still useful if I can just check arXiv directly? -1. [General Questions](#general-questions) -2. [Configuration and Setup](#configuration-and-setup) -3. [Usage and Features](#usage-and-features) -4. [Troubleshooting](#troubleshooting) -5. [Maintenance and Updates](#maintenance-and-updates) +Yes, if your main pain is triage time. 
paperweight is best when you want: -## General Questions +- a short, ranked list instead of scanning every new submission +- consistent daily output with the same filters +- scriptable output (`json`/`atom`) for your own workflows -### What is paperweight? +If you enjoy broad discovery and manual browsing, arXiv directly may be better. -paperweight is a personal project that automatically retrieves, filters, and summarizes recent academic papers from arXiv based on user-specified categories and preferences. It then sends notifications to the user via email. +## Do I need API keys? -### How often does paperweight check for new papers? +No for basic usage. -The program checks for new papers every time it is run. It compares the current date against the date stored in `last_processed_date.txt` in the root directory. If this file doesn't exist, it assumes it's the first run and pulls papers from the last seven days. +- `init`, `doctor`, and `run` in `analyzer.type: abstract` mode work without keys. +- if triage key is missing, triage falls back to heuristic mode. +- summaries in `analyzer.type: summary` require a provider key. -### What is the `last_processed_date.txt` file? +## What should I use as a first-run setup? -The `last_processed_date.txt` file is automatically created and updated by paperweight to keep track of when it last successfully processed papers. This file: - -- Is created in the root directory of the project after the first successful run. -- Contains a single date in the format YYYY-MM-DD. -- Is used to determine which papers to fetch on subsequent runs, avoiding duplicate processing. -- Can be safely deleted if you want to reset the last processed date (paperweight will then fetch papers from the last 7 days on the next run). - -### How does paperweight determine which papers to fetch? - -paperweight uses the following logic to determine which papers to fetch: - -1. 
If it's the first run (no `last_processed_date.txt` file exists), it fetches papers from the last 7 days. -2. On subsequent runs, it fetches papers published since the date in `last_processed_date.txt`. -3. The number of papers fetched per category is limited by the `max_results` setting in your configuration. - -### Can I use paperweight with sources other than arXiv? - -Currently, paperweight only supports arXiv as a source for academic papers. Support for additional sources may be added in future updates. - -## Configuration and Setup - -### How do I set up paperweight? - -To set up paperweight: - -1. Clone the repository and navigate to the project directory. -2. Copy `config-base.yaml` to `config.yaml` and edit it with your preferences. -3. Create a `.env` file in the project root and add your API keys if using summarization functionality. -4. Install the package using `pip install .` -5. Run the application using the `paperweight` command. - -For more detailed instructions, please refer to the [README.md](../README.md) file. -For detailed configuration instructions, please see the [configuration guide](CONFIGURATION.md). - -### How can I use a different LLM provider for summarization? - -Currently, paperweight supports two LLM providers for summarization: OpenAI's GPT and Google's Gemini. This feature is currently in BETA and may have some limitations. You can specify the provider in the `config.yaml` file under the `analyzer` section: - -```yaml -analyzer: - type: summary - llm_provider: openai # or gemini +```bash +paperweight init +paperweight doctor +paperweight run --force-refresh ``` -Make sure you have the appropriate API key set. You can set this in your `config.yaml`, as an environment variable, or in your `.env` file. For more details on securely managing API keys, please refer to the [environment variables guide](ENVIRONMENT_VARIABLES.md). 
- -If you experience any issues with the summarization feature, you can switch to the `abstract` type in your configuration. We encourage users to report any problems or suggestions related to the BETA features by opening an issue on our GitHub repository. - -## Usage and Features +## Which output mode should I choose? -### How can I customize which papers are retrieved and processed? +- `stdout`: best for direct terminal use +- `json`: best for scripts/automation +- `atom`: best for feed readers +- `email`: best for push delivery when SMTP is configured -You can customize paper retrieval and processing by editing the `config.yaml` file. Key settings include: +## How do I keep runs fast? -1. arXiv categories -2. Keywords and exclusion keywords -3. Scoring weights -4. Minimum score threshold +- keep `arxiv.max_results` moderate +- use triage to shortlist aggressively (`triage.min_score`, `triage.max_selected`) +- stay on `analyzer.type: abstract` unless summaries are needed -For a detailed explanation of all configuration options, please see the [configuration guide](CONFIGURATION.md). +## How do I validate setup in CI before release? -### How do I interpret the relevance scores and rankings? - -Relevance scores are calculated based on the presence of keywords, important words, and exclusion keywords in the paper's title, abstract, and content. Papers are then ranked based on these scores. A higher score indicates that the paper is more likely to be relevant to your interests as defined in the configuration. - -### Is it possible to exclude certain topics or keywords from the results? - -Yes, you can use exclusion keywords to make certain papers less likely to be recommended. In your `config.yaml`, add exclusion keywords under the `processor` section: - -```yaml -processor: - exclusion_keywords: - - keyword1 - - keyword2 -``` - -Note that this doesn't completely exclude papers with these keywords, but significantly reduces their relevance score. 
The effectiveness of exclusion keywords depends on their weight relative to other scoring factors. - -### How can I use the `--force-refresh` argument? - -The `--force-refresh` argument allows you to ignore the `last_processed_date.txt` file and fetch papers from the last 7 days. This can be useful if you want to reprocess recent papers or if you've made significant changes to your configuration. Use it like this: +Use: +```bash +paperweight doctor --strict ``` -paperweight --force-refresh -``` - -### Can I customize the email format or content? - -Currently, the email format and content are not customizable. This feature may be added in future updates. - -## Troubleshooting - -### How can I troubleshoot issues with paper downloads or processing? - -To troubleshoot paper download or processing issues: - -1. Set the logging level to DEBUG in your `config.yaml`: - -```yaml -logging: - level: DEBUG - file: paperweight.log -``` - -2. Run paperweight again and check the log file for detailed information about each step of the process. -3. Look for any error messages or warnings in the log that might indicate the source of the problem. - -### What should I do if I encounter Python dependency issues? - -If you encounter Python dependency issues: - -1. Ensure you're using Python 3.10 or higher. -2. Try creating a new virtual environment and installing paperweight fresh. -3. Update your pip and setuptools: `pip install --upgrade pip setuptools` -4. If you're still having issues, check the project's `setup.py` file for the list of required packages and versions, and try installing them manually. - -### What should I do if I'm not receiving email notifications? - -If you're not receiving email notifications: - -1. Check your spam folder and verify your email configuration in `config.yaml`. -2. Ensure your SMTP settings are correct, especially if using Gmail or other providers with specific security requirements. -3. 
Check the log file (default: `paperweight.log`) for any error messages. - -If you continue to have problems, please open an issue on the project's GitHub page. - -### What do I do if I encounter API rate limits or errors? - -If you encounter API rate limits: - -1. Try reducing the `max_results` value in your config file. -2. Run paperweight less frequently. -3. Check your API usage on the provider's website. -4. Consider using the 'abstract' analyzer type instead of 'summary' to limit external API use. - -### What should I do if the program seems to hang? - -Check the log file to ensure it's still processing. Large paper sets or enabled summarization can increase runtime. The program will update the log file as it progresses through different stages of paper retrieval and processing. - -### Why am I getting unexpected paper selections? - -Review your keyword and scoring settings in the configuration file. The relevance of papers is determined by these settings. Adjust keywords, exclusion keywords, and scoring weights as needed to refine results. You may need to experiment with different configurations to achieve the desired paper selection. - -## Maintenance and Updates - -### How do I update paperweight to the latest version? - -As paperweight doesn't have an established distribution pipeline yet, to update: - -1. Pull the most recent version from the GitHub repository. -2. Reinstall the package using `pip install .` in the project directory. - -### How can I contribute to the paperweight project? - -Contributions to paperweight are welcome! You can contribute by: - -1. Submitting issues for bugs or feature requests on the GitHub repository. -2. Creating pull requests with bug fixes or new features. -3. Improving documentation or writing tests. - -Please refer to the project's GitHub page for more information on contributing. - -### Is there a way to export or save the processed paper data? 
- -Currently, paperweight does not have a built-in feature to export or save processed paper data. This could be a valuable feature to add in the future. - -### How does paperweight handle papers in languages other than English? - -While paperweight should theoretically work with papers in languages other than English, this functionality has not been extensively tested. The effectiveness may vary depending on the language and the LLM provider used for summarization. - -### Can I use paperweight in an offline environment? -No, paperweight requires an internet connection to fetch papers from arXiv and to use the LLM APIs for summarization (if enabled). It cannot be used in a fully offline environment. \ No newline at end of file +This returns non-zero if warnings are present. diff --git a/docs/ROADMAP.md b/docs/ROADMAP.md index 8fc9760..a829a4a 100644 --- a/docs/ROADMAP.md +++ b/docs/ROADMAP.md @@ -1,87 +1,79 @@ # paperweight roadmap -This document outlines planned features and improvements for the paperweight project. The roadmap is organized into focused development areas to create a scalable, efficient academic paper processing system. 
- -## Core System Enhancements - -### Performance & Efficiency -- [ ] Implement asynchronous processing for paper fetching and analysis -- [ ] Add configurable batch processing with adjustable batch sizes -- [ ] Create memory usage tracking and optimization for large document sets -- [ ] Implement benchmarking tools to measure and optimize performance - -### Context Management -- [ ] Develop intelligent document chunking for papers exceeding token limits -- [ ] Implement hierarchical summarization for extremely long papers -- [ ] Create a context window awareness system that optimizes token usage -- [ ] Add semantic sectioning to prioritize important paper components - -### Caching Infrastructure -- [ ] Implement persistent caching for paper embeddings and metadata -- [ ] Create smart cache invalidation strategies based on paper updates -- [ ] Develop a disk-based storage system for embeddings to reduce API costs -- [ ] Add cache statistics reporting for optimization insights - -## Module-Specific Improvements - -### Scraper Module -- [ ] Enhance PDF extraction precision with specialized academic paper handling -- [ ] Add support for extracting and processing figures and tables -- [ ] Expand retry logic in API interactions using advanced backoff strategies -- [ ] Improve date-based paper filtering with precise version tracking - -### Processor Module -- [ ] Develop enhanced scoring algorithms for more accurate paper relevance -- [ ] Implement sliding window analysis for sequential context processing -- [ ] Create adaptive keyword weighting based on document section importance -- [ ] Add citation network analysis for evaluating paper significance - -### Analyzer Module -- [ ] Expand LLM provider support with a unified interface -- [ ] Implement streaming responses for long paper summarization -- [ ] Create domain-specific summarization templates for different fields -- [ ] Add comparative analysis between related papers - -### Notifier Module -- [ ] Develop a 
modular notification system supporting multiple channels -- [ ] Create customizable templates for notification formatting -- [ ] Implement digest mode for batched notifications -- [ ] Add interactive elements to notifications for user feedback - -## Strategic Directions - -### Machine Learning Integration -- [ ] Replace keyword-based filtering with embedding similarity scoring -- [ ] Implement personalized paper recommendations based on user interests -- [ ] Develop citation impact prediction for emerging papers -- [ ] Create a feedback loop to improve future recommendations - -### Expanded Data Sources -- [ ] Add support for multiple academic repositories (PubMed, IEEE, etc.) -- [ ] Implement unified metadata schema across different sources -- [ ] Create source-specific optimizations for each repository -- [ ] Develop cross-repository deduplication - -### User Experience -- [ ] Create a simple web interface for configuration and monitoring -- [ ] Develop a local dashboard for visualizing paper recommendations -- [ ] Add personalized preference learning from user interactions -- [ ] Implement saved searches and automated monitoring - -## Development Infrastructure - -### Testing & Quality -- [ ] Expand test coverage with more integration tests -- [ ] Develop performance regression testing -- [ ] Create automated benchmark suites for optimization -- [ ] Implement continuous profiling for memory and CPU usage - -### Documentation -- [ ] Expand API documentation for extensibility -- [ ] Create visual architecture diagrams -- [ ] Develop advanced configuration guides for specific use cases -- [ ] Add code examples for common extension patterns - -We welcome contributions and suggestions from the community. If you have ideas for features or improvements, please open an issue on the [GitHub repository](https://github.com/seanbrar/paperweight/issues). - -For information on how to contribute to paperweight, please see the [contributing guide](docs/CONTRIBUTING.md). 
\ No newline at end of file +This roadmap is metric-driven. Feature work is only accepted if it improves usefulness: +time saved, setup simplicity, and digest quality. + +## Product definition + +paperweight should be better than "just checking arXiv" when the user wants: + +- a smaller daily reading queue +- deterministic output that can be automated +- relevance filtering that improves over time + +## Core success metrics + +These metrics guide all releases: + +1. **Time to first useful run** + - target: <= 5 minutes from install to first digest +2. **Daily digest size** + - target: median 5-20 items after user tuning +3. **Runtime** + - target: <= 120 seconds for `3 categories x max_results=50` on default non-summary mode +4. **CLI reliability** + - target: >= 99% successful runs in local smoke workflows +5. **Signal quality (human-evaluated)** + - target: >= 7/10 items marked "worth reading" in pilot usage + +## v0.2 release gates (must pass) + +1. CLI contract stable: + - `run`, `init`, `doctor` + - `run` delivery: `stdout`, `json`, `atom`, optional `email` +2. Zero-key baseline works: + - `init` defaults to `analyzer.type: abstract` + - `run` works without LLM keys via triage fallback +3. Setup validation: + - `doctor --strict` returns non-zero on warnings/failures +4. Output ergonomics: + - deterministic text digest + - scriptable JSON + - Atom feed export +5. Quality checks: + - lint clean + - tests green (including small CLI integration suite) +6. Packaging: + - release workflow present and tag-driven + +## v0.3 focus (quality lift, not surface-area lift) + +1. **Speed** + - add metadata cache + - target: >= 40% runtime reduction on repeated daily runs +2. **Digest quality** + - improve triage rationale quality and compactness + - target: rationale present on >= 95% of shortlisted items +3. **Workflow fit** + - add saved presets/profile switching + - target: switch profile in one command, no config edits + +## v0.4 focus (feedback loop) + +1. 
add local feedback capture (`relevant` / `irrelevant`) +2. incorporate feedback into ranking +3. target: +20% improvement in user-rated relevance from v0.2 baseline + +## v1.0 criteria + +1. stable CLI and config semantics +2. upgrade path documented for all `v0.x` users +3. reproducible, deterministic outputs for identical inputs/config +4. reliability and quality metrics sustained for two consecutive minor releases + +## Non-goals (until metrics justify) + +- web dashboard +- many new paper sources +- broad plugin systems +- complex recommendation models without feedback data diff --git a/docs/db_schema.sql b/docs/db_schema.sql new file mode 100644 index 0000000..00d3030 --- /dev/null +++ b/docs/db_schema.sql @@ -0,0 +1,80 @@ +-- paperweight database schema (Postgres) +-- Run this on a fresh database. + +CREATE EXTENSION IF NOT EXISTS pgcrypto; + +CREATE TABLE runs ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + started_at timestamptz NOT NULL DEFAULT now(), + completed_at timestamptz, + status text NOT NULL, + config_hash text NOT NULL, + pipeline_version text NOT NULL, + notes text +); + +CREATE TABLE papers ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + arxiv_id text NOT NULL, + arxiv_version text NOT NULL, + title text NOT NULL, + abstract text, + published_at date, + updated_at timestamptz, + primary_category text, + categories text[], + link text, + doi text, + authors text[], + created_at timestamptz NOT NULL DEFAULT now(), + UNIQUE (arxiv_id, arxiv_version) +); + +CREATE TABLE paper_artifacts ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + paper_id uuid NOT NULL REFERENCES papers(id) ON DELETE CASCADE, + artifact_type text NOT NULL, + storage_uri text NOT NULL, + checksum text, + byte_size bigint, + created_at timestamptz NOT NULL DEFAULT now() +); + +CREATE TABLE scores ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + run_id uuid NOT NULL REFERENCES runs(id) ON DELETE CASCADE, + paper_id uuid NOT NULL REFERENCES papers(id) ON DELETE 
CASCADE, + score_type text NOT NULL, + score double precision NOT NULL, + details_json jsonb, + created_at timestamptz NOT NULL DEFAULT now() +); + +CREATE TABLE summaries ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + run_id uuid NOT NULL REFERENCES runs(id) ON DELETE CASCADE, + paper_id uuid NOT NULL REFERENCES papers(id) ON DELETE CASCADE, + summary_text text NOT NULL, + model text, + prompt_hash text, + token_usage_json jsonb, + created_at timestamptz NOT NULL DEFAULT now() +); + +CREATE TABLE paper_labels ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + paper_id uuid NOT NULL REFERENCES papers(id) ON DELETE CASCADE, + label_source text NOT NULL, + label_value boolean NOT NULL, + notes text, + created_at timestamptz NOT NULL DEFAULT now(), + UNIQUE (paper_id, label_source) +); + +CREATE INDEX papers_arxiv_id_idx ON papers (arxiv_id); +CREATE INDEX papers_published_at_idx ON papers (published_at); +CREATE INDEX scores_run_id_idx ON scores (run_id); +CREATE INDEX scores_paper_id_idx ON scores (paper_id); +CREATE INDEX paper_artifacts_paper_id_idx ON paper_artifacts (paper_id); +CREATE INDEX summaries_run_id_idx ON summaries (run_id); +CREATE INDEX summaries_paper_id_idx ON summaries (paper_id); diff --git a/docs/ingestion_plan.md b/docs/ingestion_plan.md new file mode 100644 index 0000000..a315343 --- /dev/null +++ b/docs/ingestion_plan.md @@ -0,0 +1,43 @@ +# Ingestion Plan (paperweight) + +## Goals +- Persist raw arXiv metadata, extracted content, and outputs with run lineage. +- Support reproducible runs and future evaluation without relying on local files. +- Keep ingestion idempotent using arXiv ID + version. + +## Step-by-step flow + +1) Initialize run +- Create a `runs` record with `config_hash`, `pipeline_version`, and status `running`. +- Use the most recent successful run as the watermark for incremental fetch. + +2) Fetch metadata from arXiv +- Query categories with `start` + `max_results` paging. +- Stop when `published_at` < watermark date. 
+- Upsert into `papers` on `(arxiv_id, arxiv_version)`. + +3) Fetch and store content +- For new paper versions, download source/PDF. +- Write raw files to object storage or local `data/` and save `paper_artifacts`. +- Extract text once and store as `paper_artifacts` type `text`. +- Optionally chunk and store in `paper_text_chunks`. + +4) Scoring +- Run baseline keyword scoring. +- Insert `scores` rows with a structured `details_json` breakdown. + +5) Summarization +- Generate summaries with prompt + model metadata. +- Cache by `prompt_hash` to avoid re-summarization. +- Insert `summaries` rows with token usage metadata. + +6) Notification +- Build notification payload from `scores` + `summaries`. +- Insert a `notifications` row and mark as `sent` when delivered. + +7) Finalize run +- Update `runs` status to `success` or `failed`, set `completed_at`. + +## Evaluation-ready data +- Use `paper_labels` to store human relevance labels. +- Metrics can be computed per `run_id` and compared across runs. diff --git a/pyproject.toml b/pyproject.toml index 6dfbd23..54de9db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,14 +1,93 @@ +[project] +name = "academic-paperweight" +version = "0.2.0" +description = "Automated retrieval, filtering, and LLM-powered summarization of arXiv papers based on your research interests." 
+readme = "README.md" +requires-python = ">=3.11, <3.14" +license = { text = "MIT" } +authors = [{ name = "Sean Brar", email = "hello@seanbrar.com" }] +keywords = [ + "arxiv", + "research", + "papers", + "academic", + "llm", + "summarization", + "machine-learning", + "ai", + "notifications", +] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Text Processing :: Filters", + "Typing :: Typed", +] +dependencies = [ + "arxiv>=2.1.0", + "pypdf>=4.3.1", + "psycopg[binary]>=3.2.1", + "python-dotenv>=1.0.1", + "PyYAML>=6.0.2", + "requests>=2.31.0", + "tenacity>=9.0.0", + "tiktoken>=0.9.0", + "pollux-ai>=1.0.0a0", +] + +[project.scripts] +paperweight = "paperweight.main:main" + +[project.optional-dependencies] +dev = [ + "html2text>=2024.2.26", + "mypy>=1.11.2", + "pre-commit>=3.8.0", + "pytest>=9.0.2", + "pytest-cov>=5.0.0", + "pytest-mock>=3.10.0", + "ruff>=0.6.4", + "types-PyYAML>=6.0.12.20240808", + "types-requests>=2.32.0.20240907", +] + +[project.urls] +Homepage = "https://github.com/seanbrar/paperweight" +Repository = "https://github.com/seanbrar/paperweight" +Changelog = "https://github.com/seanbrar/paperweight/blob/main/CHANGELOG.md" +Issues = "https://github.com/seanbrar/paperweight/issues" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["src/paperweight"] + +[tool.hatch.build.targets.sdist] +exclude = [ + "src/mocks/**", +] + [tool.ruff] line-length = 88 -lint.select = ["F", "E", "W", "C90", "I001", "F401"] -lint.ignore = ["E501"] +target-version = "py311" -# Enable automatic 
fixing of lint issues -lint.fixable = ["ALL"] -lint.unfixable = ["F401"] +[tool.ruff.lint] +select = ["F", "E", "W", "C90", "I001", "F401"] +ignore = ["E501"] +fixable = ["ALL"] +unfixable = ["F401"] [tool.ruff.format] -# Formatter settings quote-style = "double" indent-style = "space" skip-magic-trailing-comma = false @@ -16,11 +95,17 @@ line-ending = "auto" [tool.mypy] mypy_path = "src" +exclude = ['^src/mocks/'] + +[[tool.mypy.overrides]] +module = "arxiv.*" +ignore_missing_imports = true [tool.pytest.ini_options] -addopts = "--strict-markers" +addopts = "--strict-markers --cov=paperweight --cov-report=term-missing" markers = [ - "integration: marks tests as integration tests", + "integration: marks tests as integration tests (may require external services)", + "api: marks tests requiring real external APIs (database, etc.)", ] testpaths = ["tests"] -pythonpath = ["src"] \ No newline at end of file +pythonpath = ["src"] diff --git a/requirements-dev.txt b/requirements-dev.txt deleted file mode 100644 index 0112b19..0000000 --- a/requirements-dev.txt +++ /dev/null @@ -1,9 +0,0 @@ --r requirements.txt -mypy==1.11.2 -pre-commit==3.8.0 -pytest==7.3.1 -pytest-mock==3.10.0 -ruff==0.6.4 -setuptools==74.1.2 -types-PyYAML==6.0.12.20240808 -types-requests==2.32.0.20240907 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 8476b66..0000000 --- a/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -pypdf==4.3.1 -python-dotenv==1.0.1 -PyYAML==6.0.2 -requests==2.31.0 -simplerllm==0.3.1 -tenacity==9.0.0 -tiktoken==0.7.0 \ No newline at end of file diff --git a/scripts/debug_arxiv.py b/scripts/debug_arxiv.py new file mode 100644 index 0000000..42a4236 --- /dev/null +++ b/scripts/debug_arxiv.py @@ -0,0 +1,53 @@ +"""Debug script for inspecting arxiv library and local mirror database.""" + +import inspect +import sqlite3 +from pathlib import Path + +import arxiv + +# Use same path as populate_mirror.py +DB_PATH = 
Path("data/local_mirror/index.sqlite3") + + +def debug_arxiv(): + """Inspect the arxiv.Result class signature.""" + print("--- arxiv.Result Inspection ---") + print(inspect.signature(arxiv.Result.__init__)) + + # Check what fields are actually in the object + try: + r = arxiv.Result(entry_id="test", title="test") + print("Created minimal Result:", r) + except Exception as e: + print("Creation failed:", e) + + +def debug_db(): + """Inspect the local mirror database contents.""" + print("\n--- DB Inspection ---") + + if not DB_PATH.exists(): + print(f"Database not found at {DB_PATH}") + print("Run scripts/populate_mirror.py first.") + return + + conn = sqlite3.connect(DB_PATH) + try: + cursor = conn.cursor() + cursor.execute("SELECT id, title FROM papers LIMIT 5") + rows = cursor.fetchall() + print("First 5 papers in DB:") + for r in rows: + print(r) + + # Check specific ID + cursor.execute("SELECT * FROM papers WHERE id='1706.03762'") + print("Check 1706.03762:", cursor.fetchone()) + finally: + conn.close() + + +if __name__ == "__main__": + debug_arxiv() + debug_db() diff --git a/scripts/dev_integration_test.py b/scripts/dev_integration_test.py new file mode 100644 index 0000000..61197c4 --- /dev/null +++ b/scripts/dev_integration_test.py @@ -0,0 +1,170 @@ +# ruff: noqa: E402 +import logging +import sys +import time +from pathlib import Path + +import requests +import yaml + +# Add src and scripts to sys.path +ROOT = Path(__file__).parent.parent +sys.path.append(str(ROOT / "src")) +sys.path.append(str(ROOT / "scripts")) + +import export_email + +from paperweight.analyzer import get_abstracts +from paperweight.db import connect_db, is_db_enabled +from paperweight.logging_config import setup_logging +from paperweight.notifier import compile_and_send_notifications +from paperweight.processor import process_papers +from paperweight.scraper import get_recent_papers +from paperweight.storage import ( + create_run, + finish_run, + insert_artifacts, + insert_scores, + 
insert_summaries, + upsert_papers, +) +from paperweight.utils import get_package_version, hash_config + +MAILPIT_API_URL = "http://localhost:8025/api/v1/messages" + +logging.basicConfig(level=logging.INFO, format="%(message)s") +logger = logging.getLogger("dev_integration_test") + + +def load_config(): + config_path = Path("config.yaml") + if not config_path.exists(): + logger.error("❌ Config file not found: %s", config_path) + sys.exit(1) + with config_path.open("r") as handle: + return yaml.safe_load(handle) + + +def mailpit_message_count(): + response = requests.get(MAILPIT_API_URL, timeout=5) + response.raise_for_status() + data = response.json() + return len(data.get("messages", [])) + + +def wait_for_mailpit_message(previous_count, timeout=10): + start = time.time() + while time.time() - start < timeout: + try: + current = mailpit_message_count() + except Exception: + time.sleep(1) + continue + if current > previous_count: + return True + time.sleep(1) + return False + + +def export_latest_email(): + try: + latest_summary = export_email.get_latest_message() + except SystemExit: + return None + + if not latest_summary: + return None + + message_id = latest_summary["ID"] + message = export_email.get_message_content(message_id) + markdown = export_email.format_as_markdown(message) + return export_email.save_email(markdown, message_id) + + +def main(): # noqa: C901 + logger.info("🚀 Starting Paperweight Dev Integration Test...") + config = load_config() + setup_logging(config["logging"]) + + # Fast, predictable test settings + config["arxiv"]["max_results"] = 2 + config["arxiv"]["categories"] = ["cs.AI"] + config["processor"]["min_score"] = 0 + + db_enabled = is_db_enabled(config) + run_id = None + paper_id_map = {} + run_status = "failed" + run_notes = None + + try: + if db_enabled: + config_hash = hash_config(config) + pipeline_version = get_package_version() + with connect_db(config["db"]) as conn: + run_id = create_run(conn, config_hash, pipeline_version, 
"dev_test") + + mailpit_start = mailpit_message_count() + logger.info("📨 Mailpit messages before run: %s", mailpit_start) + + logger.info("🔍 Fetching papers (force refresh)...") + papers = get_recent_papers(config, force_refresh=True) + if not papers: + logger.error("❌ No papers fetched.") + return 1 + + if db_enabled: + with connect_db(config["db"]) as conn: + paper_id_map = upsert_papers(conn, papers) + insert_artifacts(conn, papers, paper_id_map) + + logger.info("🧮 Scoring papers...") + processed = process_papers(papers, config["processor"]) + if not processed: + logger.error("❌ Processor filtered all papers.") + return 1 + + logger.info("🧠 Summarizing papers...") + summaries = get_abstracts(processed, config["analyzer"]) + for paper, summary in zip(processed, summaries): + paper["summary"] = summary or paper.get("abstract", "") + + if db_enabled: + with connect_db(config["db"]) as conn: + insert_scores(conn, run_id, processed, paper_id_map) + insert_summaries(conn, run_id, processed, paper_id_map) + + logger.info("📤 Sending notification email...") + notification_sent = compile_and_send_notifications(processed, config["notifier"]) + if not notification_sent: + logger.error("❌ Notification send failed.") + return 1 + + if not wait_for_mailpit_message(mailpit_start, timeout=12): + logger.error("❌ Mailpit did not receive a new message in time.") + return 1 + + exported_path = export_latest_email() + if not exported_path: + logger.error("❌ Failed to export latest email from Mailpit.") + return 1 + + run_status = "success" + logger.info("✅ Exported email markdown: %s", exported_path) + logger.info("🎉 SUCCESS: End-to-end pipeline verified.") + return 0 + except Exception as e: + run_notes = str(e) + logger.error("❌ Integration test failed: %s", e) + return 1 + finally: + if db_enabled and run_id: + try: + with connect_db(config["db"]) as conn: + finish_run(conn, run_id, run_status, run_notes) + except Exception as e: + logger.error("❌ Failed to finalize run status: 
%s", e) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/export_email.py b/scripts/export_email.py new file mode 100644 index 0000000..b27ca0b --- /dev/null +++ b/scripts/export_email.py @@ -0,0 +1,114 @@ +import sys +from datetime import datetime +from pathlib import Path + +import html2text +import requests + +# Configuration +MAILPIT_API_URL = "http://localhost:8025/api/v1" +EXPORT_DIR = Path("artifacts/emails") + +def ensure_export_dir(): + if not EXPORT_DIR.exists(): + EXPORT_DIR.mkdir(parents=True, exist_ok=True) + +def get_latest_message(): + try: + # Get list of messages (default limit is 50, which is fine) + response = requests.get(f"{MAILPIT_API_URL}/messages") + response.raise_for_status() + data = response.json() + + messages = data.get("messages", []) + if not messages: + return None + + return messages[0] # First one is the latest + except requests.exceptions.ConnectionError: + print("❌ Could not connect to Mailpit. Is it running?") + print(" Run: docker compose up -d") + sys.exit(1) + except Exception as e: + print(f"❌ Error fetching messages: {e}") + sys.exit(1) + +def get_message_content(message_id): + try: + response = requests.get(f"{MAILPIT_API_URL}/message/{message_id}") + response.raise_for_status() + return response.json() + except Exception as e: + print(f"❌ Error fetching message content: {e}") + sys.exit(1) + +def format_as_markdown(message): + h = html2text.HTML2Text() + h.ignore_links = False + h.ignore_images = False + h.body_width = 0 # No wrapping + + headers = message.get("Headers", {}) + subject = headers.get("Subject", ["(No Subject)"])[0] + from_addr = headers.get("From", ["(Unknown)"])[0] + to_addr = headers.get("To", ["(Unknown)"])[0] + date_str = headers.get("Date", [datetime.now().isoformat()])[0] + + # Prefer HTML, fallback to Text + html_body = message.get("HTML") + text_body = message.get("Text") + + if html_body: + body_content = h.handle(html_body) + elif text_body: + body_content = 
f"```text\n{text_body}\n```" + else: + body_content = "(No content)" + + markdown_output = f"""--- +Subject: {subject} +From: {from_addr} +To: {to_addr} +Date: {date_str} +ID: {message['ID']} +--- + +# {subject} + +{body_content} +""" + return markdown_output + +def save_email(markdown_content, message_id): + ensure_export_dir() + + # Create a nice filename + # e.g., email_20230101_120000_abc123.md + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + filename = f"email_{timestamp}_{message_id}.md" + filepath = EXPORT_DIR / filename + + with open(filepath, "w") as f: + f.write(markdown_content) + + return filepath + +if __name__ == "__main__": + print("🔍 Fetching latest email from Mailpit...") + + latest_msg_summary = get_latest_message() + + if not latest_msg_summary: + print("📭 Mailpit is empty! No emails to export.") + print(" Try running: python scripts/verify_email_setup.py") + sys.exit(0) + + msg_id = latest_msg_summary["ID"] + print(f"📥 Found message: {latest_msg_summary.get('Subject')} (ID: {msg_id})") + + full_message = get_message_content(msg_id) + markdown_content = format_as_markdown(full_message) + saved_path = save_email(markdown_content, msg_id) + + print(f"✅ Saved to: {saved_path}") + print(f"\nPro tip: View it with 'code {saved_path}'") diff --git a/scripts/init_db.sh b/scripts/init_db.sh new file mode 100755 index 0000000..c6897be --- /dev/null +++ b/scripts/init_db.sh @@ -0,0 +1,100 @@ +#!/usr/bin/env bash +set -euo pipefail + +# --- 1. Find psql --- +PSQL="psql" +if ! 
command -v psql &> /dev/null; then + # Check common Postgres.app paths + if [ -f "/Applications/Postgres.app/Contents/Versions/latest/bin/psql" ]; then + PSQL="/Applications/Postgres.app/Contents/Versions/latest/bin/psql" + elif [ -d "/Applications/Postgres.app/Contents/Versions" ]; then + # Try to find the latest version directory + LATEST_VER=$(ls -1 /Applications/Postgres.app/Contents/Versions | sort -n | tail -1) + if [ -n "$LATEST_VER" ] && [ -f "/Applications/Postgres.app/Contents/Versions/$LATEST_VER/bin/psql" ]; then + PSQL="/Applications/Postgres.app/Contents/Versions/$LATEST_VER/bin/psql" + fi + fi +fi + +if ! command -v "$PSQL" &> /dev/null && [ ! -x "$PSQL" ]; then + echo "Error: psql not found. Please ensure Postgres.app is installed and running." + exit 1 +fi + +echo "Using psql: $PSQL" + +# --- 2. Parse config.yaml --- +CONFIG_FILE="config.yaml" +if [ ! -f "$CONFIG_FILE" ]; then + echo "Error: $CONFIG_FILE not found." + exit 1 +fi + +# Simple grep/awk parsing (assumes simple structure as seen in file) +get_config() { + key=$1 + grep -A 10 "^db:" "$CONFIG_FILE" | grep "^ $key:" | awk -F': ' '{print $2}' | tr -d '"' | sed 's/ *#.*//' +} + +DB_HOST=$(get_config "host") +DB_PORT=$(get_config "port") +DB_NAME=$(get_config "database") +DB_USER=$(get_config "user") +DB_PASS=$(get_config "password") + +: "${DB_HOST:=localhost}" +: "${DB_PORT:=5432}" +: "${DB_NAME:=paperweight}" +: "${DB_USER:=paperweight}" + +# Validate DB_USER and DB_NAME to prevent SQL injection (alphanumeric and underscores only) +if ! [[ "$DB_USER" =~ ^[a-zA-Z_][a-zA-Z0-9_]*$ ]]; then + echo "Error: DB_USER must be alphanumeric (with underscores). Got: '$DB_USER'" + exit 1 +fi +if ! [[ "$DB_NAME" =~ ^[a-zA-Z_][a-zA-Z0-9_]*$ ]]; then + echo "Error: DB_NAME must be alphanumeric (with underscores). Got: '$DB_NAME'" + exit 1 +fi + +echo "Config: Host=$DB_HOST Port=$DB_PORT DB=$DB_NAME User=$DB_USER" + +# --- 3. 
Create Role/DB if missing --- +# We try to connect to 'postgres' database with current system user to do admin tasks +# This assumes the current system user has superuser access (default in Postgres.app) + +echo "Checking if role '$DB_USER' exists..." +if ! "$PSQL" -h "$DB_HOST" -d postgres -tAc "SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'" | grep -q 1; then + echo "Creating role '$DB_USER'..." + "$PSQL" -h "$DB_HOST" -d postgres -v pw="$DB_PASS" -c "CREATE USER $DB_USER WITH PASSWORD :'pw';" +else + echo "Role '$DB_USER' already exists." +fi + +echo "Checking if database '$DB_NAME' exists..." +if ! "$PSQL" -h "$DB_HOST" -d postgres -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" | grep -q 1; then + echo "Creating database '$DB_NAME'..." + "$PSQL" -h "$DB_HOST" -d postgres -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;" +else + echo "Database '$DB_NAME' already exists." +fi + +# --- 4. Install Extensions (as superuser) --- +echo "Installing extensions..." +"$PSQL" -h "$DB_HOST" -d "$DB_NAME" -c "CREATE EXTENSION IF NOT EXISTS pgcrypto;" + +# --- 5. Run Schema --- +echo "Applying schema..." + +# Use a temporary .pgpass file instead of PGPASSWORD environment variable. +# This avoids exposing the password via /proc or process listings. +PGPASS_TMP=$(mktemp) +chmod 600 "$PGPASS_TMP" +trap 'rm -f "$PGPASS_TMP"' EXIT + +# .pgpass format: hostname:port:database:username:password +echo "$DB_HOST:$DB_PORT:$DB_NAME:$DB_USER:$DB_PASS" > "$PGPASS_TMP" + +PGPASSFILE="$PGPASS_TMP" "$PSQL" -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -v ON_ERROR_STOP=1 -f "docs/db_schema.sql" + +echo "Database initialization complete!" 
diff --git a/scripts/populate_mirror.py b/scripts/populate_mirror.py new file mode 100644 index 0000000..88561e4 --- /dev/null +++ b/scripts/populate_mirror.py @@ -0,0 +1,436 @@ +#!/usr/bin/env python3 +""" +Populate Local arXiv Mirror +--------------------------- +Downloads a representative set of papers (Metadata + PDF/Source) to `data/local_mirror`. +Features: +- "Golden Set" of manually selected papers. +- Bulk fill from target categories. +- Polite rate limiting (default: 3 seconds/request). +- Resumable (skips existing files). +- SQLite Metadata Index. +- Efficient skipping: file-first checks, batch DB lookups, reuses API results. + +Usage: + python scripts/populate_mirror.py --count 100 --dry-run + python scripts/populate_mirror.py --categories cs.AI cs.CL --count 50 + python scripts/populate_mirror.py --max-size 50 # Skip files > 50MB +""" + +import argparse +import logging +import sqlite3 +import time +from pathlib import Path +from typing import Dict, List, Optional, Tuple + +import arxiv # type: ignore + +# Configuration +DATA_DIR = Path("data/local_mirror") +FILES_DIR = DATA_DIR / "files" +DB_PATH = DATA_DIR / "index.sqlite3" +RATE_LIMIT_DELAY = 3.0 # Seconds +DEFAULT_MAX_SIZE_MB = 100 # Default max file size in MB + +# "Golden Set" - Specific papers to stress test specific config rules +GOLDEN_SET_IDS = [ + "1706.03762", # Attention Is All You Need (High Relevance: "machine learning", "neural networks") + "1810.04805", # BERT (High Relevance: "NLP") + "2303.08774", # GPT-4 Technical Report (High Relevance: "LLM", "Artificial Intelligence") + "1904.09456", # Quantum ML (Target for Exclusion: "Quantum") + "2401.00001", # Recent random (Test Date parsing) +] + +# Categories to sample from (Config + Extras for noise) +DEFAULT_CATEGORIES = [ + "cs.AI", + "cs.CL", + "cs.LG", + "physics.comp-ph", # Config commented out, good for testing exclusion + "quant-ph", # Quantum Physics (Test exclusion) + "math.ST", # Statistics (Noise) +] + 
+logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s") +logger = logging.getLogger(__name__) + + +def parse_args(): + """Parse command-line arguments.""" + parser = argparse.ArgumentParser( + description="Populate local arXiv mirror with papers for testing.", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + %(prog)s --count 100 # Download ~100 papers across categories + %(prog)s --dry-run # Show what would be downloaded + %(prog)s --categories cs.AI cs.CL # Only these categories + %(prog)s --max-size 50 # Skip sources > 50MB + %(prog)s --count 200 --categories cs.LG # 200 papers from cs.LG only + """ + ) + parser.add_argument( + "--count", "-n", + type=int, + default=50, + help="Total number of papers to download across all categories (default: 50)" + ) + parser.add_argument( + "--categories", "-c", + nargs="+", + default=None, + help=f"Categories to sample from (default: {', '.join(DEFAULT_CATEGORIES)})" + ) + parser.add_argument( + "--dry-run", "-d", + action="store_true", + help="Show what would be downloaded without actually downloading" + ) + parser.add_argument( + "--max-size", "-m", + type=float, + default=DEFAULT_MAX_SIZE_MB, + help=f"Maximum file size in MB to download (default: {DEFAULT_MAX_SIZE_MB}MB)" + ) + parser.add_argument( + "--skip-golden", + action="store_true", + help="Skip the golden set of manually selected papers" + ) + return parser.parse_args() + + +def init_db(): + """Initialize SQLite database for metadata.""" + conn = sqlite3.connect(DB_PATH) + cursor = conn.cursor() + cursor.execute(""" + CREATE TABLE IF NOT EXISTS papers ( + id TEXT PRIMARY KEY, + title TEXT, + abstract TEXT, + authors TEXT, + categories TEXT, + published DATE, + updated DATE, + pdf_url TEXT, + doi TEXT, + local_file_path TEXT, + local_source_path TEXT + ) + """) + # Migration: Add local_source_path if it doesn't exist + try: + cursor.execute("ALTER TABLE papers ADD COLUMN local_source_path TEXT") + 
except sqlite3.OperationalError: + pass # Column likely already exists + + conn.commit() + return conn + + +def get_file_paths(paper_id: str) -> Tuple[Path, Path]: + """Get the expected file paths for a paper's PDF and source.""" + pdf_path = FILES_DIR / f"{paper_id}.pdf" + src_path = FILES_DIR / f"{paper_id}.tar.gz" + return pdf_path, src_path + + +def check_files_complete(paper_id: str) -> Tuple[bool, bool, bool]: + """ + Quick file-system check for paper completeness. + Returns: (pdf_exists, src_exists, both_complete) + """ + pdf_path, src_path = get_file_paths(paper_id) + pdf_exists = pdf_path.exists() + src_exists = src_path.exists() + return pdf_exists, src_exists, (pdf_exists and src_exists) + + +def check_file_size(path: Path, max_size_mb: float) -> bool: + """Check if a file exceeds the maximum size limit.""" + if not path.exists(): + return True # Non-existent files are "ok" (will be downloaded) + size_mb = path.stat().st_size / (1024 * 1024) + return size_mb <= max_size_mb + + +def batch_check_db_status(cursor: sqlite3.Cursor, paper_ids: List[str]) -> Dict[str, Tuple[Optional[str], Optional[str]]]: + """ + Batch check database status for multiple papers. + Returns dict: paper_id -> (local_file_path, local_source_path) or None if not in DB. + """ + if not paper_ids: + return {} + + placeholders = ",".join("?" * len(paper_ids)) + cursor.execute( + f"SELECT id, local_file_path, local_source_path FROM papers WHERE id IN ({placeholders})", + paper_ids + ) + results = {row[0]: (row[1], row[2]) for row in cursor.fetchall()} + return results + + +def should_skip_paper(paper_id: str, db_status: Optional[Tuple[Optional[str], Optional[str]]]) -> bool: + """ + Determine if a paper can be completely skipped. + Optimization: Check files FIRST (no DB/API needed if files exist). 
+ """ + pdf_exists, src_exists, both_complete = check_files_complete(paper_id) + + # Fast path: Both files exist on disk + if both_complete: + # Also verify DB has the paths recorded + if db_status is not None: + db_pdf, db_src = db_status + if db_pdf and db_src: + return True + + return False + + +def save_paper_metadata(conn: sqlite3.Connection, paper, paper_id: str, pdf_path: Optional[Path], src_path: Optional[Path]): + """Save or update paper metadata in the database.""" + cursor = conn.cursor() + + final_pdf_path = str(pdf_path.absolute()) if pdf_path and pdf_path.exists() else None + final_src_path = str(src_path.absolute()) if src_path and src_path.exists() else None + + logger.info(f"Indexing {paper_id}: {paper.title[:50]}...") + authors = ", ".join([a.name for a in paper.authors]) + categories = ", ".join(paper.categories) + + cursor.execute(""" + INSERT OR REPLACE INTO papers + (id, title, abstract, authors, categories, published, updated, pdf_url, doi, local_file_path, local_source_path) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + """, ( + paper_id, + paper.title, + paper.summary, + authors, + categories, + paper.published.isoformat(), + paper.updated.isoformat(), + paper.pdf_url, + paper.doi, + final_pdf_path, + final_src_path + )) + conn.commit() + + +def rate_limit_sleep(context: str = ""): + """Sleep for rate limiting with optional context logging.""" + if context: + logger.debug(f"Rate limiting ({RATE_LIMIT_DELAY}s) after {context}") + time.sleep(RATE_LIMIT_DELAY) + + +def download_paper_files(paper, paper_id: str, dry_run: bool = False, max_size_mb: float = DEFAULT_MAX_SIZE_MB) -> Tuple[Path, Path]: + """ + Download PDF and source files for a paper if missing. + Returns the paths (may or may not exist after download attempt). 
+ """ + pdf_path, src_path = get_file_paths(paper_id) + pdf_exists, src_exists, _ = check_files_complete(paper_id) + + + # Download PDF (if missing) + if not pdf_exists: + if dry_run: + logger.info(f"[DRY-RUN] Would download PDF for {paper_id}") + else: + logger.info(f"Downloading PDF for {paper_id}...") + try: + paper.download_pdf(dirpath=FILES_DIR, filename=f"{paper_id}.pdf") + rate_limit_sleep(f"PDF download for {paper_id}") + # Check size after download + if not check_file_size(pdf_path, max_size_mb): + size_mb = pdf_path.stat().st_size / (1024 * 1024) + logger.warning(f"PDF {paper_id}.pdf exceeds max size ({size_mb:.1f}MB > {max_size_mb}MB), removing") + pdf_path.unlink() + except Exception as e: + logger.error(f"Failed to download PDF for {paper_id}: {e}") + else: + logger.info(f"PDF {paper_id}.pdf exists.") + + # Download Source (if missing) + if not src_exists: + if dry_run: + logger.info(f"[DRY-RUN] Would download Source for {paper_id}") + else: + logger.info(f"Downloading Source for {paper_id}...") + try: + paper.download_source(dirpath=FILES_DIR, filename=f"{paper_id}.tar.gz") + rate_limit_sleep(f"Source download for {paper_id}") + # Check size after download + if not check_file_size(src_path, max_size_mb): + size_mb = src_path.stat().st_size / (1024 * 1024) + logger.warning(f"Source {paper_id}.tar.gz exceeds max size ({size_mb:.1f}MB > {max_size_mb}MB), removing") + src_path.unlink() + except Exception as e: + logger.error(f"Failed to download Source for {paper_id}: {e}") + else: + logger.info(f"Source {paper_id}.tar.gz exists.") + + return pdf_path, src_path + + +def download_paper_by_id(client: arxiv.Client, paper_id: str, conn: sqlite3.Connection, + db_status: Optional[Tuple[Optional[str], Optional[str]]] = None, + dry_run: bool = False, max_size_mb: float = DEFAULT_MAX_SIZE_MB): + """ + Download metadata, PDF, and source for a paper by ID. + Used for Golden Set where we only have IDs. 
+ """ + # Check if we can skip entirely + if should_skip_paper(paper_id, db_status): + logger.info(f"Skipping {paper_id} (All files present)") + return + + if dry_run: + logger.info(f"[DRY-RUN] Would fetch metadata for {paper_id}") + pdf_path, src_path = get_file_paths(paper_id) + download_paper_files(None, paper_id, dry_run=True, max_size_mb=max_size_mb) + return + + # Must fetch metadata from API + try: + search = arxiv.Search(id_list=[paper_id]) + paper = next(client.results(search)) + except (StopIteration, Exception) as e: + logger.error(f"Failed to fetch metadata for {paper_id}: {e}") + return + + # Download files and save metadata + pdf_path, src_path = download_paper_files(paper, paper_id, dry_run=dry_run, max_size_mb=max_size_mb) + save_paper_metadata(conn, paper, paper_id, pdf_path, src_path) + + +def process_paper_with_metadata(paper, conn: sqlite3.Connection, + db_status: Optional[Tuple[Optional[str], Optional[str]]] = None, + dry_run: bool = False, max_size_mb: float = DEFAULT_MAX_SIZE_MB): + """ + Process a paper when we already have the metadata (from bulk search). + This avoids redundant API calls. 
+ """ + paper_id = paper.entry_id.split('/')[-1] + + # Check if we can skip entirely + if should_skip_paper(paper_id, db_status): + logger.info(f"Skipping {paper_id} (All files present)") + return + + # Download files and save metadata (no API call needed - we have the paper object) + pdf_path, src_path = download_paper_files(paper, paper_id, dry_run=dry_run, max_size_mb=max_size_mb) + if not dry_run: + save_paper_metadata(conn, paper, paper_id, pdf_path, src_path) + else: + logger.info(f"[DRY-RUN] Would index {paper_id}: {paper.title[:50]}...") + + +def process_golden_set(client: arxiv.Client, conn: sqlite3.Connection, + dry_run: bool = False, max_size_mb: float = DEFAULT_MAX_SIZE_MB): + """Process the golden set with batch DB lookup optimization.""" + logger.info("--- Processing Golden Set ---") + + cursor = conn.cursor() + + # Batch check DB status for all golden set papers + db_statuses = batch_check_db_status(cursor, GOLDEN_SET_IDS) + + # Quick pre-filter: count how many can be skipped without any API calls + skippable = sum(1 for pid in GOLDEN_SET_IDS if should_skip_paper(pid, db_statuses.get(pid))) + logger.info(f"Golden Set: {skippable}/{len(GOLDEN_SET_IDS)} papers can be skipped (files complete)") + + for pid in GOLDEN_SET_IDS: + download_paper_by_id(client, pid, conn, db_statuses.get(pid), dry_run=dry_run, max_size_mb=max_size_mb) + + +def bulk_fill( + client: arxiv.Client, + conn: sqlite3.Connection, + count: int = 20, + categories: Optional[List[str]] = None, + dry_run: bool = False, + max_size_mb: float = DEFAULT_MAX_SIZE_MB, +): + """ + Download random papers from target categories. + Optimized: Reuses paper metadata from search results, batch DB checks. 
+ """ + if categories is None: + categories = DEFAULT_CATEGORIES + + logger.info(f"--- Processing Bulk Fill ({count} papers across {len(categories)} categories) ---") + if dry_run: + logger.info("[DRY-RUN MODE] No files will be downloaded") + + cursor = conn.cursor() + papers_per_category = count // len(categories) + 5 # Slight buffer + + for category in categories: + query = f"cat:{category}" + logger.info(f"Searching category: {category}") + + search = arxiv.Search( + query=query, + max_results=papers_per_category, + sort_by=arxiv.SortCriterion.SubmittedDate + ) + + # Collect papers first for batch DB lookup + papers_list = list(client.results(search)) + paper_ids = [p.entry_id.split('/')[-1] for p in papers_list] + + # Batch check DB status + db_statuses = batch_check_db_status(cursor, paper_ids) + + # Quick pre-filter stats + skippable = sum(1 for pid in paper_ids if should_skip_paper(pid, db_statuses.get(pid))) + logger.info(f"Category {category}: {skippable}/{len(paper_ids)} papers can be skipped") + + # Process each paper (reusing metadata from search results) + for paper in papers_list: + paper_id = paper.entry_id.split('/')[-1] + process_paper_with_metadata(paper, conn, db_statuses.get(paper_id), + dry_run=dry_run, max_size_mb=max_size_mb) + + +def main(): + args = parse_args() + + # Display configuration + categories = args.categories if args.categories else DEFAULT_CATEGORIES + logger.info("=" * 60) + logger.info("arXiv Mirror Population Script") + logger.info("=" * 60) + logger.info(f" Count: {args.count} papers") + logger.info(f" Categories: {', '.join(categories)}") + logger.info(f" Max Size: {args.max_size} MB") + logger.info(f" Dry Run: {args.dry_run}") + logger.info(f" Skip Golden: {args.skip_golden}") + logger.info("=" * 60) + + if not DATA_DIR.exists(): + DATA_DIR.mkdir(parents=True) + if not FILES_DIR.exists(): + FILES_DIR.mkdir(parents=True) + + conn = init_db() + client = arxiv.Client(page_size=100, delay_seconds=RATE_LIMIT_DELAY, 
num_retries=3) + + if not args.skip_golden: + process_golden_set(client, conn, dry_run=args.dry_run, max_size_mb=args.max_size) + + bulk_fill(client, conn, count=args.count, categories=categories, + dry_run=args.dry_run, max_size_mb=args.max_size) + + conn.close() + logger.info("Done.") + +if __name__ == "__main__": + main() diff --git a/scripts/test_db_conn.py b/scripts/test_db_conn.py new file mode 100644 index 0000000..1b37f16 --- /dev/null +++ b/scripts/test_db_conn.py @@ -0,0 +1,30 @@ +"""Simple script to test database connectivity.""" + +from pathlib import Path + +import psycopg +import yaml + + +def test_db_connection(): + config_path = Path("config.yaml") + with config_path.open("r") as handle: + config = yaml.safe_load(handle) + db_config = config["db"] + + try: + conn_str = f"host={db_config['host']} port={db_config['port']} dbname={db_config['database']} user={db_config['user']} password={db_config['password']} sslmode={db_config['sslmode']}" + with psycopg.connect(conn_str) as conn: + print("Successfully connected to the database!") + with conn.cursor() as cur: + cur.execute("SELECT version();") + version = cur.fetchone() + print(f"PostgreSQL version: {version[0]}") + except Exception as e: + print(f"Failed to connect to the database: {e}") + return False + return True + + +if __name__ == "__main__": + test_db_connection() diff --git a/scripts/verify_db_setup.py b/scripts/verify_db_setup.py new file mode 100644 index 0000000..fc7999d --- /dev/null +++ b/scripts/verify_db_setup.py @@ -0,0 +1,66 @@ +import logging +import sys +from pathlib import Path + +import yaml + +# Add src to sys.path to import paperweight modules +sys.path.append(str(Path(__file__).parent.parent / "src")) + +from paperweight.db import connect_db, is_db_enabled + +# Configure logging +logging.basicConfig(level=logging.INFO, format="%(message)s") +logger = logging.getLogger("verify_db") + +def load_config(): + config_path = Path("config.yaml") + if not config_path.exists(): + 
logger.error(f"❌ Config file not found: {config_path}") + sys.exit(1) + + with open(config_path, "r") as f: + return yaml.safe_load(f) + +def verify_db_connection(config): + if not is_db_enabled(config): + logger.info("ℹ️ Database is NOT enabled in config.yaml") + return True + + logger.info("🔍 Connecting to database...") + db_config = config.get("db", {}) + + try: + with connect_db(db_config) as conn: + with conn.cursor() as cur: + cur.execute("SELECT 1") + result = cur.fetchone() + if result and result[0] == 1: + logger.info(f"✅ Successfully connected to database '{db_config.get('database')}' at '{db_config.get('host')}:{db_config.get('port')}'") + + # Optional: Check if tables exist + cur.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'") + tables = cur.fetchall() + table_names = [t[0] for t in tables] + if table_names: + logger.info(f" Found tables: {', '.join(table_names)}") + else: + logger.warning(" ⚠️ Connected, but no tables found in 'public' schema.") + + return True + else: + logger.error("❌ Connected, but SELECT 1 failed.") + return False + except Exception as e: + logger.error(f"❌ Failed to connect to database: {e}") + return False + +if __name__ == "__main__": + print("🚀 Starting Paperweight Database Verification...") + config = load_config() + if verify_db_connection(config): + print("\n🎉 SUCCESS: Database connection verified.") + sys.exit(0) + else: + print("\n❌ FAILURE: Database verification failed.") + sys.exit(1) diff --git a/scripts/verify_email_setup.py b/scripts/verify_email_setup.py new file mode 100644 index 0000000..cf47064 --- /dev/null +++ b/scripts/verify_email_setup.py @@ -0,0 +1,105 @@ +import smtplib +import sys +import time +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from pathlib import Path + +import requests +import yaml + +# Configuration +CONFIG_PATH = Path("config.yaml") +MAILPIT_API_URL = "http://localhost:8025/api/v1/messages" +MAILPIT_WEB_URL = 
"http://localhost:8025" + +def load_config(): + if not CONFIG_PATH.exists(): + print(f"❌ Config file not found: {CONFIG_PATH}") + sys.exit(1) + + with open(CONFIG_PATH, "r") as f: + return yaml.safe_load(f) + +def send_test_email(config): + smtp_config = config.get("notifier", {}).get("email", {}) + + # Override for testing if not set to localhost + smtp_server = smtp_config.get("smtp_server") + smtp_port = smtp_config.get("smtp_port") + + if smtp_server != "localhost" or smtp_port != 1025: + print(f"⚠️ Config is configured for {smtp_server}:{smtp_port}") + print("ℹ️ For this test, we will force connection to localhost:1025 (Mailpit)") + smtp_server = "localhost" + smtp_port = 1025 + + sender = smtp_config.get("from", "test@example.com") + recipient = smtp_config.get("to", "user@example.com") + + msg = MIMEMultipart() + msg["From"] = sender + msg["To"] = recipient + msg["Subject"] = "Paperweight Validation Test" + + body = """ +

+    <h2>Paperweight Email Setup Verification</h2>
+    <p>Sent at: {}</p>
+    <p>If you are reading this, the email delivery pipeline is working correctly!</p>

+ + """.format(time.strftime("%Y-%m-%d %H:%M:%S")) + + msg.attach(MIMEText(body, "html")) + + try: + with smtplib.SMTP(smtp_server, smtp_port) as server: + server.sendmail(sender, recipient, msg.as_string()) + print(f"✅ Email sent to {recipient} via {smtp_server}:{smtp_port}") + return True + except Exception as e: + print(f"❌ Failed to send email: {e}") + return False + +def verify_receipt(): + print("🔍 Checking Mailpit for the message...") + # Give Mailpit a moment to index + time.sleep(1) + + try: + response = requests.get(MAILPIT_API_URL) + response.raise_for_status() + data = response.json() + + messages = data.get("messages", []) + if not messages: + print("❌ No messages found in Mailpit.") + return False + + # Look for our most recent message + latest = messages[0] + if latest.get("Subject") == "Paperweight Validation Test": + print("✅ Email verified in Mailpit!") + print(f"🔗 View it here: {MAILPIT_WEB_URL}/view/{latest['ID']}") + return True + else: + print(f"⚠️ Found a message, but subject matches '{latest.get('Subject')}'") + return False + + except Exception as e: + print(f"❌ Failed to verify with Mailpit API: {e}") + return False + +if __name__ == "__main__": + print("🚀 Starting Paperweight Email Validation...") + + config = load_config() + + if send_test_email(config): + if verify_receipt(): + print("\n🎉 SUCCESS: Email system is fully operational locally.") + else: + sys.exit(1) + else: + sys.exit(1) diff --git a/scripts/verify_mock.py b/scripts/verify_mock.py new file mode 100644 index 0000000..83be7d4 --- /dev/null +++ b/scripts/verify_mock.py @@ -0,0 +1,23 @@ +import arxiv + +from src.mocks.local_client import MockArxivClient + + +def test_mock_client(): + client = MockArxivClient() + print("Mirror DB:", client.mirror_db_path) + + # 1. 
Test Golden Set ID + search = arxiv.Search(id_list=["1706.03762"]) + results = list(client.results(search)) + print(f"Search by ID found {len(results)} results") + if results: + print("Title:", results[0].title) + + # 2. Test Category Search + search_cat = arxiv.Search(query="cat:cs.AI") + results_cat = list(client.results(search_cat)) + print(f"Search 'cat:cs.AI' found {len(results_cat)} results") + +if __name__ == "__main__": + test_mock_client() diff --git a/scripts/verify_pipeline.py b/scripts/verify_pipeline.py new file mode 100644 index 0000000..a257547 --- /dev/null +++ b/scripts/verify_pipeline.py @@ -0,0 +1,129 @@ +import logging +import os +import sys +from pathlib import Path + +import yaml + +# Add src to sys.path +sys.path.append(str(Path(__file__).parent.parent / "src")) + +from paperweight.analyzer import get_abstracts +from paperweight.processor import process_papers +from paperweight.scraper import get_recent_papers + +# Configure logging +logging.basicConfig(level=logging.INFO, format="%(message)s") +logger = logging.getLogger("verify_pipeline") + +def load_config(): + config_path = Path("config.yaml") + if not config_path.exists(): + logger.error(f"❌ Config file not found: {config_path}") + sys.exit(1) + + with open(config_path, "r") as f: + return yaml.safe_load(f) + +def verify_pipeline(config): # noqa: C901 + logger.info("🧪 Verifying Pipeline (Scraper -> Processor -> Analyzer)...") + + # Override config for fast testing + logger.info(" ℹ️ Overriding config to fetch max 1 paper from cs.AI...") + config["arxiv"]["max_results"] = 1 + config["arxiv"]["categories"] = ["cs.AI"] + + # 1. 
Scraper + logger.info("\n--- [1/3] Scraper Stage ---") + try: + # Use force_refresh=True to ensure we actually hit the API + papers = get_recent_papers(config, force_refresh=True) + + if not papers: + logger.error("❌ Scraper returned 0 papers.") + return False + + logger.info(f"✅ Scraper fetched {len(papers)} paper(s).") + logger.info(f" Title: {papers[0]['title'][:50]}...") + if 'content' not in papers[0] or not papers[0]['content']: + logger.warning(" ⚠️ Paper content is empty! Processor might fail.") + else: + logger.info(f" Content length: {len(papers[0]['content'])} chars") + + except Exception as e: + logger.error(f"❌ Scraper stage failed: {e}") + return False + + # 2. Processor + logger.info("\n--- [2/3] Processor Stage ---") + try: + processed_papers = process_papers(papers, config["processor"]) + + if processed_papers: + logger.info(f"✅ Processor passed. {len(processed_papers)} paper(s) met criteria.") + logger.info(f" Score: {processed_papers[0].get('relevance_score')}") + else: + logger.info("ℹ️ Processor passed but filtered all papers (low score). Pipeline is working logic-wise.") + # For verification purpose, let's pretend we have a paper to pass to analyzer + # if we really want to test analyzer, we might need to fake a high score or relax criteria + # But let's verify analyzer with the raw paper if processed list is empty, just to check API + if not processed_papers: + logger.info(" (Using raw paper for Analyzer check since Processor filtered it details)") + processed_papers = papers + + except Exception as e: + logger.error(f"❌ Processor stage failed: {e}") + return False + + # 3. Analyzer + logger.info("\n--- [3/3] Analyzer Stage ---") + analyzer_config = config.get("analyzer", {}) + provider = analyzer_config.get("llm_provider") + + if not provider: + logger.warning("⚠️ No LLM provider configured. 
Skipping Analyzer.") + return True + + logger.info(f" Provider: {provider}") + + # Check for API keys + if provider == "openai" and not os.environ.get("OPENAI_API_KEY"): + logger.warning("⚠️ OPENAI_API_KEY not found. Skipping Analyzer call.") + return True + if provider == "gemini" and not os.environ.get("GEMINI_API_KEY"): + logger.warning("⚠️ GEMINI_API_KEY not found. Skipping Analyzer call.") + return True + + try: + # Limit to 1 paper for cost/speed + target_papers = processed_papers[:1] + logger.info(f" Sending {len(target_papers)} paper(s) to LLM...") + + summaries = get_abstracts(target_papers, analyzer_config) + + if summaries: + logger.info("✅ Analyzer returned summaries.") + if summaries[0]: + logger.info(f" Summary snippet: {summaries[0][:50]}...") + else: + logger.warning(" Summary was empty string (might be okay if abstract fallback used).") + else: + logger.error("❌ Analyzer returned None/Empty list.") + return False + + except Exception as e: + logger.error(f"❌ Analyzer stage failed: {e}") + return False + + return True + +if __name__ == "__main__": + print("🚀 Starting Paperweight Pipeline Verification...") + config = load_config() + + if verify_pipeline(config): + print("\n🎉 SUCCESS: Core pipeline verified.") + sys.exit(0) + else: + print("\n❌ FAILURE: Pipeline verification failed.") + sys.exit(1) diff --git a/setup.py b/setup.py deleted file mode 100644 index ff22c80..0000000 --- a/setup.py +++ /dev/null @@ -1,34 +0,0 @@ -# type: ignore -from setuptools import find_packages, setup - -setup( - name="paperweight", - version="0.1.2", - package_dir={"": "src"}, - packages=find_packages(where="src"), - install_requires=[ - "pypdf", - "python-dotenv", - "PyYAML", - "requests", - "simplerllm", - "tiktoken", - "tenacity", - ], - entry_points={ - "console_scripts": [ - "paperweight=paperweight.main:main", - ], - }, - author="Sean Brar", - description="Automatically retrieve, filter, and summarize recent academic papers from arXiv", - 
long_description=open("README.md").read(), - long_description_content_type="text/markdown", - url="https://github.com/seanbrar/paperweight", - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - ], - python_requires=">=3.10", -) diff --git a/src/mocks/__init__.py b/src/mocks/__init__.py new file mode 100644 index 0000000..8046e86 --- /dev/null +++ b/src/mocks/__init__.py @@ -0,0 +1,21 @@ +"""Mock implementations for testing without network access.""" + +from src.mocks.local_client import ( + DEFAULT_DB_PATH, + DEFAULT_FILES_DIR, + DEFAULT_MIRROR_PATH, + MockArxivClient, + mock_fetch_arxiv_papers, + mock_fetch_paper_content, + patch_scraper_for_local_mirror, +) + +__all__ = [ + "DEFAULT_DB_PATH", + "DEFAULT_FILES_DIR", + "DEFAULT_MIRROR_PATH", + "MockArxivClient", + "mock_fetch_arxiv_papers", + "mock_fetch_paper_content", + "patch_scraper_for_local_mirror", +] diff --git a/src/mocks/local_client.py b/src/mocks/local_client.py new file mode 100644 index 0000000..4d9e836 --- /dev/null +++ b/src/mocks/local_client.py @@ -0,0 +1,280 @@ +"""Local mock client for testing without real arXiv API calls. + +This module provides mock implementations that read from data/local_mirror +to enable integration testing without network access. +""" + +import re +import shutil +import sqlite3 +from datetime import datetime +from pathlib import Path +from typing import Any, Dict, Generator, List, Optional, Tuple + +import arxiv + +# Default paths - can be overridden +DEFAULT_MIRROR_PATH = Path("data/local_mirror") +DEFAULT_DB_PATH = DEFAULT_MIRROR_PATH / "index.sqlite3" +DEFAULT_FILES_DIR = DEFAULT_MIRROR_PATH / "files" + + +def mock_fetch_paper_content( + paper_id: str, + files_dir: Path = DEFAULT_FILES_DIR +) -> Tuple[Optional[bytes], Optional[str]]: + """Mock replacement for paperweight.scraper.fetch_paper_content. + + Reads content from local files instead of making HTTP requests. 
+ Follows the same source-first, PDF-fallback pattern as the real function. + + Args: + paper_id: The arXiv paper ID (e.g., "2401.12345" or "2401.12345v1") + files_dir: Directory containing the local mirror files + + Returns: + Tuple of (content_bytes, method) where method is "source" or "pdf", + or (None, None) if no file found. + """ + # Normalize paper_id - strip version if present for base lookup + base_id = paper_id.split('v')[0] if 'v' in paper_id else paper_id + + # Try different ID patterns (with/without version) + id_patterns = [paper_id] + if paper_id != base_id: + id_patterns.append(base_id) + + # Also try finding versioned files if we only have base ID + if paper_id == base_id: + # Look for any versioned file + for f in files_dir.glob(f"{base_id}v*.tar.gz"): + id_patterns.insert(0, f.stem.replace('.tar', '')) + break + for f in files_dir.glob(f"{base_id}v*.pdf"): + if f.stem not in id_patterns: + id_patterns.insert(0, f.stem) + break + + # Try source first (.tar.gz), then PDF + for pid in id_patterns: + source_path = files_dir / f"{pid}.tar.gz" + if source_path.exists(): + return source_path.read_bytes(), "source" + + for pid in id_patterns: + pdf_path = files_dir / f"{pid}.pdf" + if pdf_path.exists(): + return pdf_path.read_bytes(), "pdf" + + return None, None + + +def mock_fetch_arxiv_papers( + category: str, + start_date: Any, + max_results: Optional[int] = None, + db_path: Path = DEFAULT_DB_PATH +) -> List[Dict[str, Any]]: + """Mock replacement for paperweight.scraper.fetch_arxiv_papers. + + Reads paper metadata from local SQLite database instead of arXiv API. + + Args: + category: The arXiv category to filter by (e.g., "cs.AI") + start_date: Not used in mock (we return all matching papers) + max_results: Maximum number of results to return + db_path: Path to the SQLite database + + Returns: + List of paper dictionaries with title, link, date, abstract. 
+ """ + if not db_path.exists(): + raise FileNotFoundError(f"Local mirror DB not found at {db_path}") + + conn = sqlite3.connect(db_path) + conn.row_factory = sqlite3.Row + cursor = conn.cursor() + + sql = "SELECT * FROM papers WHERE categories LIKE ?" + params: List[Any] = [f"%{category}%"] + + if max_results: + sql += " LIMIT ?" + params.append(int(max_results)) + + cursor.execute(sql, params) + rows = cursor.fetchall() + + papers = [] + for row in rows: + papers.append({ + "title": row["title"], + "link": f"http://arxiv.org/abs/{row['id']}", + "date": datetime.fromisoformat(row["published"]).date(), + "abstract": row["abstract"], + }) + + conn.close() + return papers + + +def patch_scraper_for_local_mirror(monkeypatch, files_dir: Path = DEFAULT_FILES_DIR): + """Apply all necessary patches to use local mirror instead of real API. + + Use this in pytest fixtures to mock the scraper module. + + Args: + monkeypatch: pytest monkeypatch fixture + files_dir: Directory containing local mirror files + + Example: + @pytest.fixture + def patched_scraper(monkeypatch): + patch_scraper_for_local_mirror(monkeypatch) + """ + def local_fetch_paper_content(paper_id): + return mock_fetch_paper_content(paper_id, files_dir) + + monkeypatch.setattr( + "paperweight.scraper.fetch_paper_content", + local_fetch_paper_content + ) + + # Also patch the retry-decorated wrapper if needed + monkeypatch.setattr( + "paperweight.scraper.fetch_arxiv_papers", + mock_fetch_arxiv_papers + ) + + +class MockArxivClient: + """Drop-in replacement for arxiv.Client that searches a local SQLite mirror. + + This mocks the arxiv library's Client class for use in tests that need + to work with arxiv.Search objects directly. 
+ """ + + def __init__( + self, + page_size: int = 100, + delay_seconds: float = 3, + num_retries: int = 3, + mirror_path: Path = DEFAULT_MIRROR_PATH + ): + self.page_size = page_size + self.delay_seconds = delay_seconds + self.num_retries = num_retries + self.mirror_db_path = mirror_path / "index.sqlite3" + self.files_dir = mirror_path / "files" + + if not self.mirror_db_path.exists(): + raise FileNotFoundError( + f"Local mirror DB not found at {self.mirror_db_path}. " + "Run scripts/populate_mirror.py first." + ) + + def results( + self, + search: arxiv.Search, + offset: int = 0 + ) -> Generator[arxiv.Result, None, None]: + """Execute search against local SQLite database.""" + conn = sqlite3.connect(self.mirror_db_path) + conn.row_factory = sqlite3.Row + cursor = conn.cursor() + + query_str = getattr(search, 'query', '') + id_list = getattr(search, 'id_list', []) + + sql = "SELECT * FROM papers WHERE 1=1" + params: List[Any] = [] + + if id_list: + conditions = [] + for paper_id in id_list: + if re.search(r"v\d+$", paper_id): + conditions.append("id = ?") + params.append(paper_id) + else: + conditions.append("id LIKE ?") + params.append(f"{paper_id}%") + sql += " AND (" + " OR ".join(conditions) + ")" + elif query_str: + terms = query_str.split() + for term in terms: + if term.startswith("cat:"): + cat = term.split(":", 1)[1] + sql += " AND categories LIKE ?" + params.append(f"%{cat}%") + else: + sql += " AND (title LIKE ? OR abstract LIKE ?)" + params.append(f"%{term}%") + params.append(f"%{term}%") + + max_results = getattr(search, 'max_results', None) + if max_results: + sql += " LIMIT ?" 
+ params.append(int(max_results)) + + cursor.execute(sql, params) + rows = cursor.fetchall() + + for row in rows: + yield self._row_to_result(row) + + conn.close() + + def _row_to_result(self, row: sqlite3.Row) -> arxiv.Result: + """Convert a SQLite row to an arxiv.Result object.""" + + class Author: + def __init__(self, name: str): + self.name = name + + authors = [Author(n.strip()) for n in row['authors'].split(',')] + paper_id = row['id'] + + res = arxiv.Result( + entry_id=f"http://arxiv.org/abs/{paper_id}", + updated=datetime.fromisoformat(row['updated']), + published=datetime.fromisoformat(row['published']), + title=row['title'], + authors=authors, + summary=row['abstract'], + comment=None, + journal_ref=None, + doi=row['doi'], + primary_category=row['categories'].split(',')[0].strip(), + categories=[cat.strip() for cat in row['categories'].split(',')], + links=[] + ) + + # Monkey-patch download methods to use local files + local_pdf_path = row['local_file_path'] + local_source_path = row['local_source_path'] + + def mock_download_pdf(dirpath: str = './', filename: str = '') -> str: + if not filename: + filename = f"{paper_id}.pdf" + target_path = Path(dirpath) / filename + + if local_pdf_path and Path(local_pdf_path).exists(): + shutil.copy(local_pdf_path, target_path) + return str(target_path) + raise FileNotFoundError(f"Mock PDF file missing for {paper_id}") + + def mock_download_source(dirpath: str = './', filename: str = '') -> str: + if not filename: + filename = f"{paper_id}.tar.gz" + target_path = Path(dirpath) / filename + + if local_source_path and Path(local_source_path).exists(): + shutil.copy(local_source_path, target_path) + return str(target_path) + raise FileNotFoundError(f"Mock source file missing for {paper_id}") + + res.download_pdf = mock_download_pdf # type: ignore + res.download_source = mock_download_source # type: ignore + res.pdf_url = row['pdf_url'] + + return res diff --git a/src/paperweight/__main__.py b/src/paperweight/__main__.py 
index 03c2da1..86ec844 100644 --- a/src/paperweight/__main__.py +++ b/src/paperweight/__main__.py @@ -1,4 +1,6 @@ +import sys + from paperweight.main import main if __name__ == "__main__": - main() + sys.exit(main()) diff --git a/src/paperweight/analyzer.py b/src/paperweight/analyzer.py index 789f121..a92bf38 100644 --- a/src/paperweight/analyzer.py +++ b/src/paperweight/analyzer.py @@ -1,21 +1,21 @@ -"""Module for analyzing and summarizing academic papers. +"""Analyze and summarize papers. -This module provides functionality for analyzing paper content using LLMs (Language Model Models) -and extracting relevant information. It supports different analysis types including abstract -extraction and paper summarization using various LLM providers. +The pipeline passes the *analyzer section* of the config into this module +(i.e. ``config["analyzer"]``). Keep this module's API aligned with that shape. """ +import asyncio +import json import logging -from typing import Any, Dict +import os +from typing import Any, Dict, List, Literal, cast -from SimplerLLM.language.llm import ( # type: ignore - LLM, - LLMProvider, -) -from tenacity import retry, stop_after_attempt, wait_exponential +from pollux import Config, RetryPolicy, Source, run from paperweight.utils import count_tokens +ProviderName = Literal["gemini", "openai"] + logger = logging.getLogger(__name__) @@ -42,22 +42,204 @@ def get_abstracts(processed_papers, config): raise ValueError(f"Unknown analysis type: {analysis_type}") -@retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=4, max=10)) +def _truncate_for_prompt( + content: str, + prompt_prefix: str, + *, + max_input_tokens: int, + max_input_chars: int, +) -> str: + """Best-effort keep prompt within a reasonable size. + + Pollux/providers have their own limits; this is just a guardrail. 
+ """ + content = (content or "").strip() + if max_input_chars > 0 and len(content) > max_input_chars: + content = content[:max_input_chars].rstrip() + "\n\n[TRUNCATED]\n" + + # Token-based trim (approx). We scale down the content length until the prompt fits. + if max_input_tokens > 0: + prompt = f"{prompt_prefix}{content}" + tokens = count_tokens(prompt) + if tokens > max_input_tokens and content: + # Approximate proportional truncation to avoid slow iterative trimming. + scale = max_input_tokens / max(tokens, 1) + target_chars = max(1000, int(len(content) * scale * 0.9)) + content = content[:target_chars].rstrip() + "\n\n[TRUNCATED]\n" + + return content + + +def _default_model_for_provider(provider: str) -> str: + provider = (provider or "").lower().strip() + if provider == "openai": + return "gpt-5-nano" + if provider == "gemini": + return "gemini-2.5-flash-lite" + return "" + + +def _resolve_triage_model_config( + full_config: Dict[str, Any], +) -> tuple[str, str, str, float, int]: + """Resolve provider/model/key and thresholds for triage mode.""" + triage_cfg = full_config.get("triage", {}) + analyzer_cfg = full_config.get("analyzer", {}) + + provider = ( + triage_cfg.get("llm_provider") + or analyzer_cfg.get("llm_provider") + or "openai" + ).lower() + model = triage_cfg.get("model") or _default_model_for_provider(provider) + api_key = ( + triage_cfg.get("api_key") + or analyzer_cfg.get("api_key") + or os.getenv(f"{provider.upper()}_API_KEY") + or "" + ) + min_score = float(triage_cfg.get("min_score", 60.0)) + max_selected = int(triage_cfg.get("max_selected", 25)) + return provider, model, api_key, min_score, max_selected + + +def _heuristic_triage_score(paper: Dict[str, Any], profile_terms: List[str]) -> float: + text = f"{paper.get('title', '')}\n{paper.get('abstract', '')}".lower() + hits = 0 + for term in profile_terms: + if term and term.lower() in text: + hits += 1 + if not profile_terms: + return 50.0 + return min(100.0, 100.0 * (hits / 
len(profile_terms))) + + +def _triage_one_paper( + paper: Dict[str, Any], + pollux_config: Config, + profile: str, + *, + min_score: float, +) -> Dict[str, Any]: + title = (paper.get("title") or "").strip() + abstract = (paper.get("abstract") or "").strip() + + prompt = ( + "You are triaging arXiv papers for relevance.\n" + "Return JSON only with keys: include (boolean), score (0-100 number), rationale (string).\n" + "Be strict. Include only if likely useful to the profile.\n\n" + f"Profile:\n{profile}\n\n" + f"Title: {title}\n\n" + f"Abstract:\n{abstract}\n" + ) + + result = asyncio.run(run(prompt, config=pollux_config)) + response = None + if isinstance(result, dict): + answers = result.get("answers") + if isinstance(answers, list) and answers: + response = answers[0] + if not response: + return { + "include": False, + "score": 0.0, + "rationale": "No model response", + } + + raw = str(response).strip() + start = raw.find("{") + end = raw.rfind("}") + if start >= 0 and end > start: + raw = raw[start : end + 1] + + parsed = json.loads(raw) + score = float(parsed.get("score", 0.0)) + include = bool(parsed.get("include", score >= min_score)) + rationale = str(parsed.get("rationale", "")).strip() + return {"include": include, "score": score, "rationale": rationale} + + +def triage_papers( + papers: List[Dict[str, Any]], + full_config: Dict[str, Any], +) -> List[Dict[str, Any]]: + """AI-first triage using title+abstract before expensive content processing.""" + if not papers: + return [] + + triage_cfg = full_config.get("triage", {}) + if not triage_cfg.get("enabled", True): + return papers + + provider, model, api_key, min_score, max_selected = _resolve_triage_model_config( + full_config + ) + profile_terms = full_config.get("processor", {}).get("keywords", []) + profile_text = "\n".join(f"- {term}" for term in profile_terms if term) + + if provider not in ("openai", "gemini") or not api_key: + logger.warning( + "AI triage is enabled but provider/key is unavailable; 
using heuristic triage." + ) + shortlisted = [] + for paper in papers: + score = _heuristic_triage_score(paper, profile_terms) + paper["triage_score"] = score + paper["triage_rationale"] = "Keyword/abstract heuristic fallback" + if score >= min_score: + shortlisted.append(paper) + return shortlisted[:max_selected] + + provider_name = cast(ProviderName, provider) + pollux_config = Config( + provider=provider_name, + model=model, + api_key=api_key, + retry=RetryPolicy(max_attempts=2, initial_delay_s=1.0, max_delay_s=5.0), + ) + + shortlisted = [] + for paper in papers: + try: + decision = _triage_one_paper( + paper, + pollux_config, + profile_text, + min_score=min_score, + ) + except Exception as e: + logger.warning("AI triage failed for '%s': %s", paper.get("title", ""), e) + score = _heuristic_triage_score(paper, profile_terms) + decision = { + "include": score >= min_score, + "score": score, + "rationale": "LLM error; keyword/abstract heuristic fallback", + } + + paper["triage_score"] = float(decision["score"]) + paper["triage_rationale"] = decision["rationale"] + if decision["include"] and float(decision["score"]) >= min_score: + shortlisted.append(paper) + + logger.info("AI triage selected %s/%s papers", len(shortlisted), len(papers)) + return shortlisted[:max_selected] + + def summarize_paper(paper: Dict[str, Any], config: Dict[str, Any]) -> str: """Generate a summary of a paper using an LLM. + Uses Pollux for LLM interaction. Pollux handles retries internally + via RetryPolicy (exponential backoff with jitter). + Args: paper: Dictionary containing paper data including content and metadata. config: Configuration dictionary containing LLM settings. Returns: A string containing the generated summary. - - Raises: - ValueError: If no valid LLM provider or API key is available. 
""" - llm_provider = config.get("analyzer", {}).get("llm_provider", "openai").lower() - api_key = config.get("analyzer", {}).get("api_key") + llm_provider = (config.get("llm_provider") or "openai").lower().strip() + api_key = config.get("api_key") if llm_provider not in ["openai", "gemini"] or not api_key: logger.warning( @@ -66,19 +248,55 @@ def summarize_paper(paper: Dict[str, Any], config: Dict[str, Any]) -> str: return paper["abstract"] try: - provider = LLMProvider[llm_provider.upper()] - model_name = ( - "gpt-4o-mini" if provider == LLMProvider.OPENAI else "gemini-1.5-flash" + provider: ProviderName = llm_provider # type: ignore[assignment] # guarded above + model_name = (config.get("model") or "").strip() or _default_model_for_provider( + llm_provider + ) + pollux_config = Config( + provider=provider, + model=model_name, + api_key=api_key, + retry=RetryPolicy(max_attempts=3, initial_delay_s=1.0, max_delay_s=10.0), ) - llm_instance = LLM.create( - provider=provider, model_name=model_name, api_key=api_key + + title = (paper.get("title") or "").strip() + abstract = (paper.get("abstract") or "").strip() + content = paper.get("content") or "" + + # Guardrails. Defaults intentionally conservative. 
+ max_input_tokens = int(config.get("max_input_tokens", 7000)) + max_input_chars = int(config.get("max_input_chars", 20_000)) + + prompt = ( + "Summarize the paper for a busy researcher.\n" + "Constraints:\n" + "- Be accurate; do not invent results.\n" + "- 4-6 sentences.\n" + "- Include: problem, approach, key results/claims, and who should read it.\n\n" + f"Title: {title}\n\n" + f"Abstract:\n{abstract}\n\n" + ) + + content = _truncate_for_prompt( + str(content), + prompt, + max_input_tokens=max_input_tokens, + max_input_chars=max_input_chars, ) - prompt = f"Write a concise, accurate summary of the following paper's content in about 3-5 sentences:\n\n```{paper['content']}```" + source = Source.from_text(content, identifier=title or "paper-content") - input_tokens = count_tokens(prompt) + input_tokens = count_tokens(prompt) + count_tokens(content) logger.info(f"Input token count: {input_tokens}") - response = llm_instance.generate_response(prompt=prompt) + result = asyncio.run(run(prompt, source=source, config=pollux_config)) + response = None + if isinstance(result, dict): + answers = result.get("answers") + if isinstance(answers, list) and answers: + response = answers[0] + if not response: + logger.warning("LLM returned no answers; falling back to abstract.") + return paper.get("abstract", "") output_tokens = count_tokens(response) logger.info(f"Output token count: {output_tokens}") @@ -87,28 +305,3 @@ def summarize_paper(paper: Dict[str, Any], config: Dict[str, Any]) -> str: except Exception as e: logger.error(f"Error summarizing paper: {e}", exc_info=True) return paper["abstract"] - - -def create_llm_instance(provider: str, api_key: str) -> LLM: - """Create an instance of the specified LLM provider. - - Args: - provider: The name of the LLM provider ('openai' or 'gemini'). - api_key: API key for the specified provider. - - Returns: - An initialized LLM instance. - - Raises: - ValueError: If an unsupported provider is specified. 
- """ - if provider == "openai": - return LLM.create( - provider=LLMProvider.OPENAI, model_name="gpt-4o-mini", api_key=api_key - ) - elif provider == "gemini": - return LLM.create( - provider=LLMProvider.GEMINI, model_name="gemini-1.5-flash", api_key=api_key - ) - else: - raise ValueError(f"Unsupported LLM provider: {provider}") diff --git a/src/paperweight/db.py b/src/paperweight/db.py new file mode 100644 index 0000000..41e7bac --- /dev/null +++ b/src/paperweight/db.py @@ -0,0 +1,45 @@ +"""Database connection helpers for paperweight.""" + +from contextlib import contextmanager +from typing import Any, Dict, Generator + +import psycopg +from psycopg import Connection + + +class DatabaseConnectionError(RuntimeError): + """Raised when a configured database is unreachable.""" + + +def is_db_enabled(config: Dict[str, Any]) -> bool: + """Check if database persistence is enabled in configuration.""" + return bool(config.get("db", {}).get("enabled")) + + +@contextmanager +def connect_db( + db_config: Dict[str, Any], autocommit: bool = False +) -> Generator[Connection, None, None]: + """Create a database connection. + + Args: + db_config: Database configuration dictionary. + autocommit: If True, each statement commits immediately. + If False (default), use explicit transactions. + + Yields: + A psycopg connection object. 
+ """ + conn = psycopg.connect( + host=db_config["host"], + port=db_config["port"], + dbname=db_config["database"], + user=db_config["user"], + password=db_config["password"], + sslmode=db_config.get("sslmode", "prefer"), + autocommit=autocommit, + ) + try: + yield conn + finally: + conn.close() diff --git a/src/paperweight/main.py b/src/paperweight/main.py index 1cc152b..b951452 100644 --- a/src/paperweight/main.py +++ b/src/paperweight/main.py @@ -7,22 +7,77 @@ import argparse import logging +import os +import sys import traceback +from pathlib import Path import requests import yaml -from paperweight.analyzer import get_abstracts +from paperweight.analyzer import get_abstracts, triage_papers +from paperweight.db import DatabaseConnectionError, connect_db, is_db_enabled from paperweight.logging_config import setup_logging -from paperweight.notifier import compile_and_send_notifications +from paperweight.notifier import ( + compile_and_send_notifications, + render_atom_feed, + render_json_digest, + render_text_digest, + write_output, +) from paperweight.processor import process_papers -from paperweight.scraper import get_recent_papers -from paperweight.utils import load_config +from paperweight.scraper import get_recent_papers, hydrate_papers_with_content +from paperweight.storage import ( + create_run, + finish_run, + insert_artifacts, + insert_scores, + insert_summaries, + upsert_papers, +) +from paperweight.utils import get_package_version, hash_config, load_config logger = logging.getLogger(__name__) +MINIMAL_CONFIG_TEMPLATE = """arxiv: + categories: + - cs.AI + - cs.CL + max_results: 50 -def setup_and_get_papers(force_refresh): +triage: + enabled: true + llm_provider: openai + min_score: 60 + max_selected: 25 + +processor: + keywords: + - transformer + - reasoning + - language model + exclusion_keywords: [] + important_words: [] + title_keyword_weight: 3 + abstract_keyword_weight: 2 + content_keyword_weight: 1 + exclusion_keyword_penalty: 5 + 
important_words_weight: 0.5 + min_score: 10 + +analyzer: + type: abstract + llm_provider: openai + max_input_tokens: 7000 + max_input_chars: 20000 + +logging: + level: INFO + file: paperweight.log +""" + + +def setup_and_get_papers(force_refresh, include_content=True, config_path="config.yaml"): """Set up the application and fetch papers. Args: @@ -33,15 +88,37 @@ def setup_and_get_papers(force_refresh): Tuple of (papers, config) where papers is a list of paper dictionaries and config is the loaded configuration dictionary. """ - config = load_config() + config = load_config(config_path=config_path) setup_logging(config["logging"]) logger.info("Configuration loaded successfully") if force_refresh: logger.info("Force refresh requested. Ignoring last processed date.") - return get_recent_papers(force_refresh=True), config + return ( + get_recent_papers( + config, force_refresh=True, include_content=include_content + ), + config, + ) else: - return get_recent_papers(), config + return get_recent_papers(config, include_content=include_content), config + + +def get_summary_model(config): + """Extract the summary model identifier from configuration. + + Args: + config: Configuration dictionary. + + Returns: + Model identifier string or None if not configured. + """ + analyzer_type = config.get("analyzer", {}).get("type") + if analyzer_type == "summary": + return config["analyzer"].get("llm_provider") + elif analyzer_type == "abstract": + return "abstract" + return None def process_and_summarize_papers(recent_papers, config): @@ -74,52 +151,385 @@ def process_and_summarize_papers(recent_papers, config): return processed_papers -def main(): - """Main entry point for the paperweight application. +def _initialize_db_run(config, recent_papers): + """Initialize a database run and persist paper metadata. - This function parses command line arguments, coordinates the paper processing - pipeline, and handles any errors that occur during execution. 
+ Args: + config: Configuration dictionary. + recent_papers: List of paper dictionaries. Returns: - 0 on successful execution, 1 on error. + Tuple of (run_id, paper_id_map) where run_id is a UUID and + paper_id_map maps (arxiv_id, version) to database UUIDs. """ - parser = argparse.ArgumentParser( - description="paperweight: Fetch and process arXiv papers" + config_hash = hash_config(config) + pipeline_version = get_package_version() + with connect_db(config["db"]) as conn: + run_id = create_run(conn, config_hash, pipeline_version) + paper_id_map = {} + if recent_papers: + paper_id_map = upsert_papers(conn, recent_papers) + # NOTE: Artifacts are already written to disk by get_recent_papers() + # (via scraper._store_artifacts). This call records their metadata in DB. + insert_artifacts(conn, recent_papers, paper_id_map) + conn.commit() + return run_id, paper_id_map + + +def _persist_results(config, run_id, processed_papers, paper_id_map): + """Persist processing results (scores and summaries) to the database. + + Args: + config: Configuration dictionary. + run_id: UUID of the current run. + processed_papers: List of processed paper dictionaries. + paper_id_map: Mapping of (arxiv_id, version) to database UUIDs. + """ + summary_model = get_summary_model(config) + with connect_db(config["db"]) as conn: + insert_scores(conn, run_id, processed_papers, paper_id_map) + insert_summaries(conn, run_id, processed_papers, paper_id_map, summary_model) + conn.commit() + + +def _finalize_run(config, run_id, status, notes): + """Mark a pipeline run as finished in the database. + + Args: + config: Configuration dictionary. + run_id: UUID of the run to finalize. + status: Final status ('success' or 'failed'). + notes: Optional notes (e.g., error message). 
+ """ + try: + with connect_db(config["db"], autocommit=True) as conn: + finish_run(conn, run_id, status, notes) + except Exception as e: + logger.error(f"Failed to finalize run status: {e}") + + +def _get_error_message(error): + """Get a human-readable error message for known exception types. + + Args: + error: The exception that occurred. + + Returns: + Human-readable error description string. + """ + if isinstance(error, requests.RequestException): + return "Network error occurred" + if isinstance(error, yaml.YAMLError): + return "Configuration error" + if isinstance(error, KeyError): + return "Missing configuration key" + if isinstance(error, ValueError): + return "Configuration validation error" + if isinstance(error, DatabaseConnectionError): + return "Database error" + return "An unexpected error occurred" + + +def _handle_error(error, error_type): + """Log an error and return its string representation. + + Args: + error: The exception that occurred. + error_type: Human-readable description of the error type. + + Returns: + String representation of the error for storage. 
+ """ + logger.error(f"{error_type}: {error}") + return str(error) + + +def _deliver_output(processed_papers, config, args): + """Deliver processed papers via the requested adapter.""" + if args.max_items and args.max_items > 0: + processed_papers = processed_papers[: args.max_items] + + if args.delivery == "stdout": + digest = render_text_digest(processed_papers, sort_order=args.sort_order) + write_output(digest, args.output) + return + + if args.delivery == "json": + json_payload = render_json_digest(processed_papers, sort_order=args.sort_order) + write_output(json_payload + "\n", args.output) + return + + if args.delivery == "atom": + feed_config = config.get("feed", {}) + feed_xml = render_atom_feed( + processed_papers, + sort_order=args.sort_order, + feed_title=feed_config.get("title", "paperweight"), + feed_id=feed_config.get("id", "https://github.com/seanbrar/paperweight"), + feed_link=feed_config.get("link", "https://github.com/seanbrar/paperweight"), + ) + write_output(feed_xml, args.output) + return + + notifier_config = config.get("notifier") + if not notifier_config: + raise ValueError("Email delivery requested but notifier config is missing.") + + notification_sent = compile_and_send_notifications(processed_papers, notifier_config) + if notification_sent: + logger.info("Notifications compiled and sent successfully") + else: + logger.warning("Failed to send notifications") + + +def _apply_triage_and_hydrate(recent_papers, config): + """AI triage on metadata, then fetch full content only for shortlisted papers.""" + triaged_papers = triage_papers(recent_papers, config) + if not triaged_papers: + logger.info("AI triage selected no papers. 
Exiting.") + return [] + + return hydrate_papers_with_content(triaged_papers, config) + + +def _add_run_arguments(parser: argparse.ArgumentParser) -> None: + parser.add_argument( + "--config", + default="config.yaml", + help="Path to config file (default: config.yaml)", ) parser.add_argument( "--force-refresh", action="store_true", help="Force refresh papers regardless of last processed date", ) - args = parser.parse_args() + parser.add_argument( + "--delivery", + choices=["stdout", "json", "atom", "email"], + default="stdout", + help="Delivery target for results (default: stdout)", + ) + parser.add_argument( + "--output", + type=str, + help="Optional output file path for stdout/atom delivery", + ) + parser.add_argument( + "--sort-order", + choices=["relevance", "alphabetical", "publication_time"], + default="relevance", + help="Sort order for digest output", + ) + parser.add_argument( + "--max-items", + type=int, + default=0, + help="Optional cap on number of delivered papers (0 = no cap)", + ) + + +def _build_cli_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + description="paperweight: Fetch, triage, and summarize arXiv papers" + ) + subparsers = parser.add_subparsers(dest="command") + + run_parser = subparsers.add_parser("run", help="Run the paperweight pipeline") + _add_run_arguments(run_parser) + + init_parser = subparsers.add_parser("init", help="Create a minimal config file") + init_parser.add_argument( + "--config", + default="config.yaml", + help="Path to write config file (default: config.yaml)", + ) + init_parser.add_argument( + "--force", + action="store_true", + help="Overwrite existing config file if present", + ) + + doctor_parser = subparsers.add_parser("doctor", help="Validate local configuration") + doctor_parser.add_argument( + "--config", + default="config.yaml", + help="Path to config file (default: config.yaml)", + ) + doctor_parser.add_argument( + "--strict", + action="store_true", + help="Return non-zero if any 
warnings are present", + ) + + return parser + + +def _parse_args(argv: list[str] | None = None) -> argparse.Namespace: + args_list = list(argv if argv is not None else sys.argv[1:]) + parser = _build_cli_parser() + + # Backward-compatible default: `paperweight [run-args]` == `paperweight run [run-args]` + known_commands = {"run", "init", "doctor"} + if args_list and args_list[0] in {"-h", "--help"}: + return parser.parse_args(args_list) + if args_list and args_list[0] in known_commands: + return parser.parse_args(args_list) + + run_parser = argparse.ArgumentParser( + description="paperweight: Fetch, triage, and summarize arXiv papers" + ) + _add_run_arguments(run_parser) + run_args = run_parser.parse_args(args_list) + run_args.command = "run" + return run_args + + +def _write_minimal_config(path: str, force: bool = False) -> None: + target = Path(path) + if target.exists() and not force: + raise ValueError(f"Config file already exists: {target}. Use --force to overwrite.") + + base_template = Path("config-base.yaml") + content = ( + base_template.read_text(encoding="utf-8") + if base_template.exists() + else MINIMAL_CONFIG_TEMPLATE + ) + target.write_text(content, encoding="utf-8") + print(f"Wrote config: {target}") + + +def _doctor(config_path: str, strict: bool = False) -> int: + results: list[tuple[str, str, str]] = [] + + config_file = Path(config_path) + if config_file.exists(): + results.append(("OK", "config file", str(config_file))) + else: + results.append(("FAIL", "config file", f"Missing: {config_file}")) + _print_doctor(results) + return 1 + + try: + config = load_config(config_path=config_path) + results.append(("OK", "config parse", "Loaded and validated")) + except Exception as e: + results.append(("FAIL", "config parse", str(e))) + _print_doctor(results) + return 1 + + triage_cfg = config.get("triage", {}) + triage_enabled = triage_cfg.get("enabled", True) + triage_provider = ( + triage_cfg.get("llm_provider") + or config.get("analyzer", 
{}).get("llm_provider") + or "openai" + ) + triage_key = ( + triage_cfg.get("api_key") + or config.get("analyzer", {}).get("api_key") + or os.getenv(f"{str(triage_provider).upper()}_API_KEY") + ) + + if triage_enabled and triage_key: + results.append(("OK", "triage auth", f"{triage_provider} key available")) + elif triage_enabled: + results.append( + ("WARN", "triage auth", "No API key found; heuristic fallback will be used") + ) + else: + results.append(("OK", "triage", "Disabled")) + + delivery_modes = ["stdout", "atom"] + notifier = config.get("notifier", {}) + if notifier: + delivery_modes.append("email") + results.append(("OK", "delivery modes", ", ".join(delivery_modes))) + + _print_doctor(results) + has_warn = any(status == "WARN" for status, _, _ in results) + return 1 if strict and has_warn else 0 + + +def _print_doctor(results: list[tuple[str, str, str]]) -> None: + print("paperweight doctor") + print("") + for status, check, detail in results: + print(f"[{status}] {check}: {detail}") + + +def _run_pipeline(args: argparse.Namespace) -> int: + config = None + run_id = None + paper_id_map = {} + run_status = "failed" + run_notes = None + db_enabled = False + had_error = False try: - recent_papers, config = setup_and_get_papers(args.force_refresh) - processed_papers = process_and_summarize_papers(recent_papers, config) + recent_papers, config = setup_and_get_papers( + args.force_refresh, + include_content=False, + config_path=args.config, + ) + shortlisted_papers = _apply_triage_and_hydrate(recent_papers, config) + db_enabled = is_db_enabled(config) + + if db_enabled: + run_id, paper_id_map = _initialize_db_run(config, shortlisted_papers) + + processed_papers = process_and_summarize_papers(shortlisted_papers, config) + + if db_enabled and run_id and processed_papers: + _persist_results(config, run_id, processed_papers, paper_id_map) if processed_papers: - notification_sent = compile_and_send_notifications( - processed_papers, config["notifier"] - ) - if 
notification_sent: - logger.info("Notifications compiled and sent successfully") - else: - logger.warning("Failed to send notifications") - except requests.RequestException as e: - logger.error(f"Network error occurred: {e}") - except yaml.YAMLError as e: - logger.error(f"Configuration error: {e}") - except KeyError as e: - logger.error(f"Missing configuration key: {e}") - except ValueError as e: - logger.error(f"Configuration validation error: {e}") + _deliver_output(processed_papers, config, args) + + run_status = "success" + except ( + requests.RequestException, + yaml.YAMLError, + KeyError, + ValueError, + DatabaseConnectionError, + ) as e: + had_error = True + error_type = _get_error_message(e) + run_notes = _handle_error(e, error_type) except Exception as e: - logger.error(f"An unexpected error occurred: {e}") + had_error = True + run_notes = _handle_error(e, "An unexpected error occurred") + finally: + if db_enabled and run_id: + _finalize_run(config, run_id, run_status, run_notes) + return 1 if had_error else 0 + + +def main(argv: list[str] | None = None) -> int: + """CLI entry point.""" + args = _parse_args(argv) + if args.command is None: + # No subcommand means default run mode. 
+ args.command = "run" + args.config = getattr(args, "config", "config.yaml") + args.force_refresh = getattr(args, "force_refresh", False) + args.delivery = getattr(args, "delivery", "stdout") + args.output = getattr(args, "output", None) + args.sort_order = getattr(args, "sort_order", "relevance") + args.max_items = getattr(args, "max_items", 0) + if args.command == "init": + _write_minimal_config(args.config, force=args.force) + return 0 + if args.command == "doctor": + return _doctor(args.config, strict=getattr(args, "strict", False)) + return _run_pipeline(args) if __name__ == "__main__": try: - main() + sys.exit(main()) except Exception as e: print(f"Uncaught exception in main: {e}") traceback.print_exc() diff --git a/src/paperweight/notifier.py b/src/paperweight/notifier.py index 6db93b2..1d9f962 100644 --- a/src/paperweight/notifier.py +++ b/src/paperweight/notifier.py @@ -1,18 +1,142 @@ -"""Module for sending email notifications about processed papers. +"""Notification and digest rendering helpers. -This module handles the creation and sending of email notifications about relevant papers -that have been processed. It includes functionality for composing email content and -sending emails through SMTP servers. +paperweight's default delivery is a deterministic stdout digest. Atom feed and +email delivery are optional adapters. 
""" +import json import logging import smtplib +from datetime import datetime, timezone from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText +from pathlib import Path +from typing import Any, Dict, List +from xml.etree import ElementTree as ET logger = logging.getLogger(__name__) +def _sort_papers(papers: List[Dict[str, Any]], sort_order: str) -> List[Dict[str, Any]]: + if sort_order == "alphabetical": + return sorted(papers, key=lambda x: x.get("title", "").lower()) + if sort_order == "publication_time": + return sorted(papers, key=_format_paper_date, reverse=True) + return list(papers) + + +def _format_paper_date(paper: Dict[str, Any]) -> str: + value = paper.get("date") + if isinstance(value, datetime): + return value.isoformat() + if isinstance(value, str): + return value + return str(value or "") + + +def render_text_digest( + papers: List[Dict[str, Any]], + *, + sort_order: str = "relevance", + heading: str = "paperweight digest", +) -> str: + """Render a deterministic plain-text digest.""" + if not papers: + return "paperweight digest\n\nNo matching papers." + + ordered = _sort_papers(papers, sort_order) + lines = [heading, ""] + + for idx, paper in enumerate(ordered, start=1): + score = paper.get("relevance_score", paper.get("triage_score", 0.0)) + lines.append(f"{idx}. 
{paper.get('title', 'Untitled')}") + lines.append(f" Date: {_format_paper_date(paper)}") + lines.append(f" Score: {score:.2f}") + if paper.get("triage_rationale"): + lines.append(f" Why: {paper.get('triage_rationale')}") + lines.append(f" Link: {paper.get('link', '')}") + lines.append(f" Summary: {(paper.get('summary') or '').strip()}") + lines.append("") + + return "\n".join(lines).rstrip() + "\n" + + +def render_atom_feed( + papers: List[Dict[str, Any]], + *, + sort_order: str = "relevance", + feed_title: str = "paperweight", + feed_id: str = "https://github.com/seanbrar/paperweight", + feed_link: str = "https://github.com/seanbrar/paperweight", +) -> str: + """Render an Atom feed from processed papers.""" + ns = "http://www.w3.org/2005/Atom" + ET.register_namespace("", ns) + feed = ET.Element(f"{{{ns}}}feed") + + ET.SubElement(feed, f"{{{ns}}}title").text = feed_title + ET.SubElement(feed, f"{{{ns}}}id").text = feed_id + ET.SubElement(feed, f"{{{ns}}}link", {"href": feed_link, "rel": "self"}) + ET.SubElement(feed, f"{{{ns}}}updated").text = datetime.now(timezone.utc).strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + + ordered = _sort_papers(papers, sort_order) + for paper in ordered: + entry = ET.SubElement(feed, f"{{{ns}}}entry") + link = paper.get("link", "") + title = paper.get("title", "Untitled") + summary = (paper.get("summary") or "").strip() + score = paper.get("relevance_score", 0.0) + rationale = (paper.get("triage_rationale") or "").strip() + date_text = _format_paper_date(paper) + updated = f"{date_text}T00:00:00Z" if len(date_text) == 10 else date_text + + ET.SubElement(entry, f"{{{ns}}}id").text = link or title + ET.SubElement(entry, f"{{{ns}}}title").text = title + ET.SubElement(entry, f"{{{ns}}}updated").text = updated + if link: + ET.SubElement(entry, f"{{{ns}}}link", {"href": link, "rel": "alternate"}) + ET.SubElement(entry, f"{{{ns}}}summary").text = summary + ET.SubElement(entry, f"{{{ns}}}content", {"type": "text"}).text = ( + f"Score: 
{score:.2f}\nWhy: {rationale}\nLink: {link}\nSummary: {summary}" + ) + + xml_bytes = ET.tostring(feed, encoding="utf-8", xml_declaration=True) + return xml_bytes.decode("utf-8") + + +def render_json_digest( + papers: List[Dict[str, Any]], *, sort_order: str = "relevance" +) -> str: + """Render a deterministic JSON digest for scripting.""" + ordered = _sort_papers(papers, sort_order) + payload = [] + for paper in ordered: + payload.append( + { + "title": paper.get("title", "Untitled"), + "date": _format_paper_date(paper), + "score": paper.get("relevance_score", paper.get("triage_score", 0.0)), + "why": paper.get("triage_rationale", ""), + "link": paper.get("link", ""), + "summary": (paper.get("summary") or "").strip(), + } + ) + return json.dumps(payload, indent=2, ensure_ascii=True) + + +def write_output(content: str, output_path: str | None = None) -> None: + """Write digest/feed content to file or stdout.""" + if output_path: + target = Path(output_path) + target.parent.mkdir(parents=True, exist_ok=True) + target.write_text(content, encoding="utf-8") + logger.info("Wrote output to %s", target) + else: + print(content, end="") + + def send_email_notification(subject, body, config): """Send an email notification using the configured SMTP server. @@ -21,14 +145,16 @@ def send_email_notification(subject, body, config): body: The body text of the email. config: Configuration dictionary containing email settings. - Raises: - smtplib.SMTPException: If there is an error sending the email. + Returns: + bool: True if the email was sent successfully, False otherwise. 
""" from_email = config["email"]["from"] - from_password = config["email"]["password"] + from_password = config["email"].get("password") to_email = config["email"]["to"] smtp_server = config["email"]["smtp_server"] smtp_port = config["email"]["smtp_port"] + use_tls = config["email"].get("use_tls", True) + use_auth = config["email"].get("use_auth", True) # Create the email msg = MIMEMultipart() @@ -41,15 +167,20 @@ def send_email_notification(subject, body, config): # Send the email try: server = smtplib.SMTP(smtp_server, smtp_port) - server.starttls() - server.login(from_email, from_password) + if use_tls: + server.starttls() + if use_auth and from_password: + server.login(from_email, from_password) + elif use_auth and not from_password: + logger.warning("SMTP auth enabled but no password provided; skipping login.") text = msg.as_string() server.sendmail(from_email, to_email, text) server.quit() logger.info("Email notification sent successfully") + return True except Exception as e: logger.error(f"Failed to send email notification: {e}", exc_info=True) - raise + return False def compile_and_send_notifications(papers, config): @@ -64,24 +195,11 @@ def compile_and_send_notifications(papers, config): """ if not papers: logger.info("No papers to send notifications for.") - return + return False sort_order = config.get("email", {}).get("sort_order", "relevance") - - if sort_order == "alphabetical": - papers = sorted(papers, key=lambda x: x["title"].lower()) - elif sort_order == "publication_time": - papers = sorted(papers, key=lambda x: x["date"], reverse=True) - # For 'relevance' or any other value, we keep the existing order (already sorted by relevance) - + papers = _sort_papers(papers, sort_order) subject = "New Papers from ArXiv" - body = "Here are the latest papers:\n\n" - for paper in papers: - body += f"Title: {paper['title']}\n" - body += f"Date: {paper['date']}\n" - body += f"Summary: {paper['summary']}\n" - body += f"Link: {paper['link']}\n" - body += 
f"Relevance Score: {paper['relevance_score']:.2f}\n\n" - + body = render_text_digest(papers, sort_order=sort_order, heading="New Papers from ArXiv") success = send_email_notification(subject, body, config) return success diff --git a/src/paperweight/processor.py b/src/paperweight/processor.py index 8c698e7..d8c2aaf 100644 --- a/src/paperweight/processor.py +++ b/src/paperweight/processor.py @@ -86,9 +86,13 @@ def calculate_paper_score(paper, config): score = 0 score_breakdown = {} # Keyword matching - title_keywords = count_keywords(paper["title"], config["keywords"]) - abstract_keywords = count_keywords(paper["abstract"], config["keywords"]) - content_keywords = count_keywords(paper["content"], config["keywords"]) + title = paper.get("title", "") + abstract = paper.get("abstract", "") + content = paper.get("content", "") + + title_keywords = count_keywords(title, config["keywords"]) + abstract_keywords = count_keywords(abstract, config["keywords"]) + content_keywords = count_keywords(content, config["keywords"]) max_title_score = 50 max_abstract_score = 50 @@ -110,7 +114,7 @@ def calculate_paper_score(paper, config): } # Exclusion list - exclusion_count = count_keywords(paper["content"], config["exclusion_keywords"]) + exclusion_count = count_keywords(content, config["exclusion_keywords"]) exclusion_score = min( exclusion_count * config["exclusion_keyword_penalty"], max_content_score ) @@ -118,9 +122,7 @@ def calculate_paper_score(paper, config): score_breakdown["exclusion_penalty"] = -round(exclusion_score, 2) # Simple text analysis - important_word_count = count_important_words( - paper["content"], config["important_words"] - ) + important_word_count = count_important_words(content, config["important_words"]) important_word_score = min( important_word_count * config["important_words_weight"], max_content_score ) diff --git a/src/paperweight/scraper.py b/src/paperweight/scraper.py index 1e57dc1..0152e8f 100644 --- a/src/paperweight/scraper.py +++ 
b/src/paperweight/scraper.py @@ -6,18 +6,18 @@ """ import gzip +import hashlib import io import logging import os import tarfile import time -import xml.etree.ElementTree as ET from datetime import date, datetime, timedelta -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional +import arxiv import requests from pypdf import PdfReader -from requests.exceptions import HTTPError from tenacity import ( retry, retry_if_exception_type, @@ -25,20 +25,17 @@ wait_exponential, ) +from paperweight.db import DatabaseConnectionError, connect_db +from paperweight.storage import get_last_successful_run_date from paperweight.utils import ( get_last_processed_date, - load_config, save_last_processed_date, + split_arxiv_id, ) logger = logging.getLogger(__name__) -@retry( - stop=stop_after_attempt(3), - wait=wait_exponential(multiplier=1, min=4, max=10), - retry=retry_if_exception_type((requests.ConnectionError, requests.Timeout)), -) def fetch_arxiv_papers( category: str, start_date: date, max_results: Optional[int] = None ) -> List[Dict[str, Any]]: @@ -57,76 +54,58 @@ def fetch_arxiv_papers( requests.Timeout: If the request times out. """ logger.debug(f"Fetching arXiv papers for category '{category}' since {start_date}") - base_url = "http://export.arxiv.org/api/query?" + + # Construct the query query = f"cat:{category}" - params: Dict[str, Union[str, int]] = { - "search_query": query, - "start": 0, - "sortBy": "submittedDate", - "sortOrder": "descending", - } - if max_results is not None and max_results > 0: - params["max_results"] = max_results - try: - response = requests.get(base_url, params=params) - response.raise_for_status() - except HTTPError as http_err: - if response.status_code == 400 and "Invalid field: cat" in response.text: - logger.error( - f"Invalid arXiv category: {category}. Please check your configuration." - ) - raise ValueError( - f"Invalid arXiv category: {category}. Please check your configuration." 
- ) from http_err - else: - logger.error(f"HTTP error occurred: {http_err}") - raise + # Configure the client + client = arxiv.Client( + page_size=100, + delay_seconds=3.0, + num_retries=3 + ) - root = ET.fromstring(response.content) + search = arxiv.Search( + query=query, + max_results=max_results, + sort_by=arxiv.SortCriterion.SubmittedDate, + sort_order=arxiv.SortOrder.Descending, + ) papers = [] - for entry in root.findall("{http://www.w3.org/2005/Atom}entry"): - title_elem = entry.find("{http://www.w3.org/2005/Atom}title") - link_elem = entry.find("{http://www.w3.org/2005/Atom}id") - published_elem = entry.find("{http://www.w3.org/2005/Atom}published") - summary_elem = entry.find("{http://www.w3.org/2005/Atom}summary") - - if ( - title_elem is None - or link_elem is None - or published_elem is None - or summary_elem is None - ): - logger.warning("Skipping entry due to missing required elements") - continue - title = title_elem.text.strip() if title_elem.text else "" - link = link_elem.text.strip() if link_elem.text else "" - submitted = published_elem.text.strip() if published_elem.text else "" - abstract = summary_elem.text.strip() if summary_elem.text else "" + try: + # Iterate through the results + for result in client.results(search): + submitted_date = result.published.date() - try: - submitted_date = datetime.strptime(submitted, "%Y-%m-%dT%H:%M:%SZ").date() - except ValueError: - logger.warning(f"Invalid date format for paper: {title}") - continue + logger.debug(f"Paper '{result.title}' submitted on {submitted_date}") - logger.debug(f"Paper '{title}' submitted on {submitted_date}") + if submitted_date < start_date: + logger.debug( + f"Stopping fetch: paper date {submitted_date} is before start date {start_date}" + ) + break - if submitted_date < start_date: - logger.debug( - f"Stopping fetch: paper date {submitted_date} is before start date {start_date}" + papers.append( + { + "title": result.title, + "link": result.entry_id, + "date": submitted_date, + 
"abstract": result.summary, + } ) - break - papers.append( - {"title": title, "link": link, "date": submitted_date, "abstract": abstract} - ) + # Safety break if max_results is set multiple times or if the generator doesn't stop + if max_results is not None and max_results > 0 and len(papers) >= max_results: + break - if max_results is not None and max_results > 0 and len(papers) >= max_results: - logger.debug(f"Reached max_results limit of {max_results}") - break + except Exception as e: + # Map arxiv errors or other unexpected errors + logger.error(f"Error fetching papers: {e}") + # We might want to re-raise or handle gracefully depending on the exact error + # For now, consistent with previous behavior, let's allow tenacity or caller to handle + raise logger.info( f"Successfully fetched {len(papers)} papers for category '{category}' since {start_date}" @@ -134,7 +113,7 @@ def fetch_arxiv_papers( return papers -def fetch_recent_papers(start_days=1): +def fetch_recent_papers(config, start_days=1): """Fetch papers published within the last specified number of days. Args: @@ -143,7 +122,6 @@ def fetch_recent_papers(start_days=1): Returns: List of dictionaries containing paper metadata. 
""" - config = load_config() categories = config["arxiv"]["categories"] max_results = config["arxiv"].get("max_results", 0) # Default to 0 if not set end_date = datetime.now().date() @@ -326,16 +304,68 @@ def fetch_paper_contents(paper_ids): return contents -def get_recent_papers(force_refresh=False): +def _hydrate_papers_with_content(papers, config, db_enabled): + """Attach extracted content/artifacts to paper metadata.""" + if not papers: + return [] + + paper_ids = [paper["link"].split("/abs/")[-1] for paper in papers] + contents = fetch_paper_contents(paper_ids) + + papers_with_content = [] + storage_base = config.get("storage", {}).get("base_dir", "data/artifacts") + for paper, (paper_id, content, method) in zip(papers, contents): + if content: + logger.debug(f"Extracting text for paper ID: {paper_id}") + text = extract_text_from_source(content, method) + + artifacts = [] + if db_enabled: + artifacts = _store_artifacts(paper_id, method, content, text, storage_base) + + paper_with_content = dict(paper) + paper_with_content.update( + { + "id": paper_id, + "content": text, + "content_type": method, + "artifacts": artifacts, + } + ) + papers_with_content.append(paper_with_content) + + logger.info("Hydrated %s/%s papers with full content", len(papers_with_content), len(papers)) + return papers_with_content + + +def hydrate_papers_with_content(papers, config): + """Public helper to fetch/extract full content for an existing shortlist.""" + db_enabled = config.get("db", {}).get("enabled", False) + return _hydrate_papers_with_content(papers, config, db_enabled) + + +def get_recent_papers(config, force_refresh=False, include_content=True): """Get recent papers, either from cache or by fetching new ones. Args: force_refresh: If True, ignore cache and fetch new papers. + include_content: If True, fetch and extract full paper content. Returns: List of dictionaries containing paper metadata. 
""" - last_processed_date = get_last_processed_date() + db_enabled = config.get("db", {}).get("enabled", False) + used_local_watermark = not db_enabled + if db_enabled: + try: + with connect_db(config["db"]) as conn: + last_processed_date = get_last_successful_run_date(conn) + except Exception as e: + raise DatabaseConnectionError( + "Database enabled but unreachable. Check host, port, credentials, and sslmode." + ) from e + else: + last_processed_date = get_last_processed_date() logger.info(f"Last processed date: {last_processed_date}") current_date = datetime.now().date() logger.info(f"Current date: {current_date}") @@ -357,37 +387,128 @@ def get_recent_papers(force_refresh=False): ) logger.info(f"Fetching papers for the last {days} days") - recent_papers = fetch_recent_papers(days) + recent_papers = fetch_recent_papers(config, days) logger.info(f"Fetched {len(recent_papers)} recent papers") - paper_ids = [paper["link"].split("/abs/")[-1] for paper in recent_papers] - - contents = fetch_paper_contents(paper_ids) - - papers_with_content = [] - for paper, (paper_id, content, method) in zip(recent_papers, contents): - if content: - logger.debug(f"Extracting text for paper ID: {paper_id}") - text = extract_text_from_source(content, method) - papers_with_content.append( + papers_result = recent_papers + if include_content: + papers_result = _hydrate_papers_with_content(recent_papers, config, db_enabled) + else: + papers_result = [] + for paper in recent_papers: + paper_id = paper["link"].split("/abs/")[-1] + paper_without_content = dict(paper) + paper_without_content.update( { "id": paper_id, - "title": paper["title"], - "link": paper["link"], - "date": paper["date"], - "abstract": paper["abstract"], - "content": text, - "content_type": method, + "content": "", + "content_type": None, + "artifacts": [], } ) + papers_result.append(paper_without_content) - if papers_with_content: + if recent_papers and used_local_watermark: save_last_processed_date(current_date) 
logger.info( - f"Processed {len(papers_with_content)} papers. Last processed date updated to {current_date}" + "Processed fetch window (%s papers). Last processed date updated to %s", + len(recent_papers), + current_date, ) else: logger.info("No new papers found.") - logger.info(f"Returning {len(papers_with_content)} papers with content") - return papers_with_content + logger.info( + "Returning %s papers (%s content)", + len(papers_result), + "with" if include_content else "without", + ) + return papers_result + + +def _store_artifacts(paper_id, method, content, text, storage_base): + """Store paper artifacts (source and extracted text) to disk. + + Args: + paper_id: arXiv paper identifier. + method: Content retrieval method ('pdf' or 'source'). + content: Raw binary content of the paper. + text: Extracted text content. + storage_base: Base directory for artifact storage. + + Returns: + List of artifact metadata dictionaries with type, uri, checksum, and byte_size. + """ + arxiv_id, arxiv_version = split_arxiv_id(paper_id) + artifacts = [] + safe_id = arxiv_id.replace("/", "_") + paper_dir = os.path.join(storage_base, f"{safe_id}_{arxiv_version}") + + try: + os.makedirs(paper_dir, exist_ok=True) + except OSError as e: + logger.error("Failed to create artifact directory %s: %s", paper_dir, e) + return artifacts + + if content: + raw_ext = "pdf" if method == "pdf" else "bin" + raw_path = os.path.join(paper_dir, f"source.{raw_ext}") + try: + _write_bytes(raw_path, content) + artifacts.append( + _artifact_record("source" if method == "source" else "pdf", raw_path, content) + ) + except OSError as e: + logger.error("Failed to write source artifact %s: %s", raw_path, e) + + if text: + text_path = os.path.join(paper_dir, "extracted.txt") + try: + _write_text(text_path, text) + artifacts.append(_artifact_record("text", text_path, text.encode("utf-8"))) + except OSError as e: + logger.error("Failed to write text artifact %s: %s", text_path, e) + + return artifacts + + +def 
_artifact_record(artifact_type, path, payload): + """Create an artifact metadata record. + + Args: + artifact_type: Type of artifact ('pdf', 'source', or 'text'). + path: File path where the artifact is stored. + payload: Binary content of the artifact. + + Returns: + Dictionary with artifact metadata (type, uri, checksum, byte_size). + """ + checksum = hashlib.sha256(payload).hexdigest() + return { + "type": artifact_type, + "uri": path, + "checksum": checksum, + "byte_size": len(payload), + } + + +def _write_bytes(path, payload): + """Write binary data to a file. + + Args: + path: File path to write to. + payload: Binary data to write. + """ + with open(path, "wb") as handle: + handle.write(payload) + + +def _write_text(path, text): + """Write text data to a file with UTF-8 encoding. + + Args: + path: File path to write to. + text: Text content to write. + """ + with open(path, "w", encoding="utf-8") as handle: + handle.write(text) diff --git a/src/paperweight/storage.py b/src/paperweight/storage.py new file mode 100644 index 0000000..7b3d5b2 --- /dev/null +++ b/src/paperweight/storage.py @@ -0,0 +1,293 @@ +"""Persistence helpers for Postgres-backed paperweight runs.""" + +import logging +from datetime import date, datetime +from typing import Any, Dict, List, Optional, Tuple +from uuid import UUID + +from psycopg import Connection +from psycopg.types.json import Json + +from paperweight.utils import split_arxiv_id + +logger = logging.getLogger(__name__) + + +def create_run( + conn: Connection, + config_hash: str, + pipeline_version: str, + notes: Optional[str] = None, +) -> UUID: + """Create a new pipeline run record. + + Args: + conn: Database connection. + config_hash: Hash of the configuration used for this run. + pipeline_version: Version of the pipeline. + notes: Optional notes for the run. + + Returns: + UUID of the created run. 
+ """ + with conn.cursor() as cur: + cur.execute( + """ + INSERT INTO runs (status, config_hash, pipeline_version, notes) + VALUES (%s, %s, %s, %s) + RETURNING id + """, + ("running", config_hash, pipeline_version, notes), + ) + row = cur.fetchone() + if row is None: + raise RuntimeError("Failed to create run record") + return row[0] + + +def get_last_successful_run_date(conn: Connection) -> Optional[date]: + """Get the completion date of the last successful run. + + Args: + conn: Database connection. + + Returns: + Date of the last successful run, or None if no successful runs exist. + """ + with conn.cursor() as cur: + cur.execute( + """ + SELECT completed_at + FROM runs + WHERE status = 'success' + ORDER BY completed_at DESC + LIMIT 1 + """ + ) + row = cur.fetchone() + if not row or row[0] is None: + return None + return row[0].date() + + +def finish_run( + conn: Connection, + run_id: UUID, + status: str, + notes: Optional[str] = None, +) -> None: + """Mark a run as finished with the given status. + + Args: + conn: Database connection. + run_id: UUID of the run to update. + status: Final status ('success' or 'failed'). + notes: Optional notes (e.g., error message on failure). + """ + with conn.cursor() as cur: + cur.execute( + """ + UPDATE runs + SET status = %s, completed_at = now(), notes = %s + WHERE id = %s + """, + (status, notes, run_id), + ) + + +def upsert_papers( + conn: Connection, papers: List[Dict[str, Any]] +) -> Dict[Tuple[str, str], UUID]: + """Insert or update paper records. + + Args: + conn: Database connection. + papers: List of paper dictionaries. + + Returns: + Mapping of (arxiv_id, version) tuples to database UUIDs. 
+ """ + paper_id_map: Dict[Tuple[str, str], UUID] = {} + with conn.cursor() as cur: + for paper in papers: + arxiv_id, arxiv_version = split_arxiv_id(paper.get("id") or paper["link"]) + published_at = paper.get("date") + if isinstance(published_at, datetime): + published_at = published_at.date() + title = paper.get("title") or f"Untitled ({arxiv_id})" + cur.execute( + """ + INSERT INTO papers ( + arxiv_id, + arxiv_version, + title, + abstract, + published_at, + updated_at, + primary_category, + categories, + link, + doi, + authors + ) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + ON CONFLICT (arxiv_id, arxiv_version) DO UPDATE SET + title = EXCLUDED.title, + abstract = EXCLUDED.abstract, + published_at = EXCLUDED.published_at, + updated_at = EXCLUDED.updated_at, + primary_category = EXCLUDED.primary_category, + categories = EXCLUDED.categories, + link = EXCLUDED.link, + doi = EXCLUDED.doi, + authors = EXCLUDED.authors + RETURNING id + """, + ( + arxiv_id, + arxiv_version, + title, + paper.get("abstract"), + published_at, + paper.get("updated_at"), + paper.get("primary_category"), + paper.get("categories"), + paper.get("link"), + paper.get("doi"), + paper.get("authors"), + ), + ) + row = cur.fetchone() + if row is None: + logger.error("Failed to upsert paper %s", arxiv_id) + continue + paper_id_map[(arxiv_id, arxiv_version)] = row[0] + return paper_id_map + + +def insert_scores( + conn: Connection, + run_id: UUID, + papers: List[Dict[str, Any]], + paper_id_map: Dict[Tuple[str, str], UUID], + score_type: str = "keyword", +) -> None: + """Insert relevance scores for papers. + + Args: + conn: Database connection. + run_id: UUID of the current run. + papers: List of processed paper dictionaries with scores. + paper_id_map: Mapping of (arxiv_id, version) to database UUIDs. + score_type: Type of score (default: "keyword"). 
+ """ + with conn.cursor() as cur: + for paper in papers: + arxiv_id, arxiv_version = split_arxiv_id(paper.get("id") or paper["link"]) + paper_id = paper_id_map.get((arxiv_id, arxiv_version)) + if not paper_id: + logger.warning( + "Skipping score insert; missing paper_id for %s", + paper.get("link"), + ) + continue + cur.execute( + """ + INSERT INTO scores (run_id, paper_id, score_type, score, details_json) + VALUES (%s, %s, %s, %s, %s) + """, + ( + run_id, + paper_id, + score_type, + paper.get("relevance_score"), + Json(paper.get("score_breakdown")), + ), + ) + + +def insert_summaries( + conn: Connection, + run_id: UUID, + papers: List[Dict[str, Any]], + paper_id_map: Dict[Tuple[str, str], UUID], + model: Optional[str] = None, + prompt_hash: Optional[str] = None, +) -> None: + """Insert paper summaries. + + Args: + conn: Database connection. + run_id: UUID of the current run. + papers: List of processed paper dictionaries with summaries. + paper_id_map: Mapping of (arxiv_id, version) to database UUIDs. + model: Model identifier used for summarization. + prompt_hash: Hash of the prompt used for summarization. + """ + with conn.cursor() as cur: + for paper in papers: + summary = paper.get("summary") + if not summary: + continue + arxiv_id, arxiv_version = split_arxiv_id(paper.get("id") or paper["link"]) + paper_id = paper_id_map.get((arxiv_id, arxiv_version)) + if not paper_id: + logger.warning( + "Skipping summary insert; missing paper_id for %s", + paper.get("link"), + ) + continue + cur.execute( + """ + INSERT INTO summaries (run_id, paper_id, summary_text, model, prompt_hash) + VALUES (%s, %s, %s, %s, %s) + """, + (run_id, paper_id, summary, model, prompt_hash), + ) + + +def insert_artifacts( + conn: Connection, + papers: List[Dict[str, Any]], + paper_id_map: Dict[Tuple[str, str], UUID], +) -> None: + """Insert paper artifact records. + + Args: + conn: Database connection. + papers: List of paper dictionaries with artifact metadata. 
+ paper_id_map: Mapping of (arxiv_id, version) to database UUIDs. + """ + with conn.cursor() as cur: + for paper in papers: + artifacts = paper.get("artifacts") or [] + if not artifacts: + continue + arxiv_id, arxiv_version = split_arxiv_id(paper.get("id") or paper["link"]) + paper_id = paper_id_map.get((arxiv_id, arxiv_version)) + if not paper_id: + logger.warning( + "Skipping artifacts insert; missing paper_id for %s", + paper.get("link"), + ) + continue + for artifact in artifacts: + cur.execute( + """ + INSERT INTO paper_artifacts ( + paper_id, + artifact_type, + storage_uri, + checksum, + byte_size + ) + VALUES (%s, %s, %s, %s, %s) + """, + ( + paper_id, + artifact.get("type"), + artifact.get("uri"), + artifact.get("checksum"), + artifact.get("byte_size"), + ), + ) diff --git a/src/paperweight/utils.py b/src/paperweight/utils.py index 9df91ea..b1e5e3c 100644 --- a/src/paperweight/utils.py +++ b/src/paperweight/utils.py @@ -6,16 +6,21 @@ and managing the last processed date for paper fetching. """ +import hashlib +import json import logging import os import re from datetime import datetime +from importlib.metadata import PackageNotFoundError +from importlib.metadata import version as pkg_version import tiktoken import yaml from dotenv import load_dotenv LAST_PROCESSED_DATE_FILE = "last_processed_date.txt" +DEFAULT_ARXIV_VERSION = "v0" logger = logging.getLogger(__name__) @@ -39,33 +44,41 @@ def expand_env_vars(config): return config -def override_with_env(config): +def override_with_env(config, *, _path=()): """Override configuration values with environment variables. - Args: - config: Configuration dictionary to override. + Environment variables use the prefix ``PAPERWEIGHT_`` and uppercase keys. - Returns: - Configuration dictionary with values overridden by environment variables. + Canonical nested form is fully-qualified: + ``PAPERWEIGHT_ARXIV_MAX_RESULTS=50``. - Environment variables should be prefixed with 'PAPERWEIGHT_' and use uppercase. 
- Nested configuration keys are joined with underscores. + Backwards-compat: also accept the legacy leaf-only form (e.g. + ``PAPERWEIGHT_MAX_RESULTS``). Fully-qualified wins if both are present. """ + + def _coerce(env_value: str, current_value): + if isinstance(current_value, bool): + return env_value.lower() in ("true", "1", "yes") + if isinstance(current_value, int): + return int(env_value) + if isinstance(current_value, float): + return float(env_value) + return env_value + env_prefix = "PAPERWEIGHT_" for key, value in config.items(): - env_var = f"{env_prefix}{key.upper()}" if isinstance(value, dict): - config[key] = override_with_env(value) - elif env_var in os.environ: - env_value = os.environ[env_var] - if isinstance(value, bool): - config[key] = env_value.lower() in ("true", "1", "yes") - elif isinstance(value, int): - config[key] = int(env_value) - elif isinstance(value, float): - config[key] = float(env_value) - else: - config[key] = env_value + config[key] = override_with_env(value, _path=_path + (key,)) + continue + + qualified = f"{env_prefix}{'_'.join([p.upper() for p in (_path + (key,))])}" + legacy_leaf = f"{env_prefix}{key.upper()}" + + if qualified in os.environ: + config[key] = _coerce(os.environ[qualified], value) + elif legacy_leaf in os.environ: + config[key] = _coerce(os.environ[legacy_leaf], value) + return config @@ -151,8 +164,13 @@ def check_config(config): _check_required_sections(config) _check_arxiv_section(config["arxiv"]) _check_analyzer_section(config["analyzer"]) - _check_notifier_section(config["notifier"]) _check_logging_section(config["logging"]) + if "notifier" in config: + _check_notifier_section(config["notifier"]) + if "db" in config and config["db"].get("enabled"): + _check_db_section(config["db"]) + if "storage" in config: + _check_storage_section(config["storage"]) except KeyError as e: raise ValueError(f"Missing required section or key: {e}") @@ -166,7 +184,8 @@ def _check_required_sections(config): Raises: ValueError: 
If any required section is missing. """ - required_sections = ["arxiv", "processor", "analyzer", "notifier", "logging"] + # Notifier is optional (stdout-only runs should not require SMTP config). + required_sections = ["arxiv", "processor", "analyzer", "logging"] for section in required_sections: if section not in config: raise ValueError(f"Missing required section: '{section}'") @@ -227,12 +246,34 @@ def _check_notifier_section(notifier): Raises: ValueError: If notifier configuration is invalid. """ + # Support stdout-only configs: notifier can be omitted or empty. + if not notifier: + return + + notifier_type = (notifier.get("type") or "").strip().lower() + email_cfg = notifier.get("email") or {} + email_enabled = email_cfg.get("enabled") + + # Backwards-compat: older configs had only notifier.email.* and were implicitly enabled. + if email_enabled is None and "email" in notifier: + email_enabled = True + + # Non-email notifiers have no SMTP requirements. + if notifier_type and notifier_type != "email": + return + + if not email_enabled: + return + if "email" not in notifier: raise ValueError("Missing required subsection: 'email' in 'notifier'") - required_email_fields = ["to", "from", "password", "smtp_server", "smtp_port"] + required_email_fields = ["to", "from", "smtp_server", "smtp_port"] for field in required_email_fields: if field not in notifier["email"]: raise ValueError(f"Missing required email field: '{field}'") + use_auth = notifier["email"].get("use_auth", True) + if use_auth and not notifier["email"].get("password"): + raise ValueError("Missing required email field: 'password'") def _check_logging_section(logging): @@ -249,6 +290,36 @@ def _check_logging_section(logging): raise ValueError(f"Invalid logging level: '{logging.get('level')}'") +def _check_db_section(db): + """Validate the database section of the configuration. + + Args: + db: Database configuration dictionary. + + Raises: + ValueError: If database configuration is invalid. 
+ """ + required_fields = ["host", "port", "database", "user", "password", "sslmode"] + for field in required_fields: + if field not in db: + raise ValueError(f"Missing required db field: '{field}'") + try: + int(db["port"]) + except (ValueError, TypeError) as e: + raise ValueError("'port' in 'db' section must be a valid integer") from e + valid_sslmodes = {"disable", "allow", "prefer", "require", "verify-ca", "verify-full"} + if db["sslmode"] not in valid_sslmodes: + raise ValueError( + f"Invalid sslmode '{db['sslmode']}'. Must be one of: {', '.join(sorted(valid_sslmodes))}" + ) + + +def _check_storage_section(storage): + """Validate the storage section of the configuration.""" + if "base_dir" not in storage: + raise ValueError("Missing required storage field: 'base_dir'") + + def is_valid_arxiv_category(category): """Check if an arXiv category string is valid. @@ -304,3 +375,77 @@ def count_tokens(text): """ encoding = tiktoken.encoding_for_model("gpt-3.5-turbo") return len(encoding.encode(text, allowed_special={"<|endoftext|>"})) + + +def hash_config(config): + """Create a stable hash of configuration values with secrets removed. + + Args: + config: Configuration dictionary. + + Returns: + Hex-encoded SHA-256 hash. 
+ """ + sanitized = _redact_config(config) + payload = json.dumps(sanitized, sort_keys=True, default=str) + return hashlib.sha256(payload.encode("utf-8")).hexdigest() + + +def _redact_config(value): + """Remove sensitive keys before hashing configuration data.""" + if isinstance(value, dict): + redacted = {} + for key, val in value.items(): + if _is_sensitive_key(key): + continue + redacted[key] = _redact_config(val) + return redacted + if isinstance(value, list): + return [_redact_config(item) for item in value] + return value + + +def _is_sensitive_key(key): + key_lower = key.lower() + sensitive_substrings = ("password", "api_key", "apikey", "secret") + return any(s in key_lower for s in sensitive_substrings) + + +def get_package_version(): + """Get the installed version of the paperweight package. + + Returns: + Version string, or 'unknown' if the package is not installed. + """ + try: + return pkg_version("paperweight") + except PackageNotFoundError: + return "unknown" + + +def split_arxiv_id(raw_id): + """Parse an arXiv identifier into base ID and version components. + + Handles both new-style (YYMM.NNNNN) and legacy (archive/NNNNNNN) formats, + as well as full URLs. + + Args: + raw_id: Raw arXiv ID string, possibly including URL prefix or version suffix. + + Returns: + Tuple of (arxiv_id, version) where version defaults to DEFAULT_ARXIV_VERSION + if not specified. 
+ """ + raw = (raw_id or "").strip() + if "/abs/" in raw: + raw = raw.split("/abs/")[-1] + raw = raw.replace("http://arxiv.org/abs/", "").replace( + "https://arxiv.org/abs/", "" + ) + new_style = re.match(r"^(?P<id>\d{4}\.\d{4,5})(?P<version>v\d+)?$", raw) + if new_style: + return new_style.group("id"), new_style.group("version") or DEFAULT_ARXIV_VERSION + legacy_style = re.match(r"^(?P<id>[a-z\-]+/\d{7})(?P<version>v\d+)?$", raw) + if legacy_style: + return legacy_style.group("id"), legacy_style.group("version") or DEFAULT_ARXIV_VERSION + return raw, DEFAULT_ARXIV_VERSION diff --git a/tests/api/__init__.py b/tests/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/api/test_database.py b/tests/api/test_database.py new file mode 100644 index 0000000..5a1248a --- /dev/null +++ b/tests/api/test_database.py @@ -0,0 +1,176 @@ +"""Real PostgreSQL integration tests. + +These tests require a running PostgreSQL database and are gated behind +the PAPERWEIGHT_TEST_DATABASE_URL environment variable.
+ +To run these tests: + export PAPERWEIGHT_TEST_DATABASE_URL="postgresql://user:pass@localhost:5432/paperweight_test" + pytest tests/api/test_database.py -v +""" + +import os +from datetime import date + +import pytest + +DATABASE_URL_ENV = "PAPERWEIGHT_TEST_DATABASE_URL" + + +def parse_database_url(url: str) -> dict: + """Parse a PostgreSQL URL into a config dict.""" + # postgresql://user:pass@host:port/database + from urllib.parse import urlparse + parsed = urlparse(url) + return { + "host": parsed.hostname, + "port": parsed.port or 5432, + "database": parsed.path.lstrip("/"), + "user": parsed.username, + "password": parsed.password, + "sslmode": "prefer", + } + + +@pytest.fixture +def db_config(): + """Get database config from environment.""" + url = os.getenv(DATABASE_URL_ENV) + if not url: + pytest.skip(f"Set {DATABASE_URL_ENV} to run database tests") + return parse_database_url(url) + + +@pytest.fixture +def db_connection(db_config): + """Create a database connection for testing.""" + from paperweight.db import connect_db + + with connect_db(db_config) as conn: + yield conn + # Rollback any uncommitted changes to keep tests isolated + conn.rollback() + + +@pytest.mark.api +class TestRealDatabase: + """Tests against a real PostgreSQL database.""" + + def test_create_and_finish_run(self, db_connection): + """Create a run record and mark it complete.""" + from paperweight.storage import create_run, finish_run + + run_id = create_run( + db_connection, + config_hash="test_hash_123", + pipeline_version="0.1.0", + notes="test run", + ) + + assert run_id is not None + + finish_run(db_connection, run_id, "success", notes="completed") + + # Verify the run was updated + with db_connection.cursor() as cursor: + cursor.execute( + "SELECT status, notes FROM runs WHERE id = %s", + (run_id,), + ) + row = cursor.fetchone() + assert row[0] == "success" + assert row[1] == "completed" + + def test_upsert_papers_idempotent(self, db_connection): + """Upserting the same paper 
twice returns the same ID.""" + from paperweight.storage import upsert_papers + + papers = [ + { + "id": "test.12345", + "link": "https://arxiv.org/abs/test.12345", + "title": "Test Paper", + "abstract": "Test abstract", + "date": date(2024, 1, 15), + } + ] + + # First upsert + result1 = upsert_papers(db_connection, papers) + db_connection.commit() + + # Second upsert of same paper + result2 = upsert_papers(db_connection, papers) + db_connection.commit() + + # Should get the same paper ID + assert ("test.12345", "v0") in result1 + assert ("test.12345", "v0") in result2 + assert result1[("test.12345", "v0")] == result2[("test.12345", "v0")] + + def test_full_storage_cycle(self, db_connection): + """Complete storage cycle: run, papers, scores, summaries.""" + from paperweight.storage import ( + create_run, + finish_run, + insert_scores, + insert_summaries, + upsert_papers, + ) + + # Create run + run_id = create_run( + db_connection, + config_hash="cycle_test", + pipeline_version="0.1.0", + ) + + # Upsert papers + papers = [ + { + "id": "cycle.001", + "link": "https://arxiv.org/abs/cycle.001", + "title": "Cycle Test Paper", + "abstract": "Testing the full cycle", + "date": date(2024, 1, 15), + "relevance_score": 0.85, + "score_breakdown": {"keyword": 0.5, "category": 0.35}, + "summary": "This paper tests the storage cycle.", + } + ] + paper_id_map = upsert_papers(db_connection, papers) + + # Insert scores + insert_scores(db_connection, run_id, papers, paper_id_map) + + # Insert summaries + insert_summaries(db_connection, run_id, papers, paper_id_map, model="test") + + # Finish run + finish_run(db_connection, run_id, "success") + + db_connection.commit() + + # Verify everything was stored + with db_connection.cursor() as cursor: + # Check run + cursor.execute("SELECT status FROM runs WHERE id = %s", (run_id,)) + assert cursor.fetchone()[0] == "success" + + # Check paper + paper_id = paper_id_map[("cycle.001", "v0")] + cursor.execute("SELECT title FROM papers WHERE id = 
%s", (paper_id,)) + assert cursor.fetchone()[0] == "Cycle Test Paper" + + # Check score + cursor.execute( + "SELECT score FROM scores WHERE run_id = %s AND paper_id = %s", + (run_id, paper_id), + ) + assert cursor.fetchone()[0] == pytest.approx(0.85) + + # Check summary + cursor.execute( + "SELECT summary_text FROM summaries WHERE run_id = %s AND paper_id = %s", + (run_id, paper_id), + ) + assert "storage cycle" in cursor.fetchone()[0] diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..cf737a7 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,126 @@ +"""Shared pytest fixtures for paperweight tests.""" + +from pathlib import Path +from typing import Any, Dict + +import pytest +import yaml + +from src.mocks.local_client import ( + MockArxivClient, + patch_scraper_for_local_mirror, +) + +ROOT = Path(__file__).parent.parent + + +@pytest.fixture +def project_root() -> Path: + """Return the project root directory.""" + return ROOT + + +@pytest.fixture +def local_mirror_path() -> Path: + """Path to the local mirror data directory. + + Skips the test if the mirror doesn't exist. + """ + mirror_path = ROOT / "data" / "local_mirror" + if not mirror_path.exists(): + pytest.skip("Local mirror not populated. 
Run scripts/populate_mirror.py first.") + return mirror_path + + +@pytest.fixture +def local_mirror_db(local_mirror_path: Path) -> Path: + """Path to the local mirror SQLite database.""" + db_path = local_mirror_path / "index.sqlite3" + if not db_path.exists(): + pytest.skip("Local mirror database not found.") + return db_path + + +@pytest.fixture +def local_mirror_files(local_mirror_path: Path) -> Path: + """Path to the local mirror files directory.""" + files_path = local_mirror_path / "files" + if not files_path.exists(): + pytest.skip("Local mirror files directory not found.") + return files_path + + +@pytest.fixture +def mock_arxiv_client(local_mirror_path: Path) -> MockArxivClient: + """Create a MockArxivClient using the local mirror.""" + return MockArxivClient(mirror_path=local_mirror_path) + + +@pytest.fixture +def patched_scraper(monkeypatch, local_mirror_files: Path): + """Patch the scraper module to use local mirror files. + + This patches: + - paperweight.scraper.fetch_paper_content + - paperweight.scraper.fetch_arxiv_papers + """ + patch_scraper_for_local_mirror(monkeypatch, local_mirror_files) + + +@pytest.fixture +def base_test_config(tmp_path: Path) -> Dict[str, Any]: + """Base config for integration tests with local mirror. + + Uses minimal processing to let most papers through. 
+ """ + return { + "arxiv": { + "categories": ["cs.AI", "cs.CL", "cs.LG"], + "max_results": 10, + }, + "processor": { + "keywords": ["machine learning", "neural network", "deep learning", "ai", "transformer"], + "exclusion_keywords": [], # Don't exclude anything for testing + "important_words": ["novel", "state-of-the-art"], + "title_keyword_weight": 3, + "abstract_keyword_weight": 2, + "content_keyword_weight": 1, + "exclusion_keyword_penalty": 5, + "important_words_weight": 0.5, + "min_score": 0, # Accept all papers for testing + }, + "analyzer": { + "type": "abstract", + }, + "notifier": { + "email": { + "from": "test@example.com", + "to": "test@example.com", + "smtp_server": "localhost", + "smtp_port": 1025, + "use_tls": False, + "use_auth": False, + } + }, + "logging": { + "level": "DEBUG", + "file": str(tmp_path / "test_paperweight.log"), + }, + "db": { + "enabled": False, + }, + "storage": { + "base_dir": str(tmp_path / "artifacts"), + }, + } + + +@pytest.fixture +def production_config(project_root: Path) -> Dict[str, Any]: + """Load the production config.yaml file.""" + config_path = project_root / "config.yaml" + if not config_path.exists(): + pytest.skip("config.yaml not found") + + with config_path.open("r") as f: + return yaml.safe_load(f) diff --git a/tests/test_analyzer.py b/tests/test_analyzer.py index 963122b..6e37b58 100644 --- a/tests/test_analyzer.py +++ b/tests/test_analyzer.py @@ -1,63 +1,100 @@ -from unittest.mock import patch +"""Tests for the paper analyzer/summarization module. + +This file tests the LLM boundary: how paperweight interacts with +external LLM providers to generate summaries, including fallback behavior. 
+""" import pytest -from paperweight.analyzer import get_abstracts, summarize_paper - - -@pytest.mark.parametrize("llm_provider, api_key, expected_result", [ - ('openai', 'fake_api_key', "This is a summary of the paper."), - ('openai', None, "This is the abstract."), - ('invalid_provider', 'fake_api_key', "This is the abstract."), -]) -def test_summarize_paper(llm_provider, api_key, expected_result, mocker): - mock_llm = mocker.Mock() - mock_llm.generate_response.return_value = "This is a summary of the paper." - mocker.patch('paperweight.analyzer.LLM.create', return_value=mock_llm) - - paper = { - 'title': 'Test Paper', - 'abstract': 'This is the abstract.', - 'content': 'This is the full content of the paper.' - } - config = { - 'analyzer': { - 'type': 'summary', - 'llm_provider': llm_provider, - 'api_key': api_key - } - } +from paperweight.analyzer import get_abstracts, summarize_paper, triage_papers - result = summarize_paper(paper, config) - assert result == expected_result -def test_get_abstracts_invalid_analysis_type(): - with pytest.raises(ValueError, match="Unknown analysis type: invalid_type"): - config = {'type': 'invalid_type'} - get_abstracts([{'abstract': 'Test abstract'}], config) +class TestSummarizePaper: + """Tests for paper summarization with LLM providers.""" -def test_summarize_paper_api_key_missing(): - paper = { - 'title': 'Test Paper', - 'abstract': 'This is a test abstract.', - 'content': 'This is the full content of the paper.' 
- } - config = {'analyzer': {'llm_provider': 'openai', 'api_key': None}} + @pytest.mark.parametrize( + "llm_provider, api_key, expected_result", + [ + ("openai", "fake_api_key", "This is a summary of the paper."), + ("openai", None, "This is the abstract."), + ("invalid_provider", "fake_api_key", "This is the abstract."), + ], + ) + def test_summarize_with_fallback( + self, llm_provider, api_key, expected_result, mocker + ): + """Summarization falls back to abstract when LLM unavailable.""" + # Mock Pollux's async run() function + mock_result = {"answers": ["This is a summary of the paper."], "status": "ok"} + mocker.patch("paperweight.analyzer.run", return_value=mock_result) + + paper = { + "title": "Test Paper", + "abstract": "This is the abstract.", + "content": "This is the full content of the paper.", + } + config = { + "type": "summary", + "llm_provider": llm_provider, + "api_key": api_key, + } - with patch('paperweight.analyzer.logger') as mock_logger: - result = summarize_paper(paper, config) - assert result == paper['abstract'] - mock_logger.warning.assert_called_with("No valid LLM provider or API key available for openai. Falling back to abstract.") - -def test_summarize_paper_invalid_llm_provider(): - paper = { - 'title': 'Test Paper', - 'abstract': 'This is a test abstract.', - 'content': 'This is the full content of the paper.' - } - config = {'analyzer': {'llm_provider': 'invalid_provider', 'api_key': 'fake_api_key'}} - - with patch('paperweight.analyzer.logger') as mock_logger: result = summarize_paper(paper, config) - assert result == paper['abstract'] - mock_logger.warning.assert_called_with("No valid LLM provider or API key available for invalid_provider. 
Falling back to abstract.") + assert result == expected_result + + +class TestGetAbstracts: + """Tests for the get_abstracts function.""" + + def test_invalid_analysis_type_raises(self): + """Unknown analysis type raises ValueError.""" + config = {"type": "invalid_type"} + with pytest.raises(ValueError, match="Unknown analysis type: invalid_type"): + get_abstracts([{"abstract": "Test abstract"}], config) + + +class TestTriagePapers: + """Tests for AI triage stage.""" + + def test_triage_uses_llm_decision(self, mocker): + mocker.patch( + "paperweight.analyzer.run", + return_value={ + "answers": [ + '{"include": true, "score": 92, "rationale": "Strong profile match"}' + ] + }, + ) + papers = [ + { + "title": "Transformers for Agents", + "abstract": "A paper about language agents and planning.", + "link": "http://arxiv.org/abs/2401.12345", + } + ] + config = { + "triage": {"enabled": True, "llm_provider": "openai", "api_key": "key"}, + "processor": {"keywords": ["agents", "planning"]}, + "analyzer": {}, + } + shortlisted = triage_papers(papers, config) + assert len(shortlisted) == 1 + assert shortlisted[0]["triage_score"] == 92 + assert "Strong profile match" in shortlisted[0]["triage_rationale"] + + def test_triage_falls_back_without_api_key(self): + papers = [ + { + "title": "Transformers for Agents", + "abstract": "A paper about language agents and planning.", + "link": "http://arxiv.org/abs/2401.12345", + } + ] + config = { + "triage": {"enabled": True, "llm_provider": "openai", "min_score": 10}, + "processor": {"keywords": ["agents"]}, + "analyzer": {"type": "abstract"}, + } + shortlisted = triage_papers(papers, config) + assert len(shortlisted) == 1 + assert shortlisted[0]["triage_score"] >= 10 diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..06eb29f --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,67 @@ +import pytest + +from paperweight.main import main + + +def test_init_writes_minimal_config(tmp_path, monkeypatch): + 
monkeypatch.chdir(tmp_path) + exit_code = main(["init"]) + assert exit_code == 0 + config_path = tmp_path / "config.yaml" + assert config_path.exists() + assert "arxiv:" in config_path.read_text(encoding="utf-8") + + +def test_init_does_not_overwrite_without_force(tmp_path, monkeypatch): + monkeypatch.chdir(tmp_path) + config_path = tmp_path / "config.yaml" + config_path.write_text("existing: true\n", encoding="utf-8") + + with pytest.raises(ValueError, match="already exists"): + main(["init"]) + + +def test_doctor_reports_missing_config(tmp_path): + missing = tmp_path / "missing.yaml" + exit_code = main(["doctor", "--config", str(missing)]) + assert exit_code == 1 + + +def test_doctor_success_with_loaded_config(tmp_path, monkeypatch): + config_path = tmp_path / "config.yaml" + config_path.write_text("placeholder: true\n", encoding="utf-8") + + monkeypatch.setattr( + "paperweight.main.load_config", + lambda config_path: { + "arxiv": {"categories": ["cs.AI"]}, + "processor": {"keywords": ["agents"]}, + "analyzer": {"type": "abstract"}, + "triage": {"enabled": False}, + "logging": {"level": "INFO"}, + }, + ) + + exit_code = main(["doctor", "--config", str(config_path)]) + assert exit_code == 0 + + +def test_doctor_strict_fails_on_warning(tmp_path, monkeypatch): + config_path = tmp_path / "config.yaml" + config_path.write_text("placeholder: true\n", encoding="utf-8") + monkeypatch.delenv("OPENAI_API_KEY", raising=False) + monkeypatch.delenv("GEMINI_API_KEY", raising=False) + + monkeypatch.setattr( + "paperweight.main.load_config", + lambda config_path: { + "arxiv": {"categories": ["cs.AI"]}, + "processor": {"keywords": ["agents"]}, + "analyzer": {"type": "abstract", "llm_provider": "openai"}, + "triage": {"enabled": True, "llm_provider": "openai"}, + "logging": {"level": "INFO"}, + }, + ) + + exit_code = main(["doctor", "--config", str(config_path), "--strict"]) + assert exit_code == 1 diff --git a/tests/test_cli_integration.py b/tests/test_cli_integration.py new file 
mode 100644 index 0000000..7d21ebb --- /dev/null +++ b/tests/test_cli_integration.py @@ -0,0 +1,169 @@ +"""Small high-value CLI integration workflows. + +These tests run through the CLI entrypoint with real config loading while +stubbing network-heavy scraper boundaries. +""" + +from datetime import date + +import yaml + +from paperweight.main import main + + +def _write_config(tmp_path, *, triage_enabled=False): + config = { + "arxiv": {"categories": ["cs.AI"], "max_results": 5}, + "triage": { + "enabled": triage_enabled, + "llm_provider": "openai", + "min_score": 60, + "max_selected": 5, + }, + "processor": { + "keywords": ["transformer", "agent"], + "exclusion_keywords": [], + "important_words": [], + "title_keyword_weight": 3, + "abstract_keyword_weight": 2, + "content_keyword_weight": 1, + "exclusion_keyword_penalty": 5, + "important_words_weight": 0.5, + "min_score": 0, + }, + "analyzer": {"type": "abstract"}, + "logging": {"level": "INFO", "file": str(tmp_path / "paperweight.log")}, + } + config_path = tmp_path / "config.yaml" + config_path.write_text(yaml.safe_dump(config), encoding="utf-8") + return config_path + + +def _stub_scraper(monkeypatch): + fake_papers = [ + { + "title": "Transformer Agents", + "link": "http://arxiv.org/abs/2401.12345", + "date": date(2024, 1, 15), + "abstract": "A paper about transformer-based agents.", + } + ] + + monkeypatch.setattr( + "paperweight.scraper.fetch_recent_papers", lambda _config, _days: fake_papers + ) + monkeypatch.setattr( + "paperweight.scraper.fetch_paper_contents", + lambda _ids: [("2401.12345", b"stub-bytes", "pdf")], + ) + monkeypatch.setattr( + "paperweight.scraper.extract_text_from_source", lambda _c, _m: "transformer agent" + ) + monkeypatch.setattr("paperweight.scraper.get_last_processed_date", lambda: None) + monkeypatch.setattr("paperweight.scraper.save_last_processed_date", lambda _d: None) + + +def _stub_scraper_two_papers(monkeypatch): + fake_papers = [ + { + "title": "Transformer Agents", + 
"link": "http://arxiv.org/abs/2401.12345", + "date": date(2024, 1, 15), + "abstract": "A paper about transformer-based agents.", + }, + { + "title": "Reasoning Models", + "link": "http://arxiv.org/abs/2401.67890", + "date": date(2024, 1, 14), + "abstract": "A paper about reasoning models.", + }, + ] + monkeypatch.setattr( + "paperweight.scraper.fetch_recent_papers", lambda _config, _days: fake_papers + ) + monkeypatch.setattr( + "paperweight.scraper.fetch_paper_contents", + lambda _ids: [ + ("2401.12345", b"stub-bytes", "pdf"), + ("2401.67890", b"stub-bytes", "pdf"), + ], + ) + monkeypatch.setattr( + "paperweight.scraper.extract_text_from_source", lambda _c, _m: "transformer agent" + ) + monkeypatch.setattr("paperweight.scraper.get_last_processed_date", lambda: None) + monkeypatch.setattr("paperweight.scraper.save_last_processed_date", lambda _d: None) + + +def test_run_stdout_mode_smoke(tmp_path, monkeypatch, capsys): + config_path = _write_config(tmp_path, triage_enabled=False) + _stub_scraper(monkeypatch) + + exit_code = main(["run", "--config", str(config_path), "--force-refresh"]) + out = capsys.readouterr().out + + assert exit_code == 0 + assert "paperweight digest" in out + assert "Transformer Agents" in out + assert "http://arxiv.org/abs/2401.12345" in out + + +def test_run_atom_output_smoke(tmp_path, monkeypatch): + config_path = _write_config(tmp_path, triage_enabled=False) + atom_path = tmp_path / "digest.xml" + _stub_scraper(monkeypatch) + + exit_code = main( + [ + "run", + "--config", + str(config_path), + "--force-refresh", + "--delivery", + "atom", + "--output", + str(atom_path), + ] + ) + + assert exit_code == 0 + assert atom_path.exists() + xml = atom_path.read_text(encoding="utf-8") + assert " 0 + + def test_mock_fetch_paper_content_pdf_fallback(self, local_mirror_files: Path, tmp_path: Path): + """Test PDF fallback when source is missing.""" + # Create a PDF-only test case + test_pdf = tmp_path / "test_paper.pdf" + test_pdf.write_bytes(b"%PDF-1.4 
test content") + + content, method = mock_fetch_paper_content("test_paper", tmp_path) + + assert content is not None + assert method == "pdf" + + def test_mock_fetch_paper_content_not_found(self, tmp_path: Path): + """Return None for non-existent paper.""" + content, method = mock_fetch_paper_content("nonexistent_paper", tmp_path) + + assert content is None + assert method is None + + def test_mock_fetch_arxiv_papers(self, local_mirror_db: Path): + """Fetch papers by category from local database.""" + papers = mock_fetch_arxiv_papers( + category="cs.AI", + start_date=date(2024, 1, 1), + max_results=5, + db_path=local_mirror_db + ) + + # May be empty if no cs.AI papers, but should not error + assert isinstance(papers, list) + + if papers: + paper = papers[0] + assert "title" in paper + assert "link" in paper + assert "abstract" in paper + + +class TestMockArxivClient: + """Test MockArxivClient behavior.""" + + def test_client_initialization(self, local_mirror_path: Path): + """Client initializes with valid mirror path.""" + client = MockArxivClient(mirror_path=local_mirror_path) + assert client.mirror_db_path.exists() + + def test_base_id_matches_versioned_paper(self, tmp_path: Path): + """Base arXiv ID should find papers stored with version suffix. + + Regression test: The mock should match real arXiv API behavior where + searching for '1706.03762' returns '1706.03762v7'. 
+ """ + import arxiv + + # Setup: DB with only versioned ID + mirror = tmp_path / "mirror" + mirror.mkdir() + (mirror / "files").mkdir() + db = mirror / "index.sqlite3" + + conn = sqlite3.connect(db) + conn.execute("""CREATE TABLE papers ( + id TEXT PRIMARY KEY, title TEXT, abstract TEXT, authors TEXT, + categories TEXT, published DATE, updated DATE, pdf_url TEXT, + doi TEXT, local_file_path TEXT, local_source_path TEXT)""") + conn.execute( + "INSERT INTO papers VALUES (?,?,?,?,?,?,?,?,?,?,?)", + ("1706.03762v7", "Attention Is All You Need", "Abstract", + "Vaswani et al.", "cs.CL,cs.LG", "2017-06-12", "2017-06-12", + "http://arxiv.org/pdf/1706.03762v7", None, None, None), + ) + conn.commit() + conn.close() + + client = MockArxivClient(mirror_path=mirror) + + # Base ID should find the versioned paper + results = list(client.results(arxiv.Search(id_list=["1706.03762"]))) + assert len(results) == 1 + assert "1706.03762v7" in results[0].entry_id + + def test_versioned_id_exact_match(self, tmp_path: Path): + """Versioned arXiv ID should only match that exact version. + + Searching for '1706.03762v1' should NOT match '1706.03762v7'. 
+ """ + import arxiv + + # Setup: DB with v7 only + mirror = tmp_path / "mirror" + mirror.mkdir() + (mirror / "files").mkdir() + db = mirror / "index.sqlite3" + + conn = sqlite3.connect(db) + conn.execute("""CREATE TABLE papers ( + id TEXT PRIMARY KEY, title TEXT, abstract TEXT, authors TEXT, + categories TEXT, published DATE, updated DATE, pdf_url TEXT, + doi TEXT, local_file_path TEXT, local_source_path TEXT)""") + conn.execute( + "INSERT INTO papers VALUES (?,?,?,?,?,?,?,?,?,?,?)", + ("1706.03762v7", "Test Paper", "Abstract", "Author", "cs.AI", + "2017-06-12", "2017-06-12", "http://example.com", None, None, None), + ) + conn.commit() + conn.close() + + client = MockArxivClient(mirror_path=mirror) + + # Searching for v1 should NOT find v7 + results = list(client.results(arxiv.Search(id_list=["1706.03762v1"]))) + assert len(results) == 0 + + # Searching for v7 should find v7 + results = list(client.results(arxiv.Search(id_list=["1706.03762v7"]))) + assert len(results) == 1 + + def test_base_id_finds_multiple_versions(self, tmp_path: Path): + """Base ID search returns all matching versions (mirrors arXiv behavior).""" + import arxiv + + # Setup: DB with multiple versions + mirror = tmp_path / "mirror" + mirror.mkdir() + (mirror / "files").mkdir() + db = mirror / "index.sqlite3" + + conn = sqlite3.connect(db) + conn.execute("""CREATE TABLE papers ( + id TEXT PRIMARY KEY, title TEXT, abstract TEXT, authors TEXT, + categories TEXT, published DATE, updated DATE, pdf_url TEXT, + doi TEXT, local_file_path TEXT, local_source_path TEXT)""") + conn.execute( + "INSERT INTO papers VALUES (?,?,?,?,?,?,?,?,?,?,?)", + ("1706.03762v1", "Paper v1", "Abstract", "Author", "cs.AI", + "2017-06-12", "2017-06-12", "http://example.com", None, None, None), + ) + conn.execute( + "INSERT INTO papers VALUES (?,?,?,?,?,?,?,?,?,?,?)", + ("1706.03762v7", "Paper v7", "Abstract", "Author", "cs.AI", + "2017-06-12", "2017-12-01", "http://example.com", None, None, None), + ) + conn.commit() + 
conn.close() + + client = MockArxivClient(mirror_path=mirror) + + # Base ID should find both versions + results = list(client.results(arxiv.Search(id_list=["1706.03762"]))) + assert len(results) == 2 + entry_ids = [r.entry_id for r in results] + assert any("1706.03762v1" in eid for eid in entry_ids) + assert any("1706.03762v7" in eid for eid in entry_ids) + + def test_client_search_by_category(self, mock_arxiv_client: MockArxivClient): + """Search returns papers matching category.""" + import arxiv + + search = arxiv.Search(query="cat:cs.AI", max_results=5) + results = list(mock_arxiv_client.results(search)) + + # Results depend on what's in the mirror + assert isinstance(results, list) + + def test_client_search_by_id(self, mock_arxiv_client: MockArxivClient, local_mirror_db: Path): + """Search by ID returns matching paper.""" + import arxiv + + # Get a versioned paper ID from the database (versioned IDs use exact matching) + conn = sqlite3.connect(local_mirror_db) + cursor = conn.cursor() + # Select a versioned ID to ensure exact match behavior + cursor.execute("SELECT id FROM papers WHERE id GLOB '*v[0-9]*' LIMIT 1") + row = cursor.fetchone() + conn.close() + + if not row: + pytest.skip("No versioned papers in database") + + paper_id = row[0] + search = arxiv.Search(id_list=[paper_id]) + results = list(mock_arxiv_client.results(search)) + + assert len(results) == 1 + assert paper_id in results[0].entry_id + + def test_result_has_download_methods(self, mock_arxiv_client: MockArxivClient, local_mirror_db: Path): + """arxiv.Result objects have mocked download methods.""" + import arxiv + + # Get a paper with files + conn = sqlite3.connect(local_mirror_db) + cursor = conn.cursor() + cursor.execute("SELECT id FROM papers WHERE local_file_path IS NOT NULL LIMIT 1") + row = cursor.fetchone() + conn.close() + + if not row: + pytest.skip("No papers with local files") + + paper_id = row[0] + search = arxiv.Search(id_list=[paper_id]) + results = 
list(mock_arxiv_client.results(search)) + + assert len(results) == 1 + result = results[0] + + # Check methods exist + assert hasattr(result, 'download_pdf') + assert hasattr(result, 'download_source') + assert callable(result.download_pdf) + assert callable(result.download_source) + + +class TestSourceExtraction: + """Test text extraction from local mirror files.""" + + def test_extract_text_from_source_archive(self, local_mirror_files: Path): + """Extract text from a .tar.gz source archive.""" + sources = list(local_mirror_files.glob("*.tar.gz")) + if not sources: + pytest.skip("No source archives in mirror") + + # Try a few sources until we find one that works + text = None + for source_path in sources[:20]: + try: + content = source_path.read_bytes() + text = extract_text_from_source(content, "source") + if text: + break + except Exception: + continue + + assert text is not None, "Should extract text from at least one source" + assert len(text) > 100, "Extracted text should be substantial" + + def test_extract_text_from_pdf(self, local_mirror_files: Path): + """Extract text from a PDF file.""" + pdfs = list(local_mirror_files.glob("*.pdf")) + if not pdfs: + pytest.skip("No PDFs in mirror") + + # Try a few PDFs until we find one that works + text = None + for pdf_path in pdfs[:20]: + try: + content = pdf_path.read_bytes() + text = extract_text_from_source(content, "pdf") + if text: + break + except Exception: + continue + + assert text is not None + assert isinstance(text, str) + + +class TestFullPipelineLocal: + """Test the full processing pipeline using local data.""" + + def test_fetch_process_score( + self, + local_mirror_files: Path, + local_mirror_db: Path, + base_test_config: Dict[str, Any], + tmp_path: Path, + ): + """Fetch papers, extract content, process, and score.""" + setup_logging(base_test_config["logging"]) + + # Get papers from local database + papers_raw = mock_fetch_arxiv_papers( + category="cs", # Broad category + start_date=date(2024, 1, 1), 
+ max_results=5, + db_path=local_mirror_db + ) + + if not papers_raw: + pytest.skip("No papers in local mirror") + + # Enrich with content + papers_with_content = [] + for paper in papers_raw: + paper_id = paper["link"].split("/")[-1] + content, method = mock_fetch_paper_content(paper_id, local_mirror_files) + + if content and method: + try: + text = extract_text_from_source(content, method) + papers_with_content.append({ + "id": paper_id, + "title": paper["title"], + "link": paper["link"], + "date": paper["date"], + "abstract": paper["abstract"], + "content": text or "", + "content_type": method, + }) + except Exception: + # Skip papers that fail extraction + continue + + assert len(papers_with_content) > 0, "Should have papers with content" + + # Process and score + processed = process_papers(papers_with_content, base_test_config["processor"]) + + assert len(processed) > 0, "Should have processed papers" + + # Check scoring results + for paper in processed: + assert "relevance_score" in paper + assert "normalized_score" in paper + assert paper["relevance_score"] >= 0 + + def test_pipeline_with_production_config( + self, + local_mirror_files: Path, + local_mirror_db: Path, + production_config: Dict[str, Any], + tmp_path: Path, + ): + """Test pipeline with production config.yaml settings.""" + # Override logging to temp + production_config["logging"]["file"] = str(tmp_path / "test.log") + setup_logging(production_config["logging"]) + + # Get papers + papers_raw = mock_fetch_arxiv_papers( + category="cs.AI", + start_date=date(2024, 1, 1), + max_results=10, + db_path=local_mirror_db + ) + + if not papers_raw: + pytest.skip("No cs.AI papers in local mirror") + + # Enrich with content + papers_with_content = [] + for paper in papers_raw[:5]: + paper_id = paper["link"].split("/")[-1] + content, method = mock_fetch_paper_content(paper_id, local_mirror_files) + + if content and method: + try: + text = extract_text_from_source(content, method) + 
papers_with_content.append({ + "id": paper_id, + "title": paper["title"], + "link": paper["link"], + "date": paper["date"], + "abstract": paper["abstract"], + "content": text or paper["abstract"], + "content_type": method, + }) + except Exception: + continue + + if not papers_with_content: + pytest.skip("No papers with extractable content") + + # Process with production scoring rules + processed = process_papers(papers_with_content, production_config["processor"]) + + # Production config has min_score, so some may be filtered + # Just verify no errors + assert isinstance(processed, list) + + +class TestNoNetworkCalls: + """Verify that local mirror tests make no real network calls.""" + + def test_mock_functions_are_offline(self, local_mirror_files: Path, local_mirror_db: Path): + """Ensure mock functions don't make HTTP requests.""" + with patch("requests.get") as mock_get, patch("requests.post") as mock_post: + # Call mock functions + mock_fetch_paper_content("1706.03762", local_mirror_files) + mock_fetch_arxiv_papers("cs.AI", date.today(), 5, local_mirror_db) + + # Verify no HTTP calls + mock_get.assert_not_called() + mock_post.assert_not_called() + + def test_mock_client_is_offline(self, mock_arxiv_client: MockArxivClient): + """Ensure MockArxivClient doesn't make HTTP requests.""" + import arxiv + + with patch("requests.get") as mock_get, patch("requests.post") as mock_post: + search = arxiv.Search(query="cat:cs.AI", max_results=2) + list(mock_arxiv_client.results(search)) + + mock_get.assert_not_called() + mock_post.assert_not_called() + + +class TestGoldenSetScoring: + """Test that known golden set papers score as expected.""" + + def test_attention_paper_high_relevance( + self, + local_mirror_files: Path, + local_mirror_db: Path, + production_config: Dict[str, Any], + ): + """'Attention Is All You Need' should score well for ML keywords.""" + # Check if we have this paper + content, method = mock_fetch_paper_content("1706.03762", local_mirror_files) + if not 
content: + pytest.skip("Golden set paper 1706.03762 not in mirror") + + try: + text = extract_text_from_source(content, method) + except Exception: + pytest.skip("Could not extract text from 1706.03762") + + # Get metadata from DB + conn = sqlite3.connect(local_mirror_db) + cursor = conn.cursor() + cursor.execute("SELECT title, abstract FROM papers WHERE id LIKE ?", ("1706.03762%",)) + row = cursor.fetchone() + conn.close() + + if not row: + pytest.skip("Paper 1706.03762 not in database") + + paper = { + "id": "1706.03762", + "title": row[0], + "link": "http://arxiv.org/abs/1706.03762", + "date": date(2017, 6, 12), + "abstract": row[1], + "content": text or row[1], + } + + processed = process_papers([paper], production_config["processor"]) + + # This paper should pass min_score with ML keywords + if processed: + assert processed[0]["relevance_score"] > 0 diff --git a/tests/test_logging.py b/tests/test_logging.py deleted file mode 100644 index 7829e70..0000000 --- a/tests/test_logging.py +++ /dev/null @@ -1,18 +0,0 @@ -from unittest.mock import patch - -from paperweight.logging_config import setup_logging - - -def test_setup_logging_invalid_level(): - config = { - 'logging': { - 'level': 'INVALID_LEVEL', - 'file': 'test.log' - } - } - - with patch('paperweight.utils.load_config', return_value=config): - with patch('paperweight.logging_config.logging.config.dictConfig') as mock_dict_config: - setup_logging(config['logging']) - called_config = mock_dict_config.call_args[0][0] - assert called_config['root']['level'] == 'INFO' diff --git a/tests/test_main.py b/tests/test_main.py deleted file mode 100644 index c69537b..0000000 --- a/tests/test_main.py +++ /dev/null @@ -1,39 +0,0 @@ -import pytest -import yaml - -from paperweight.main import main - - -@pytest.fixture -def mock_main_dependencies(mocker): - return ( - mocker.patch('paperweight.main.load_config'), - mocker.patch('paperweight.main.setup_logging'), - mocker.patch('paperweight.main.get_recent_papers'), - 
mocker.patch('paperweight.main.process_papers'), - mocker.patch('paperweight.main.get_abstracts'), - mocker.patch('paperweight.main.compile_and_send_notifications'), - mocker.patch('paperweight.main.logger') - ) - -def test_main_function_error_handling(mock_main_dependencies): - mock_load_config, _, _, _, _, _, mock_logger = mock_main_dependencies - mock_load_config.side_effect = yaml.YAMLError("Invalid YAML") - - main() - mock_logger.error.assert_called_with("Configuration error: Invalid YAML") - -def test_main_function_notification_success(mock_main_dependencies): - _, _, _, _, _, mock_notifications, mock_logger = mock_main_dependencies - mock_notifications.return_value = True - - main() - mock_logger.info.assert_called_with("Notifications compiled and sent successfully") - -def test_main_function_notification_failure(mock_main_dependencies): - _, _, _, _, _, mock_notifications, mock_logger = mock_main_dependencies - mock_notifications.return_value = False - - main() - mock_logger.warning.assert_called_with("Failed to send notifications") - diff --git a/tests/test_notifier.py b/tests/test_notifier.py index 0e968a6..6782250 100644 --- a/tests/test_notifier.py +++ b/tests/test_notifier.py @@ -1,6 +1,14 @@ +from datetime import date from unittest.mock import MagicMock, patch -from paperweight.notifier import compile_and_send_notifications, send_email_notification +from paperweight.notifier import ( + compile_and_send_notifications, + render_atom_feed, + render_json_digest, + render_text_digest, + send_email_notification, + write_output, +) @patch('paperweight.notifier.smtplib.SMTP') @@ -24,6 +32,7 @@ def test_send_email_notification(mock_smtp): mock_server.sendmail.assert_called_once() mock_server.quit.assert_called_once() + @patch('paperweight.notifier.send_email_notification') def test_compile_and_send_notifications_empty_list(mock_send_email): config = { @@ -38,105 +47,68 @@ def test_compile_and_send_notifications_empty_list(mock_send_email): 
compile_and_send_notifications([], config) mock_send_email.assert_not_called() -@patch('paperweight.notifier.send_email_notification') -def test_compile_and_send_notifications_default_sort_order(mock_send_email): - papers = [ - {'title': 'Paper A', 'date': '2023-01-01', 'summary': 'Summary A', 'link': 'http://a.com', 'relevance_score': 0.8}, - {'title': 'Paper B', 'date': '2023-01-02', 'summary': 'Summary B', 'link': 'http://b.com', 'relevance_score': 0.9}, - {'title': 'Paper C', 'date': '2023-01-03', 'summary': 'Summary C', 'link': 'http://c.com', 'relevance_score': 0.7}, - ] - config = { - 'email': { - 'from': 'sender@example.com', - 'to': 'recipient@example.com', - 'password': 'password123', - 'smtp_server': 'smtp.example.com', - 'smtp_port': 587 - } - } - compile_and_send_notifications(papers, config) - - mock_send_email.assert_called_once() - _, body, _ = mock_send_email.call_args[0] - - # Check if the order of papers in the email body matches the input order - assert body.index('Paper A') < body.index('Paper B') < body.index('Paper C') - -@patch('paperweight.notifier.send_email_notification') -def test_compile_and_send_notifications_explicit_relevance_sort(mock_send_email): +def test_render_text_digest_deterministic(): papers = [ - {'title': 'Paper A', 'date': '2023-01-01', 'summary': 'Summary A', 'link': 'http://a.com', 'relevance_score': 0.8}, - {'title': 'Paper B', 'date': '2023-01-02', 'summary': 'Summary B', 'link': 'http://b.com', 'relevance_score': 0.9}, - {'title': 'Paper C', 'date': '2023-01-03', 'summary': 'Summary C', 'link': 'http://c.com', 'relevance_score': 0.7}, + { + "title": "B Paper", + "date": date(2024, 1, 2), + "summary": "Summary B", + "link": "http://arxiv.org/abs/2", + "relevance_score": 2.0, + "triage_rationale": "Matched transformer + planning", + }, + { + "title": "A Paper", + "date": date(2024, 1, 1), + "summary": "Summary A", + "link": "http://arxiv.org/abs/1", + "relevance_score": 1.0, + "triage_rationale": "Matched profile 
keywords", + }, ] - config = { - 'email': { - 'from': 'sender@example.com', - 'to': 'recipient@example.com', - 'password': 'password123', - 'smtp_server': 'smtp.example.com', - 'smtp_port': 587, - 'sort_order': 'relevance' - } - } + digest = render_text_digest(papers, sort_order="alphabetical") + assert "1. A Paper" in digest + assert "2. B Paper" in digest + assert "Why: Matched profile keywords" in digest - compile_and_send_notifications(papers, config) - mock_send_email.assert_called_once() - _, body, _ = mock_send_email.call_args[0] - - # Check if the order of papers in the email body matches the input order - assert body.index('Paper A') < body.index('Paper B') < body.index('Paper C') - -@patch('paperweight.notifier.send_email_notification') -def test_compile_and_send_notifications_alphabetical_sort(mock_send_email): +def test_render_atom_feed_contains_required_elements(): papers = [ - {'title': 'Paper B', 'date': '2023-01-02', 'summary': 'Summary B', 'link': 'http://b.com', 'relevance_score': 0.9}, - {'title': 'Paper A', 'date': '2023-01-01', 'summary': 'Summary A', 'link': 'http://a.com', 'relevance_score': 0.8}, - {'title': 'Paper C', 'date': '2023-01-03', 'summary': 'Summary C', 'link': 'http://c.com', 'relevance_score': 0.7}, - ] - config = { - 'email': { - 'from': 'sender@example.com', - 'to': 'recipient@example.com', - 'password': 'password123', - 'smtp_server': 'smtp.example.com', - 'smtp_port': 587, - 'sort_order': 'alphabetical' + { + "title": "Test Paper", + "date": date(2024, 1, 2), + "summary": "Summary text", + "link": "http://arxiv.org/abs/2401.12345", + "relevance_score": 5.5, } - } + ] + feed = render_atom_feed(papers) + assert "" in feed + assert "Test Paper" in feed + assert "http://arxiv.org/abs/2401.12345" in feed - compile_and_send_notifications(papers, config) - mock_send_email.assert_called_once() - _, body, _ = mock_send_email.call_args[0] +def test_write_output_to_file(tmp_path): + target = tmp_path / "digest.txt" + 
write_output("hello\n", str(target)) + assert target.read_text(encoding="utf-8") == "hello\n" - # Check if the order of papers in the email body is alphabetical - assert body.index('Paper A') < body.index('Paper B') < body.index('Paper C') -@patch('paperweight.notifier.send_email_notification') -def test_compile_and_send_notifications_publication_time_sort(mock_send_email): +def test_render_json_digest_contains_expected_fields(): papers = [ - {'title': 'Paper B', 'date': '2023-01-02', 'summary': 'Summary B', 'link': 'http://b.com', 'relevance_score': 0.9}, - {'title': 'Paper A', 'date': '2023-01-01', 'summary': 'Summary A', 'link': 'http://a.com', 'relevance_score': 0.8}, - {'title': 'Paper C', 'date': '2023-01-03', 'summary': 'Summary C', 'link': 'http://c.com', 'relevance_score': 0.7}, - ] - config = { - 'email': { - 'from': 'sender@example.com', - 'to': 'recipient@example.com', - 'password': 'password123', - 'smtp_server': 'smtp.example.com', - 'smtp_port': 587, - 'sort_order': 'publication_time' + { + "title": "Test Paper", + "date": date(2024, 1, 2), + "summary": "Summary text", + "link": "http://arxiv.org/abs/2401.12345", + "relevance_score": 5.5, + "triage_rationale": "Matched core interests", } - } - - compile_and_send_notifications(papers, config) - - mock_send_email.assert_called_once() - _, body, _ = mock_send_email.call_args[0] - - # Check if the order of papers in the email body is by publication time (most recent first) - assert body.index('Paper C') < body.index('Paper B') < body.index('Paper A') + ] + payload = render_json_digest(papers) + assert '"title": "Test Paper"' in payload + assert '"why": "Matched core interests"' in payload + assert '"score": 5.5' in payload diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py new file mode 100644 index 0000000..4634c5d --- /dev/null +++ b/tests/test_pipeline.py @@ -0,0 +1,409 @@ +"""Integration tests for the paperweight pipeline. 
+ +This file tests the complete data flow through the system: +- Fetching papers from arXiv +- Processing and scoring papers +- Generating summaries +- Sending notifications +- Database storage (when enabled) + +Also includes error handling tests for the main entry point. +""" + +import os +import time +from datetime import date +from pathlib import Path +from unittest.mock import MagicMock + +import pytest +import requests +import yaml + +from paperweight.analyzer import get_abstracts +from paperweight.db import DatabaseConnectionError, connect_db, is_db_enabled +from paperweight.logging_config import setup_logging +from paperweight.main import main +from paperweight.notifier import compile_and_send_notifications +from paperweight.processor import process_papers +from paperweight.scraper import get_recent_papers +from paperweight.storage import ( + create_run, + finish_run, + insert_artifacts, + insert_scores, + insert_summaries, + upsert_papers, +) +from paperweight.utils import get_package_version, hash_config + +ROOT = Path(__file__).parent.parent +LIVE_INTEGRATION_ENV = "PAPERWEIGHT_LIVE_INTEGRATION" +MAILPIT_HOST_ENV = "PAPERWEIGHT_MAILPIT_HOST" +MAILPIT_PORT_ENV = "PAPERWEIGHT_MAILPIT_PORT" +MAILPIT_HTTP_PORT_ENV = "PAPERWEIGHT_MAILPIT_HTTP_PORT" + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + +@pytest.fixture +def integration_config(tmp_path): + """Load and patch config for integration testing.""" + config_path = ROOT / "config.yaml" + if not config_path.exists(): + pytest.skip("Config file not found") + + with config_path.open("r") as handle: + config = yaml.safe_load(handle) + + # Fast, predictable test settings + config["arxiv"]["max_results"] = 2 + config["arxiv"]["categories"] = ["cs.AI"] + config["processor"]["min_score"] = 0 + config["analyzer"]["type"] = "abstract" + # Route email through Mailpit to avoid real sends + 
email_config = config.setdefault("notifier", {}).setdefault("email", {}) + email_config.setdefault("from", "paperweight@example.com") + email_config.setdefault("to", "paperweight@example.com") + email_config["smtp_server"] = os.getenv(MAILPIT_HOST_ENV, "localhost") + email_config["smtp_port"] = int(os.getenv(MAILPIT_PORT_ENV, "1025")) + email_config["use_tls"] = False + email_config["use_auth"] = False + # Use a separate log file for tests + config["logging"]["file"] = str(tmp_path / "test_paperweight.log") + + return config + + +@pytest.fixture +def mock_main_dependencies(mocker): + """Mock all external dependencies for main() tests.""" + # Mock sys.argv to prevent argparse from picking up pytest arguments + mocker.patch('sys.argv', ['paperweight']) + + # Mock configuration and logging + mock_load_config = mocker.patch('paperweight.main.load_config') + mock_load_config.return_value = { + "logging": {"level": "INFO"}, + "processor": {}, + "analyzer": {"type": "abstract"}, + "notifier": {"email": {}}, + "db": {"enabled": False}, + } + mock_setup_logging = mocker.patch('paperweight.main.setup_logging') + + # Mock paper fetching and processing + mock_get_recent_papers = mocker.patch('paperweight.main.get_recent_papers') + mock_get_recent_papers.return_value = [{"id": "1234.5678", "title": "Test Paper"}] + mock_triage_papers = mocker.patch("paperweight.main.triage_papers") + mock_triage_papers.side_effect = lambda papers, _config: papers + mock_hydrate_papers = mocker.patch("paperweight.main.hydrate_papers_with_content") + mock_hydrate_papers.side_effect = lambda papers, _config: papers + + mock_process_papers = mocker.patch('paperweight.main.process_papers') + mock_process_papers.return_value = [ + {"id": "1234.5678", "title": "Test Paper", "relevance_score": 0.8} + ] + + mock_get_abstracts = mocker.patch('paperweight.main.get_abstracts') + mock_get_abstracts.return_value = ["Test summary"] + + # Mock digest rendering/writing + mock_render_text_digest = 
mocker.patch('paperweight.main.render_text_digest') + mock_render_text_digest.return_value = "digest" + mock_render_json_digest = mocker.patch('paperweight.main.render_json_digest') + mock_render_json_digest.return_value = "[]" + mock_write_output = mocker.patch('paperweight.main.write_output') + mock_render_atom_feed = mocker.patch('paperweight.main.render_atom_feed') + mock_render_atom_feed.return_value = "" + + # Mock notifications + mock_notifications = mocker.patch( + 'paperweight.main.compile_and_send_notifications' + ) + mock_notifications.return_value = True + + # Mock database functions + mock_is_db_enabled = mocker.patch('paperweight.main.is_db_enabled') + mock_is_db_enabled.return_value = False + + # Mock logger + mock_logger = mocker.patch('paperweight.main.logger') + + return { + 'load_config': mock_load_config, + 'setup_logging': mock_setup_logging, + 'get_recent_papers': mock_get_recent_papers, + 'triage_papers': mock_triage_papers, + 'hydrate_papers_with_content': mock_hydrate_papers, + 'process_papers': mock_process_papers, + 'get_abstracts': mock_get_abstracts, + 'render_text_digest': mock_render_text_digest, + 'render_json_digest': mock_render_json_digest, + 'write_output': mock_write_output, + 'render_atom_feed': mock_render_atom_feed, + 'notifications': mock_notifications, + 'logger': mock_logger, + 'is_db_enabled': mock_is_db_enabled, + } + + +# --------------------------------------------------------------------------- +# Full Pipeline Tests +# --------------------------------------------------------------------------- + +@pytest.mark.integration +@pytest.mark.skipif( + not os.getenv(LIVE_INTEGRATION_ENV), + reason=f"Set {LIVE_INTEGRATION_ENV}=1 to run live integration test." 
+) +def test_pipeline_end_to_end(integration_config): # noqa: C901 + """Full pipeline: fetch, process, summarize, store, notify.""" + setup_logging(integration_config["logging"]) + + # Mailpit config + mailpit_host = os.getenv(MAILPIT_HOST_ENV, "localhost") + mailpit_http_port = os.getenv(MAILPIT_HTTP_PORT_ENV, "8025") + mailpit_url = f"http://{mailpit_host}:{mailpit_http_port}/api/v1/messages" + + # Check Mailpit before run + try: + resp = requests.get(mailpit_url, timeout=2) + resp.raise_for_status() + start_count = len(resp.json().get("messages", [])) + except Exception as e: + pytest.skip(f"Mailpit not accessible: {e}") + + # Pipeline Steps + db_enabled = is_db_enabled(integration_config) + run_id = None + run_status = "failed" + run_notes = None + + try: + if db_enabled: + config_hash = hash_config(integration_config) + pipeline_version = get_package_version() + with connect_db(integration_config["db"]) as conn: + run_id = create_run(conn, config_hash, pipeline_version, "pytest_integration") + conn.commit() + + # 1. Fetch + papers = get_recent_papers(integration_config, force_refresh=True) + assert len(papers) > 0, "No papers fetched" + + if db_enabled: + with connect_db(integration_config["db"]) as conn: + paper_id_map = upsert_papers(conn, papers) + insert_artifacts(conn, papers, paper_id_map) + conn.commit() + + # 2. Process + processed = process_papers(papers, integration_config["processor"]) + assert len(processed) > 0, "Processor filtered all papers" + + # 3. Summarize + summaries = get_abstracts(processed, integration_config["analyzer"]) + for paper, summary in zip(processed, summaries): + paper["summary"] = summary or paper.get("abstract", "") + assert paper["summary"], "Paper summary is empty" + + if db_enabled: + with connect_db(integration_config["db"]) as conn: + insert_scores(conn, run_id, processed, paper_id_map) + insert_summaries(conn, run_id, processed, paper_id_map) + conn.commit() + + # 4. 
Notify + notification_sent = compile_and_send_notifications(processed, integration_config["notifier"]) + assert notification_sent, "Notification send failed" + + # 5. Verify Email + timeout = 10 + start_time = time.time() + email_received = False + + while time.time() - start_time < timeout: + try: + resp = requests.get(mailpit_url, timeout=2) + resp.raise_for_status() + current_count = len(resp.json().get("messages", [])) + if current_count > start_count: + email_received = True + break + except Exception: + pass + time.sleep(1) + + assert email_received, "Mailpit did not receive the notification email" + + run_status = "success" + + except Exception as e: + run_notes = str(e) + raise + finally: + if db_enabled and run_id: + with connect_db(integration_config["db"]) as conn: + finish_run(conn, run_id, run_status, run_notes) + conn.commit() + + +@pytest.mark.integration +def test_pipeline_end_to_end_stubbed(monkeypatch, tmp_path): + """Full pipeline with stubbed external calls.""" + config = { + "arxiv": {"categories": ["cs.AI"], "max_results": 2}, + "processor": { + "keywords": ["ai", "transformer"], + "exclusion_keywords": [], + "important_words": [], + "title_keyword_weight": 3, + "abstract_keyword_weight": 2, + "content_keyword_weight": 1, + "exclusion_keyword_penalty": 5, + "important_words_weight": 0.5, + "min_score": 0, + }, + "analyzer": {"type": "abstract"}, + "notifier": { + "email": { + "from": "paperweight@example.com", + "to": "paperweight@example.com", + "smtp_server": "localhost", + "smtp_port": 1025, + "use_tls": False, + "use_auth": False, + } + }, + "logging": {"level": "INFO", "file": str(tmp_path / "test_paperweight.log")}, + "db": {"enabled": False}, + } + + fake_papers = [ + { + "title": "Transformer Advances", + "link": "http://arxiv.org/abs/2401.12345", + "date": date(2024, 1, 15), + "abstract": "Transformer models for AI.", + }, + { + "title": "AI Systems at Scale", + "link": "http://arxiv.org/abs/2401.67890", + "date": date(2024, 1, 14), + 
"abstract": "Scaling AI systems.", + }, + ] + + def fake_fetch_recent_papers(_config, _days): + return list(fake_papers) + + def fake_fetch_paper_contents(paper_ids): + return [(paper_id, b"stub content", "pdf") for paper_id in paper_ids] + + monkeypatch.setattr( + "paperweight.scraper.fetch_recent_papers", fake_fetch_recent_papers + ) + monkeypatch.setattr( + "paperweight.scraper.fetch_paper_contents", fake_fetch_paper_contents + ) + monkeypatch.setattr( + "paperweight.scraper.extract_text_from_source", lambda _c, _m: "stub text" + ) + monkeypatch.setattr("paperweight.scraper.get_last_processed_date", lambda: None) + monkeypatch.setattr("paperweight.scraper.save_last_processed_date", lambda _d: None) + + send_email = MagicMock(return_value=True) + monkeypatch.setattr("paperweight.notifier.send_email_notification", send_email) + + setup_logging(config["logging"]) + + papers = get_recent_papers(config, force_refresh=True) + assert len(papers) == 2 + + processed = process_papers(papers, config["processor"]) + summaries = get_abstracts(processed, config["analyzer"]) + for paper, summary in zip(processed, summaries): + paper["summary"] = summary or paper.get("abstract", "") + assert paper["summary"] + + notification_sent = compile_and_send_notifications(processed, config["notifier"]) + assert notification_sent is True + send_email.assert_called_once() + + +# --------------------------------------------------------------------------- +# Error Handling Tests (absorbed from test_main.py) +# --------------------------------------------------------------------------- + +class TestMainErrorHandling: + """Tests for error handling in the main entry point.""" + + def test_config_yaml_error(self, mock_main_dependencies): + """YAML parsing errors are logged.""" + mock_main_dependencies['load_config'].side_effect = yaml.YAMLError("Invalid YAML") + + main() + mock_main_dependencies['logger'].error.assert_called_with( + "Configuration error: Invalid YAML" + ) + + def 
test_network_error(self, mock_main_dependencies): + """Network errors are logged.""" + mock_main_dependencies['load_config'].side_effect = requests.RequestException( + "Connection failed" + ) + + main() + mock_main_dependencies['logger'].error.assert_called_with( + "Network error occurred: Connection failed" + ) + + def test_database_unreachable(self, mock_main_dependencies, mocker): + """Database connection errors are logged.""" + mocker.patch( + 'paperweight.main.setup_and_get_papers', + side_effect=DatabaseConnectionError("Database enabled but unreachable."), + ) + + main() + mock_main_dependencies['logger'].error.assert_called_with( + "Database error: Database enabled but unreachable." + ) + + def test_no_papers_found(self, mock_main_dependencies): + """When no papers are found, notification is not called.""" + mock_main_dependencies['get_recent_papers'].return_value = [] + + main() + mock_main_dependencies['notifications'].assert_not_called() + mock_main_dependencies['logger'].info.assert_any_call( + "No new papers to process. Exiting." 
+ ) + + def test_default_delivery_writes_digest(self, mock_main_dependencies): + """Default mode renders and writes stdout digest.""" + main() + mock_main_dependencies['get_recent_papers'].assert_called_once_with( + mock_main_dependencies['load_config'].return_value, include_content=False + ) + mock_main_dependencies['triage_papers'].assert_called_once() + mock_main_dependencies['hydrate_papers_with_content'].assert_called_once() + mock_main_dependencies['render_text_digest'].assert_called_once() + mock_main_dependencies['write_output'].assert_called_once() + mock_main_dependencies['notifications'].assert_not_called() + + def test_email_delivery_uses_notifier(self, mock_main_dependencies, monkeypatch): + """Email mode delegates to notifier adapter.""" + monkeypatch.setattr('sys.argv', ['paperweight', '--delivery', 'email']) + main() + mock_main_dependencies['notifications'].assert_called_once() + + def test_json_delivery_uses_json_renderer(self, mock_main_dependencies, monkeypatch): + """JSON mode renders JSON payload and writes output.""" + monkeypatch.setattr('sys.argv', ['paperweight', '--delivery', 'json']) + main() + mock_main_dependencies['render_json_digest'].assert_called_once() + mock_main_dependencies['write_output'].assert_called_once() diff --git a/tests/test_processor.py b/tests/test_processor.py index b20fa28..69015d8 100644 --- a/tests/test_processor.py +++ b/tests/test_processor.py @@ -1,3 +1,9 @@ +"""Tests for the paper scoring processor. + +This file tests the scoring algorithm that ranks papers by relevance +based on keywords, exclusion criteria, and important words. +""" + import pytest from paperweight.processor import ( @@ -7,43 +13,10 @@ ) -def test_calculate_paper_score(): - paper = { - 'title': 'Test Paper on AI', - 'abstract': 'This is a test abstract about artificial intelligence.', - 'content': 'This is the main content of the paper discussing AI techniques.' 
- } - config = { - 'keywords': ['AI', 'artificial intelligence'], - 'exclusion_keywords': ['biology'], - 'important_words': ['neural networks'], - 'title_keyword_weight': 3, - 'abstract_keyword_weight': 2, - 'content_keyword_weight': 1, - 'exclusion_keyword_penalty': 5, - 'important_words_weight': 0.5 - } - - score, breakdown = calculate_paper_score(paper, config) - assert score > 0 - assert 'keyword_matching' in breakdown - assert 'exclusion_penalty' in breakdown - assert 'important_words' in breakdown - -def test_process_papers(): - papers = [ - { - 'title': 'AI in Healthcare', - 'abstract': 'This paper discusses the applications of AI in healthcare.', - 'content': 'Artificial Intelligence has numerous applications in healthcare...' - }, - { - 'title': 'Quantum Computing Advances', - 'abstract': 'Recent advancements in quantum computing are presented.', - 'content': 'Quantum computing has seen significant progress in recent years...' - } - ] - processor_config = { +@pytest.fixture +def processor_config(): + """Standard processor configuration for tests.""" + return { 'keywords': ['AI', 'healthcare', 'quantum', 'computing'], 'exclusion_keywords': ['biology'], 'important_words': ['artificial intelligence'], @@ -52,122 +25,85 @@ def test_process_papers(): 'content_keyword_weight': 1, 'exclusion_keyword_penalty': 5, 'important_words_weight': 0.5, - 'min_score': 5, - } - processed_papers = process_papers(papers, processor_config) - - assert len(processed_papers) == 2 - assert processed_papers[0]['relevance_score'] > processed_papers[1]['relevance_score'] - assert 'score_breakdown' in processed_papers[0] - -def test_process_papers_empty_input(): - processor_config = { - 'keywords': ['AI'], - 'exclusion_keywords': ['biology'], - 'important_words': ['neural networks'], - 'title_keyword_weight': 3, - 'abstract_keyword_weight': 2, - 'content_keyword_weight': 1, - 'exclusion_keyword_penalty': 5, - 'important_words_weight': 0.5, - 'min_score': 5, - } - result = 
process_papers([], processor_config) - assert result == [] - -def test_normalize_scores_edge_cases(): - papers = [ - {'relevance_score': 10}, - {'relevance_score': 10}, - {'relevance_score': 10} - ] - normalized_papers = normalize_scores(papers) - assert all(paper['normalized_score'] == 1.0 for paper in normalized_papers) - -def test_calculate_paper_score_various_inputs(): - paper = { - 'title': 'AI in Healthcare', - 'abstract': 'This paper discusses AI applications in healthcare.', - 'content': 'Artificial Intelligence has numerous applications in healthcare...' - } - config = { - 'keywords': ['AI', 'healthcare'], - 'exclusion_keywords': ['biology'], - 'important_words': ['artificial intelligence'], - 'title_keyword_weight': 3, - 'abstract_keyword_weight': 2, - 'content_keyword_weight': 1, - 'exclusion_keyword_penalty': 5, - 'important_words_weight': 0.5 - } - score, breakdown = calculate_paper_score(paper, config) - assert score > 0 - assert 'keyword_matching' in breakdown - assert 'exclusion_penalty' in breakdown - assert 'important_words' in breakdown - -def test_normalize_scores(): - papers = [ - {'relevance_score': 10}, - {'relevance_score': 20}, - {'relevance_score': 30}, - {'relevance_score': 40}, - ] - normalized_papers = normalize_scores(papers) - - assert normalized_papers[0]['normalized_score'] == 0.0 - assert normalized_papers[-1]['normalized_score'] == 1.0 - assert 0.0 < normalized_papers[1]['normalized_score'] < normalized_papers[2]['normalized_score'] < 1.0 - -def test_process_papers_with_normalization(): - papers = [ - {'title': 'Paper A', 'abstract': 'Abstract A', 'content': 'Content A'}, - {'title': 'Paper B', 'abstract': 'Abstract B', 'content': 'Content B'}, - {'title': 'Paper C', 'abstract': 'Abstract C', 'content': 'Content C'}, - ] - processor_config = { - 'keywords': ['A', 'B', 'C'], - 'exclusion_keywords': [], - 'important_words': [], - 'title_keyword_weight': 3, - 'abstract_keyword_weight': 2, - 'content_keyword_weight': 1, - 
'exclusion_keyword_penalty': 5, - 'important_words_weight': 0.5, 'min_score': 0, } - processed_papers = process_papers(papers, processor_config) - - assert len(processed_papers) == 3 - assert all('normalized_score' in paper for paper in processed_papers) - assert processed_papers[0]['normalized_score'] >= processed_papers[1]['normalized_score'] >= processed_papers[2]['normalized_score'] - -@pytest.mark.parametrize("use_normalized_ranking", [True, False]) -def test_process_papers_ranking_consistency(use_normalized_ranking): - papers = [ - {'title': 'Paper A', 'abstract': 'Abstract A', 'content': 'Content A'}, - {'title': 'Paper B', 'abstract': 'Abstract B', 'content': 'Content B'}, - {'title': 'Paper C', 'abstract': 'Abstract C', 'content': 'Content C'}, - ] - processor_config = { - 'keywords': ['A', 'B', 'C'], - 'exclusion_keywords': [], - 'important_words': [], - 'title_keyword_weight': 3, - 'abstract_keyword_weight': 2, - 'content_keyword_weight': 1, - 'exclusion_keyword_penalty': 5, - 'important_words_weight': 0.5, - 'min_score': 0, - } - processed_papers = process_papers(papers, processor_config) +class TestCalculatePaperScore: + """Tests for the calculate_paper_score function.""" - if use_normalized_ranking: - assert all('normalized_score' in paper for paper in processed_papers) - scores = [paper['normalized_score'] for paper in processed_papers] - else: - scores = [paper['relevance_score'] for paper in processed_papers] + def test_score_breakdown_structure(self, processor_config): + """Score calculation returns score and breakdown dict.""" + paper = { + 'title': 'AI in Healthcare', + 'abstract': 'This paper discusses AI applications in healthcare.', + 'content': 'Artificial Intelligence has numerous applications in healthcare...' 
+ } - assert scores == sorted(scores, reverse=True) + score, breakdown = calculate_paper_score(paper, processor_config) + + assert score > 0 + assert 'keyword_matching' in breakdown + assert 'exclusion_penalty' in breakdown + assert 'important_words' in breakdown + + +class TestProcessPapers: + """Tests for the process_papers function.""" + + def test_papers_sorted_by_relevance(self, processor_config): + """Papers are sorted by relevance score, highest first.""" + papers = [ + { + 'title': 'AI in Healthcare', + 'abstract': 'This paper discusses the applications of AI in healthcare.', + 'content': 'Artificial Intelligence has numerous applications in healthcare...' + }, + { + 'title': 'Quantum Computing Advances', + 'abstract': 'Recent advancements in quantum computing are presented.', + 'content': 'Quantum computing has seen significant progress in recent years...' + } + ] + processor_config['min_score'] = 5 + + processed = process_papers(papers, processor_config) + + assert len(processed) == 2 + assert processed[0]['relevance_score'] > processed[1]['relevance_score'] + assert 'score_breakdown' in processed[0] + assert 'normalized_score' in processed[0] + + def test_empty_input_returns_empty(self, processor_config): + """Empty paper list returns empty result.""" + result = process_papers([], processor_config) + assert result == [] + + +class TestNormalizeScores: + """Tests for the normalize_scores function.""" + + def test_normalization_range(self): + """Scores are normalized to 0-1 range.""" + papers = [ + {'relevance_score': 10}, + {'relevance_score': 20}, + {'relevance_score': 30}, + {'relevance_score': 40}, + ] + normalized = normalize_scores(papers) + + assert normalized[0]['normalized_score'] == 0.0 + assert normalized[-1]['normalized_score'] == 1.0 + assert 0.0 < normalized[1]['normalized_score'] < normalized[2]['normalized_score'] < 1.0 + + def test_equal_scores_normalize_to_one(self): + """When all scores are equal, normalized scores are 1.0.""" + papers = 
[ + {'relevance_score': 10}, + {'relevance_score': 10}, + {'relevance_score': 10} + ] + normalized = normalize_scores(papers) + + assert all(paper['normalized_score'] == 1.0 for paper in normalized) diff --git a/tests/test_scraper.py b/tests/test_scraper.py index 4888e88..3d585ce 100644 --- a/tests/test_scraper.py +++ b/tests/test_scraper.py @@ -1,33 +1,35 @@ -import os from datetime import date, datetime from unittest.mock import MagicMock, patch import pytest -from requests.exceptions import HTTPError - -from paperweight.scraper import extract_text_from_source, fetch_arxiv_papers - - -@patch('paperweight.scraper.requests.get') -def test_fetch_arxiv_papers(mock_get): - mock_response = MagicMock() - mock_response.content = ''' - - - http://arxiv.org/abs/2401.12345 - 2024-01-15T00:00:00Z - Test Paper 1 - This is test abstract 1. - - - http://arxiv.org/abs/2401.67890 - 2024-01-14T00:00:00Z - Test Paper 2 - This is test abstract 2. - - - ''' - mock_get.return_value = mock_response + +from paperweight.db import DatabaseConnectionError +from paperweight.scraper import ( + extract_text_from_source, + fetch_arxiv_papers, + get_recent_papers, + hydrate_papers_with_content, +) + + +@patch('paperweight.scraper.arxiv.Client') +def test_fetch_arxiv_papers(MockClient): + mock_client_instance = MockClient.return_value + + # Mock results + result1 = MagicMock() + result1.title = 'Test Paper 1' + result1.entry_id = 'http://arxiv.org/abs/2401.12345' + result1.published = datetime(2024, 1, 15) + result1.summary = 'This is test abstract 1.' + + result2 = MagicMock() + result2.title = 'Test Paper 2' + result2.entry_id = 'http://arxiv.org/abs/2401.67890' + result2.published = datetime(2024, 1, 14, 12, 0, 0) + result2.summary = 'This is test abstract 2.' 
+ + mock_client_instance.results.return_value = [result1, result2] start_date = datetime(2024, 1, 14).date() papers = fetch_arxiv_papers('cs.AI', start_date, max_results=2) @@ -38,110 +40,141 @@ def test_fetch_arxiv_papers(mock_get): assert papers[0]['date'] == datetime(2024, 1, 15).date() assert papers[1]['date'] == datetime(2024, 1, 14).date() -def test_extract_text_from_source(): - print("Executing test_extract_text_from_source") # Add this line - # Test PDF extraction - current_dir = os.path.dirname(os.path.abspath(__file__)) - pdf_path = os.path.join(current_dir, 'test_data', 'test.pdf') - assert os.path.exists(pdf_path), f"Test PDF file not found at {pdf_path}" +@patch('paperweight.scraper.arxiv.Client') +def test_fetch_arxiv_papers_error(MockClient): + mock_client_instance = MockClient.return_value + mock_client_instance.results.side_effect = Exception("General Error") - with open(pdf_path, 'rb') as f: - pdf_content = f.read() - pdf_text = extract_text_from_source(pdf_content, 'pdf') - assert "Test PDF content" in pdf_text + with pytest.raises(Exception, match="General Error"): + fetch_arxiv_papers('cs.AI', date.today(), max_results=10) - # Test LaTeX source extraction - latex_content = b''' - \\documentclass{article} - \\begin{document} - This is a test LaTeX document. - \\end{document} - ''' - latex_text = extract_text_from_source(latex_content, 'source') - assert "This is a test LaTeX document." in latex_text -@patch('paperweight.scraper.requests.get') -def test_fetch_arxiv_papers_invalid_category(mock_get): - mock_response = MagicMock() - mock_response.status_code = 400 - mock_response.text = "Invalid field: cat" - mock_response.raise_for_status.side_effect = HTTPError("400 Client Error: Bad Request") - mock_get.return_value = mock_response - - with pytest.raises(ValueError, match="Invalid arXiv category: invalid_category. 
Please check your configuration."): - fetch_arxiv_papers('invalid_category', date.today(), max_results=10) - -@patch('paperweight.scraper.requests.get') -def test_fetch_arxiv_papers_other_http_error(mock_get): - mock_response = MagicMock() - mock_response.status_code = 500 - mock_response.raise_for_status.side_effect = HTTPError("500 Server Error: Internal Server Error") - mock_get.return_value = mock_response - - with pytest.raises(HTTPError, match="500 Server Error: Internal Server Error"): - fetch_arxiv_papers('cs.AI', date.today(), max_results=10) +@patch('paperweight.scraper.arxiv.Client') +def test_fetch_arxiv_papers_max_results(MockClient): + mock_client_instance = MockClient.return_value -@patch('paperweight.scraper.requests.get') -def test_fetch_arxiv_papers_max_results(mock_get): - mock_response = MagicMock() - mock_response.content = ''' - - - http://arxiv.org/abs/2401.12345 - 2024-01-15T00:00:00Z - Test Paper 1 - This is test abstract 1. - - - http://arxiv.org/abs/2401.67890 - 2024-01-14T00:00:00Z - Test Paper 2 - This is test abstract 2. - - - http://arxiv.org/abs/2401.11111 - 2024-01-13T00:00:00Z - Test Paper 3 - This is test abstract 3. 
- - - ''' - mock_get.return_value = mock_response + result1 = MagicMock() + result1.title = 'Test Paper 1' + result1.entry_id = 'http://arxiv.org/abs/2401.12345' + result1.published = datetime(2024, 1, 15) + result1.summary = 'Summary 1' + + result2 = MagicMock() + result2.title = 'Test Paper 2' + result2.entry_id = 'http://arxiv.org/abs/2401.67890' + result2.published = datetime(2024, 1, 14) + result2.summary = 'Summary 2' + + result3 = MagicMock() + result3.title = 'Test Paper 3' + result3.entry_id = 'http://arxiv.org/abs/2401.11111' + result3.published = datetime(2024, 1, 13) + result3.summary = 'Summary 3' + + # We simulate the iterator returning these + mock_client_instance.results.return_value = [result1, result2, result3] start_date = datetime(2024, 1, 13).date() # Test with max_results=2 + # We need to reset the mock if we want to run multiple calls in one test safely regarding return values if they were stateful iterators, + # but here it returns a list which is iterable multiple times. 
+ papers = fetch_arxiv_papers('cs.AI', start_date, max_results=2) assert len(papers) == 2 assert papers[0]['title'] == 'Test Paper 1' assert papers[1]['title'] == 'Test Paper 2' - # Test with max_results=None (should return all papers) + # Test with max_results=None papers = fetch_arxiv_papers('cs.AI', start_date, max_results=None) assert len(papers) == 3 assert papers[2]['title'] == 'Test Paper 3' - # Test with max_results=0 (should be treated as None and return all papers) + # Test with max_results=0 papers = fetch_arxiv_papers('cs.AI', start_date, max_results=0) assert len(papers) == 3 assert papers[2]['title'] == 'Test Paper 3' - # Verify that the max_results parameter is passed correctly to the API - calls = mock_get.call_args_list - assert len(calls) == 3 - # Check max_results=2 call - assert calls[0][1]['params']['max_results'] == 2 - assert 'max_results' not in calls[1][1]['params'] - assert 'max_results' not in calls[2][1]['params'] - - # Verify that other parameters are correct - for call in calls: - assert call[1]['params']['search_query'] == 'cat:cs.AI' - assert call[1]['params']['sortBy'] == 'submittedDate' - assert call[1]['params']['sortOrder'] == 'descending' +def test_extract_text_from_latex_source(): + """Extract text from LaTeX source content.""" + latex_content = b''' + \\documentclass{article} + \\begin{document} + This is a test LaTeX document. + \\end{document} + ''' + latex_text = extract_text_from_source(latex_content, 'source') + assert "This is a test LaTeX document." 
in latex_text def test_extract_text_from_source_invalid_type(): with pytest.raises(ValueError, match="Invalid source type: invalid_type"): extract_text_from_source(b'content', 'invalid_type') + +def test_get_recent_papers_db_unreachable(): + config = { + 'db': { + 'enabled': True, + 'host': 'localhost', + 'port': 5432, + 'database': 'paperweight', + 'user': 'paperweight', + 'password': 'pass', + 'sslmode': 'prefer' + } + } + with patch('paperweight.scraper.connect_db', side_effect=Exception("boom")): + with pytest.raises(DatabaseConnectionError, match="Database enabled but unreachable"): + get_recent_papers(config) + + +def test_hydrate_papers_with_content(monkeypatch): + papers = [ + { + "title": "Test Paper", + "link": "http://arxiv.org/abs/2401.12345", + "date": datetime(2024, 1, 15).date(), + "abstract": "Test abstract", + } + ] + config = {"db": {"enabled": False}} + + monkeypatch.setattr( + "paperweight.scraper.fetch_paper_contents", + lambda _ids: [("2401.12345", b"pdf-bytes", "pdf")], + ) + monkeypatch.setattr( + "paperweight.scraper.extract_text_from_source", lambda _content, _method: "text" + ) + + hydrated = hydrate_papers_with_content(papers, config) + assert len(hydrated) == 1 + assert hydrated[0]["id"] == "2401.12345" + assert hydrated[0]["content"] == "text" + + +def test_get_recent_papers_without_content(monkeypatch): + config = { + "arxiv": {"categories": ["cs.AI"], "max_results": 2}, + "db": {"enabled": False}, + } + fake_papers = [ + { + "title": "Test Paper", + "link": "http://arxiv.org/abs/2401.12345", + "date": datetime(2024, 1, 15).date(), + "abstract": "Test abstract", + } + ] + + monkeypatch.setattr("paperweight.scraper.get_last_processed_date", lambda: None) + monkeypatch.setattr("paperweight.scraper.save_last_processed_date", lambda _d: None) + monkeypatch.setattr("paperweight.scraper.fetch_recent_papers", lambda _c, _d: fake_papers) + fetch_content = MagicMock() + monkeypatch.setattr("paperweight.scraper.fetch_paper_contents", 
fetch_content) + + papers = get_recent_papers(config, force_refresh=True, include_content=False) + assert len(papers) == 1 + assert papers[0]["content"] == "" + fetch_content.assert_not_called() diff --git a/tests/test_utils.py b/tests/test_utils.py deleted file mode 100644 index e339502..0000000 --- a/tests/test_utils.py +++ /dev/null @@ -1,136 +0,0 @@ -import os -import tempfile -from unittest.mock import patch - -import pytest -import yaml - -from paperweight.utils import expand_env_vars, load_config, override_with_env - - -@pytest.fixture -def sample_config(): - return { - 'arxiv': {'categories': ['cs.AI'], 'max_results': 50}, - 'processor': {'keywords': ['AI']}, - 'analyzer': {'type': 'summary', 'llm_provider': 'openai'}, - 'notifier': {'email': {'to': 'test@example.com', 'from': 'sender@example.com', 'password': 'pass', 'smtp_server': 'smtp.example.com', 'smtp_port': 587}}, - 'logging': {'level': 'INFO'} - } - -@pytest.fixture -def config_file(sample_config): - with tempfile.NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as temp_config: - yaml.dump(sample_config, temp_config) - yield temp_config.name - os.unlink(temp_config.name) - -def test_load_config_basic(config_file): - with patch.dict(os.environ, {'OPENAI_API_KEY': 'dummy_key'}): - config = load_config(config_path=config_file) - assert isinstance(config, dict) - assert 'arxiv' in config - assert 'processor' in config - assert 'notifier' in config - assert config['arxiv']['max_results'] == 50 - -def test_load_config_env_vars(config_file): - with patch.dict(os.environ, { - 'PAPERWEIGHT_MAX_RESULTS': '100', - 'OPENAI_API_KEY': 'test_api_key' - }): - config = load_config(config_path=config_file) - assert config['arxiv']['max_results'] == 100 - assert config['analyzer']['api_key'] == 'test_api_key' - -def test_load_config_missing_env_vars(config_file, sample_config): - sample_config['notifier']['email']['password'] = '$MISSING_VAR' - with open(config_file, 'w') as f: - yaml.dump(sample_config, f) 
- with patch.dict(os.environ, {'OPENAI_API_KEY': 'dummy_key'}): - config = load_config(config_path=config_file) - assert config['notifier']['email']['password'] == '$MISSING_VAR' - -def test_load_config_logging(config_file): - with patch('paperweight.utils.logger') as mock_logger: - with patch.dict(os.environ, {'OPENAI_API_KEY': 'dummy_key'}): - load_config(config_path=config_file) - mock_logger.info.assert_called_with("Configuration loaded and validated successfully") - -def test_load_config_missing_api_key(config_file): - with patch('paperweight.utils.load_dotenv', return_value=None): - with patch.dict(os.environ, {}, clear=True): - with pytest.raises(ValueError, match="Missing API key for openai"): - load_config(config_path=config_file) - -def test_load_config_with_api_key(config_file, sample_config): - sample_config['analyzer']['type'] = 'summary' - sample_config['analyzer']['llm_provider'] = 'openai' - with open(config_file, 'w') as f: - yaml.dump(sample_config, f) - - with patch.dict(os.environ, {'OPENAI_API_KEY': 'test_api_key'}): - config = load_config(config_path=config_file) - assert config['analyzer']['api_key'] == 'test_api_key' - -def test_load_config_non_summary_type(config_file, sample_config): - sample_config['analyzer']['type'] = 'abstract' - with open(config_file, 'w') as f: - yaml.dump(sample_config, f) - - with patch.dict(os.environ, {}, clear=True): - config = load_config(config_path=config_file) - assert 'api_key' not in config['analyzer'] - -def test_expand_env_vars(): - with patch.dict(os.environ, {'TEST_VAR': 'test_value', 'NESTED_VAR': 'nested_value'}): - config = { - 'simple': '$TEST_VAR', - 'nested': {'key': '${NESTED_VAR}', 'list': ['$TEST_VAR', '${NESTED_VAR}']}, - 'untouched': 123 - } - expanded = expand_env_vars(config) - assert expanded['simple'] == 'test_value' - assert expanded['nested']['key'] == 'nested_value' - assert expanded['nested']['list'] == ['test_value', 'nested_value'] - assert expanded['untouched'] == 123 - -def 
test_override_with_env(): - config = { - 'max_results': 50, - 'enable_feature': False, - 'api_url': 'https://api.example.com', - 'timeout': 30.5 - } - with patch.dict(os.environ, { - 'PAPERWEIGHT_MAX_RESULTS': '100', - 'PAPERWEIGHT_ENABLE_FEATURE': 'true', - 'PAPERWEIGHT_API_URL': 'https://new-api.example.com', - 'PAPERWEIGHT_TIMEOUT': '60.5' - }): - overridden = override_with_env(config) - assert overridden['max_results'] == 100 - assert overridden['enable_feature'] - assert overridden['api_url'] == 'https://new-api.example.com' - assert overridden['timeout'] == 60.5 - -def test_check_arxiv_section(): - from paperweight.utils import _check_arxiv_section - valid_config = {'categories': ['cs.AI'], 'max_results': 50} - _check_arxiv_section(valid_config) # Should not raise an exception - - with pytest.raises(ValueError, match="'max_results' in 'arxiv' section must be a non-negative integer"): - _check_arxiv_section({'categories': ['cs.AI'], 'max_results': -1}) - -def test_load_config_env_expansion_and_override(config_file, sample_config): - sample_config['arxiv']['max_results'] = '$ENV_MAX_RESULTS' - with open(config_file, 'w') as f: - yaml.dump(sample_config, f) - with patch.dict(os.environ, { - 'ENV_MAX_RESULTS': '50', - 'PAPERWEIGHT_MAX_RESULTS': '100', - 'OPENAI_API_KEY': 'dummy_api_key' - }): - config = load_config(config_path=config_file) - assert config['arxiv']['max_results'] == 100 - assert config['analyzer']['api_key'] == 'dummy_api_key' diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..33326af --- /dev/null +++ b/uv.lock @@ -0,0 +1,1274 @@ +version = 1 +revision = 3 +requires-python = ">=3.11, <3.14" +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version == '3.12.*'", + "python_full_version < '3.12'", +] + +[[package]] +name = "academic-paperweight" +version = "0.2.0" +source = { editable = "." 
} +dependencies = [ + { name = "arxiv" }, + { name = "pollux-ai" }, + { name = "psycopg", extra = ["binary"] }, + { name = "pypdf" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "tenacity" }, + { name = "tiktoken" }, +] + +[package.optional-dependencies] +dev = [ + { name = "html2text" }, + { name = "mypy" }, + { name = "pre-commit" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "pytest-mock" }, + { name = "ruff" }, + { name = "types-pyyaml" }, + { name = "types-requests" }, +] + +[package.metadata] +requires-dist = [ + { name = "arxiv", specifier = ">=2.1.0" }, + { name = "html2text", marker = "extra == 'dev'", specifier = ">=2024.2.26" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.11.2" }, + { name = "pollux-ai", specifier = ">=1.0.0a0" }, + { name = "pre-commit", marker = "extra == 'dev'", specifier = ">=3.8.0" }, + { name = "psycopg", extras = ["binary"], specifier = ">=3.2.1" }, + { name = "pypdf", specifier = ">=4.3.1" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=9.0.2" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=5.0.0" }, + { name = "pytest-mock", marker = "extra == 'dev'", specifier = ">=3.10.0" }, + { name = "python-dotenv", specifier = ">=1.0.1" }, + { name = "pyyaml", specifier = ">=6.0.2" }, + { name = "requests", specifier = ">=2.31.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.6.4" }, + { name = "tenacity", specifier = ">=9.0.0" }, + { name = "tiktoken", specifier = ">=0.9.0" }, + { name = "types-pyyaml", marker = "extra == 'dev'", specifier = ">=6.0.12.20240808" }, + { name = "types-requests", marker = "extra == 'dev'", specifier = ">=2.32.0.20240907" }, +] +provides-extras = ["dev"] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, +] + +[[package]] +name = "arxiv" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "feedparser" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/94/f376f763d6e4c08a198efdc8d6e08fc6f46f38536bbf08e26111197fef8f/arxiv-2.1.0.tar.gz", hash = "sha256:eb4b1d5ab9dfd66027c344bb324c20be21d56fe15f6ce216ed5b209df747dea8", size = 16791, upload-time = "2023-12-18T06:17:27.198Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/99/16/532c2aa4bc83b2356820efd4d1f619e45178dc3a0dc0cde16fbccdc43fc1/arxiv-2.1.0-py3-none-any.whl", hash = "sha256:d634a0a59c9f05baf524eaa65563bb0a4532d2b4727a1162a1a9ba7e1e6e48cc", size = 11469, upload-time = "2023-12-18T06:17:25.216Z" }, +] + +[[package]] +name = "certifi" +version = "2026.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, +] + +[[package]] +name = "cfgv" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = 
"2025-11-19T20:55:50.744Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, 
upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, 
+ { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ad/49/349848445b0e53660e258acbcc9b0d014895b6739237920886672240f84b/coverage-7.13.2.tar.gz", hash = "sha256:044c6951ec37146b72a50cc81ef02217d27d4c3640efd2640311393cbbf143d3", size = 826523, upload-time = "2026-01-25T13:00:04.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/01/abca50583a8975bb6e1c59eff67ed8e48bb127c07dad5c28d9e96ccc09ec/coverage-7.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:060ebf6f2c51aff5ba38e1f43a2095e087389b1c69d559fde6049a4b0001320e", size = 218971, upload-time = "2026-01-25T12:57:36.953Z" }, + { url = "https://files.pythonhosted.org/packages/eb/0e/b6489f344d99cd1e5b4d5e1be52dfd3f8a3dc5112aa6c33948da8cabad4e/coverage-7.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1ea8ca9db5e7469cd364552985e15911548ea5b69c48a17291f0cac70484b2e", size = 219473, upload-time = "2026-01-25T12:57:38.934Z" }, + { url = "https://files.pythonhosted.org/packages/17/11/db2f414915a8e4ec53f60b17956c27f21fb68fcf20f8a455ce7c2ccec638/coverage-7.13.2-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b780090d15fd58f07cf2011943e25a5f0c1c894384b13a216b6c86c8a8a7c508", size = 249896, upload-time = "2026-01-25T12:57:40.365Z" }, + { url = "https://files.pythonhosted.org/packages/80/06/0823fe93913663c017e508e8810c998c8ebd3ec2a5a85d2c3754297bdede/coverage-7.13.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:88a800258d83acb803c38175b4495d293656d5fac48659c953c18e5f539a274b", size = 251810, upload-time = 
"2026-01-25T12:57:42.045Z" }, + { url = "https://files.pythonhosted.org/packages/61/dc/b151c3cc41b28cdf7f0166c5fa1271cbc305a8ec0124cce4b04f74791a18/coverage-7.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6326e18e9a553e674d948536a04a80d850a5eeefe2aae2e6d7cf05d54046c01b", size = 253920, upload-time = "2026-01-25T12:57:44.026Z" }, + { url = "https://files.pythonhosted.org/packages/2d/35/e83de0556e54a4729a2b94ea816f74ce08732e81945024adee46851c2264/coverage-7.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:59562de3f797979e1ff07c587e2ac36ba60ca59d16c211eceaa579c266c5022f", size = 250025, upload-time = "2026-01-25T12:57:45.624Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/af2eb9c3926ce3ea0d58a0d2516fcbdacf7a9fc9559fe63076beaf3f2596/coverage-7.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:27ba1ed6f66b0e2d61bfa78874dffd4f8c3a12f8e2b5410e515ab345ba7bc9c3", size = 251612, upload-time = "2026-01-25T12:57:47.713Z" }, + { url = "https://files.pythonhosted.org/packages/26/62/5be2e25f3d6c711d23b71296f8b44c978d4c8b4e5b26871abfc164297502/coverage-7.13.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8be48da4d47cc68754ce643ea50b3234557cbefe47c2f120495e7bd0a2756f2b", size = 249670, upload-time = "2026-01-25T12:57:49.378Z" }, + { url = "https://files.pythonhosted.org/packages/b3/51/400d1b09a8344199f9b6a6fc1868005d766b7ea95e7882e494fa862ca69c/coverage-7.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2a47a4223d3361b91176aedd9d4e05844ca67d7188456227b6bf5e436630c9a1", size = 249395, upload-time = "2026-01-25T12:57:50.86Z" }, + { url = "https://files.pythonhosted.org/packages/e0/36/f02234bc6e5230e2f0a63fd125d0a2093c73ef20fdf681c7af62a140e4e7/coverage-7.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c6f141b468740197d6bd38f2b26ade124363228cc3f9858bd9924ab059e00059", size = 250298, upload-time = "2026-01-25T12:57:52.287Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/06/713110d3dd3151b93611c9cbfc65c15b4156b44f927fced49ac0b20b32a4/coverage-7.13.2-cp311-cp311-win32.whl", hash = "sha256:89567798404af067604246e01a49ef907d112edf2b75ef814b1364d5ce267031", size = 221485, upload-time = "2026-01-25T12:57:53.876Z" }, + { url = "https://files.pythonhosted.org/packages/16/0c/3ae6255fa1ebcb7dec19c9a59e85ef5f34566d1265c70af5b2fc981da834/coverage-7.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:21dd57941804ae2ac7e921771a5e21bbf9aabec317a041d164853ad0a96ce31e", size = 222421, upload-time = "2026-01-25T12:57:55.433Z" }, + { url = "https://files.pythonhosted.org/packages/b5/37/fabc3179af4d61d89ea47bd04333fec735cd5e8b59baad44fed9fc4170d7/coverage-7.13.2-cp311-cp311-win_arm64.whl", hash = "sha256:10758e0586c134a0bafa28f2d37dd2cdb5e4a90de25c0fc0c77dabbad46eca28", size = 221088, upload-time = "2026-01-25T12:57:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/46/39/e92a35f7800222d3f7b2cbb7bbc3b65672ae8d501cb31801b2d2bd7acdf1/coverage-7.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f106b2af193f965d0d3234f3f83fc35278c7fb935dfbde56ae2da3dd2c03b84d", size = 219142, upload-time = "2026-01-25T12:58:00.448Z" }, + { url = "https://files.pythonhosted.org/packages/45/7a/8bf9e9309c4c996e65c52a7c5a112707ecdd9fbaf49e10b5a705a402bbb4/coverage-7.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f45d21dc4d5d6bd29323f0320089ef7eae16e4bef712dff79d184fa7330af3", size = 219503, upload-time = "2026-01-25T12:58:02.451Z" }, + { url = "https://files.pythonhosted.org/packages/87/93/17661e06b7b37580923f3f12406ac91d78aeed293fb6da0b69cc7957582f/coverage-7.13.2-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:fae91dfecd816444c74531a9c3d6ded17a504767e97aa674d44f638107265b99", size = 251006, upload-time = "2026-01-25T12:58:04.059Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/f0/f9e59fb8c310171497f379e25db060abef9fa605e09d63157eebec102676/coverage-7.13.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:264657171406c114787b441484de620e03d8f7202f113d62fcd3d9688baa3e6f", size = 253750, upload-time = "2026-01-25T12:58:05.574Z" }, + { url = "https://files.pythonhosted.org/packages/e5/b1/1935e31add2232663cf7edd8269548b122a7d100047ff93475dbaaae673e/coverage-7.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae47d8dcd3ded0155afbb59c62bd8ab07ea0fd4902e1c40567439e6db9dcaf2f", size = 254862, upload-time = "2026-01-25T12:58:07.647Z" }, + { url = "https://files.pythonhosted.org/packages/af/59/b5e97071ec13df5f45da2b3391b6cdbec78ba20757bc92580a5b3d5fa53c/coverage-7.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a0b33e9fd838220b007ce8f299114d406c1e8edb21336af4c97a26ecfd185aa", size = 251420, upload-time = "2026-01-25T12:58:09.309Z" }, + { url = "https://files.pythonhosted.org/packages/3f/75/9495932f87469d013dc515fb0ce1aac5fa97766f38f6b1a1deb1ee7b7f3a/coverage-7.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b3becbea7f3ce9a2d4d430f223ec15888e4deb31395840a79e916368d6004cce", size = 252786, upload-time = "2026-01-25T12:58:10.909Z" }, + { url = "https://files.pythonhosted.org/packages/6a/59/af550721f0eb62f46f7b8cb7e6f1860592189267b1c411a4e3a057caacee/coverage-7.13.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f819c727a6e6eeb8711e4ce63d78c620f69630a2e9d53bc95ca5379f57b6ba94", size = 250928, upload-time = "2026-01-25T12:58:12.449Z" }, + { url = "https://files.pythonhosted.org/packages/9b/b1/21b4445709aae500be4ab43bbcfb4e53dc0811c3396dcb11bf9f23fd0226/coverage-7.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:4f7b71757a3ab19f7ba286e04c181004c1d61be921795ee8ba6970fd0ec91da5", size = 250496, upload-time = "2026-01-25T12:58:14.047Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/b1/0f5d89dfe0392990e4f3980adbde3eb34885bc1effb2dc369e0bf385e389/coverage-7.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b7fc50d2afd2e6b4f6f2f403b70103d280a8e0cb35320cbbe6debcda02a1030b", size = 252373, upload-time = "2026-01-25T12:58:15.976Z" }, + { url = "https://files.pythonhosted.org/packages/01/c9/0cf1a6a57a9968cc049a6b896693faa523c638a5314b1fc374eb2b2ac904/coverage-7.13.2-cp312-cp312-win32.whl", hash = "sha256:292250282cf9bcf206b543d7608bda17ca6fc151f4cbae949fc7e115112fbd41", size = 221696, upload-time = "2026-01-25T12:58:17.517Z" }, + { url = "https://files.pythonhosted.org/packages/4d/05/d7540bf983f09d32803911afed135524570f8c47bb394bf6206c1dc3a786/coverage-7.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:eeea10169fac01549a7921d27a3e517194ae254b542102267bef7a93ed38c40e", size = 222504, upload-time = "2026-01-25T12:58:19.115Z" }, + { url = "https://files.pythonhosted.org/packages/15/8b/1a9f037a736ced0a12aacf6330cdaad5008081142a7070bc58b0f7930cbc/coverage-7.13.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a5b567f0b635b592c917f96b9a9cb3dbd4c320d03f4bf94e9084e494f2e8894", size = 221120, upload-time = "2026-01-25T12:58:21.334Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f0/3d3eac7568ab6096ff23791a526b0048a1ff3f49d0e236b2af6fb6558e88/coverage-7.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed75de7d1217cf3b99365d110975f83af0528c849ef5180a12fd91b5064df9d6", size = 219168, upload-time = "2026-01-25T12:58:23.376Z" }, + { url = "https://files.pythonhosted.org/packages/a3/a6/f8b5cfeddbab95fdef4dcd682d82e5dcff7a112ced57a959f89537ee9995/coverage-7.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97e596de8fa9bada4d88fde64a3f4d37f1b6131e4faa32bad7808abc79887ddc", size = 219537, upload-time = "2026-01-25T12:58:24.932Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/e6/8d8e6e0c516c838229d1e41cadcec91745f4b1031d4db17ce0043a0423b4/coverage-7.13.2-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:68c86173562ed4413345410c9480a8d64864ac5e54a5cda236748031e094229f", size = 250528, upload-time = "2026-01-25T12:58:26.567Z" }, + { url = "https://files.pythonhosted.org/packages/8e/78/befa6640f74092b86961f957f26504c8fba3d7da57cc2ab7407391870495/coverage-7.13.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7be4d613638d678b2b3773b8f687537b284d7074695a43fe2fbbfc0e31ceaed1", size = 253132, upload-time = "2026-01-25T12:58:28.251Z" }, + { url = "https://files.pythonhosted.org/packages/9d/10/1630db1edd8ce675124a2ee0f7becc603d2bb7b345c2387b4b95c6907094/coverage-7.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d7f63ce526a96acd0e16c4af8b50b64334239550402fb1607ce6a584a6d62ce9", size = 254374, upload-time = "2026-01-25T12:58:30.294Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1d/0d9381647b1e8e6d310ac4140be9c428a0277330991e0c35bdd751e338a4/coverage-7.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:406821f37f864f968e29ac14c3fccae0fec9fdeba48327f0341decf4daf92d7c", size = 250762, upload-time = "2026-01-25T12:58:32.036Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5636dfc9a7c871ee8776af83ee33b4c26bc508ad6cee1e89b6419a366582/coverage-7.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ee68e5a4e3e5443623406b905db447dceddffee0dceb39f4e0cd9ec2a35004b5", size = 252502, upload-time = "2026-01-25T12:58:33.961Z" }, + { url = "https://files.pythonhosted.org/packages/02/2a/7ff2884d79d420cbb2d12fed6fff727b6d0ef27253140d3cdbbd03187ee0/coverage-7.13.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2ee0e58cca0c17dd9c6c1cdde02bb705c7b3fbfa5f3b0b5afeda20d4ebff8ef4", size = 250463, upload-time = 
"2026-01-25T12:58:35.529Z" }, + { url = "https://files.pythonhosted.org/packages/91/c0/ba51087db645b6c7261570400fc62c89a16278763f36ba618dc8657a187b/coverage-7.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e5bbb5018bf76a56aabdb64246b5288d5ae1b7d0dd4d0534fe86df2c2992d1c", size = 250288, upload-time = "2026-01-25T12:58:37.226Z" }, + { url = "https://files.pythonhosted.org/packages/03/07/44e6f428551c4d9faf63ebcefe49b30e5c89d1be96f6a3abd86a52da9d15/coverage-7.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a55516c68ef3e08e134e818d5e308ffa6b1337cc8b092b69b24287bf07d38e31", size = 252063, upload-time = "2026-01-25T12:58:38.821Z" }, + { url = "https://files.pythonhosted.org/packages/c2/67/35b730ad7e1859dd57e834d1bc06080d22d2f87457d53f692fce3f24a5a9/coverage-7.13.2-cp313-cp313-win32.whl", hash = "sha256:5b20211c47a8abf4abc3319d8ce2464864fa9f30c5fcaf958a3eed92f4f1fef8", size = 221716, upload-time = "2026-01-25T12:58:40.484Z" }, + { url = "https://files.pythonhosted.org/packages/0d/82/e5fcf5a97c72f45fc14829237a6550bf49d0ab882ac90e04b12a69db76b4/coverage-7.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:14f500232e521201cf031549fb1ebdfc0a40f401cf519157f76c397e586c3beb", size = 222522, upload-time = "2026-01-25T12:58:43.247Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/25d7b2f946d239dd2d6644ca2cc060d24f97551e2af13b6c24c722ae5f97/coverage-7.13.2-cp313-cp313-win_arm64.whl", hash = "sha256:9779310cb5a9778a60c899f075a8514c89fa6d10131445c2207fc893e0b14557", size = 221145, upload-time = "2026-01-25T12:58:45Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f7/080376c029c8f76fadfe43911d0daffa0cbdc9f9418a0eead70c56fb7f4b/coverage-7.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5a1e41ce5df6b547cbc3d3699381c9e2c2c369c67837e716ed0f549d48e", size = 219861, upload-time = "2026-01-25T12:58:46.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/11/0b5e315af5ab35f4c4a70e64d3314e4eec25eefc6dec13be3a7d5ffe8ac5/coverage-7.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b01899e82a04085b6561eb233fd688474f57455e8ad35cd82286463ba06332b7", size = 220207, upload-time = "2026-01-25T12:58:48.277Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0c/0874d0318fb1062117acbef06a09cf8b63f3060c22265adaad24b36306b7/coverage-7.13.2-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:838943bea48be0e2768b0cf7819544cdedc1bbb2f28427eabb6eb8c9eb2285d3", size = 261504, upload-time = "2026-01-25T12:58:49.904Z" }, + { url = "https://files.pythonhosted.org/packages/83/5e/1cd72c22ecb30751e43a72f40ba50fcef1b7e93e3ea823bd9feda8e51f9a/coverage-7.13.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:93d1d25ec2b27e90bcfef7012992d1f5121b51161b8bffcda756a816cf13c2c3", size = 263582, upload-time = "2026-01-25T12:58:51.582Z" }, + { url = "https://files.pythonhosted.org/packages/9b/da/8acf356707c7a42df4d0657020308e23e5a07397e81492640c186268497c/coverage-7.13.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93b57142f9621b0d12349c43fc7741fe578e4bc914c1e5a54142856cfc0bf421", size = 266008, upload-time = "2026-01-25T12:58:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/41/41/ea1730af99960309423c6ea8d6a4f1fa5564b2d97bd1d29dda4b42611f04/coverage-7.13.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f06799ae1bdfff7ccb8665d75f8291c69110ba9585253de254688aa8a1ccc6c5", size = 260762, upload-time = "2026-01-25T12:58:55.372Z" }, + { url = "https://files.pythonhosted.org/packages/22/fa/02884d2080ba71db64fdc127b311db60e01fe6ba797d9c8363725e39f4d5/coverage-7.13.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f9405ab4f81d490811b1d91c7a20361135a2df4c170e7f0b747a794da5b7f23", size = 263571, upload-time = 
"2026-01-25T12:58:57.52Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6b/4083aaaeba9b3112f55ac57c2ce7001dc4d8fa3fcc228a39f09cc84ede27/coverage-7.13.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f9ab1d5b86f8fbc97a5b3cd6280a3fd85fef3b028689d8a2c00918f0d82c728c", size = 261200, upload-time = "2026-01-25T12:58:59.255Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d2/aea92fa36d61955e8c416ede9cf9bf142aa196f3aea214bb67f85235a050/coverage-7.13.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:f674f59712d67e841525b99e5e2b595250e39b529c3bda14764e4f625a3fa01f", size = 260095, upload-time = "2026-01-25T12:59:01.066Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ae/04ffe96a80f107ea21b22b2367175c621da920063260a1c22f9452fd7866/coverage-7.13.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c6cadac7b8ace1ba9144feb1ae3cb787a6065ba6d23ffc59a934b16406c26573", size = 262284, upload-time = "2026-01-25T12:59:02.802Z" }, + { url = "https://files.pythonhosted.org/packages/1c/7a/6f354dcd7dfc41297791d6fb4e0d618acb55810bde2c1fd14b3939e05c2b/coverage-7.13.2-cp313-cp313t-win32.whl", hash = "sha256:14ae4146465f8e6e6253eba0cccd57423e598a4cb925958b240c805300918343", size = 222389, upload-time = "2026-01-25T12:59:04.563Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d5/080ad292a4a3d3daf411574be0a1f56d6dee2c4fdf6b005342be9fac807f/coverage-7.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9074896edd705a05769e3de0eac0a8388484b503b68863dd06d5e473f874fd47", size = 223450, upload-time = "2026-01-25T12:59:06.677Z" }, + { url = "https://files.pythonhosted.org/packages/88/96/df576fbacc522e9fb8d1c4b7a7fc62eb734be56e2cba1d88d2eabe08ea3f/coverage-7.13.2-cp313-cp313t-win_arm64.whl", hash = "sha256:69e526e14f3f854eda573d3cf40cffd29a1a91c684743d904c33dbdcd0e0f3e7", size = 221707, upload-time = "2026-01-25T12:59:08.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/db/d291e30fdf7ea617a335531e72294e0c723356d7fdde8fba00610a76bda9/coverage-7.13.2-py3-none-any.whl", hash = "sha256:40ce1ea1e25125556d8e76bd0b61500839a07944cc287ac21d5626f3e620cad5", size = 210943, upload-time = "2026-01-25T13:00:02.388Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, + { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size 
= 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "feedparser" +version = "6.0.10" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sgmllib3k" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/9a/824e3c036dec4f0adb4e7c36dcf4cbffc9ee317a4985218cb1663c7ab4ad/feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51", size = 286395, upload-time = "2022-05-21T13:54:12.625Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/1e/741fd94cf2855d251712868f2183cb6485a28daaa3947e1a7046dc036aca/feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f", size = 81103, upload-time = "2022-05-21T13:54:10.647Z" }, +] + +[[package]] +name = "filelock" +version = "3.20.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, +] + +[[package]] +name = "google-auth" +version = "2.48.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyasn1-modules" }, 
+ { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0c/41/242044323fbd746615884b1c16639749e73665b718209946ebad7ba8a813/google_auth-2.48.0.tar.gz", hash = "sha256:4f7e706b0cd3208a3d940a19a822c37a476ddba5450156c3e6624a71f7c841ce", size = 326522, upload-time = "2026-01-26T19:22:47.157Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl", hash = "sha256:2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f", size = 236499, upload-time = "2026-01-26T19:22:45.099Z" }, +] + +[package.optional-dependencies] +requests = [ + { name = "requests" }, +] + +[[package]] +name = "google-genai" +version = "1.62.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "google-auth", extra = ["requests"] }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "sniffio" }, + { name = "tenacity" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/4c/71b32b5c8db420cf2fd0d5ef8a672adbde97d85e5d44a0b4fca712264ef1/google_genai-1.62.0.tar.gz", hash = "sha256:709468a14c739a080bc240a4f3191df597bf64485b1ca3728e0fb67517774c18", size = 490888, upload-time = "2026-02-04T22:48:41.989Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/09/5f/4645d8a28c6e431d0dd6011003a852563f3da7037d36af53154925b099fd/google_genai-1.62.0-py3-none-any.whl", hash = "sha256:4c3daeff3d05fafee4b9a1a31f9c07f01bc22051081aa58b4d61f58d16d1bcc0", size = 724166, upload-time = "2026-02-04T22:48:39.956Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = 
"sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "html2text" +version = "2024.2.26" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/43/e1d53588561e533212117750ee79ad0ba02a41f52a08c1df3396bd466c05/html2text-2024.2.26.tar.gz", hash = "sha256:05f8e367d15aaabc96415376776cdd11afd5127a77fce6e36afc60c563ca2c32", size = 56527, upload-time = "2024-02-27T18:49:24.855Z" } + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = 
"sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "identify" +version = "2.6.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jiter" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/f9/eaca4633486b527ebe7e681c431f529b63fe2709e7c5242fc0f43f77ce63/jiter-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8f8a7e317190b2c2d60eb2e8aa835270b008139562d70fe732e1c0020ec53c9", size = 316435, upload-time = "2025-11-09T20:47:02.087Z" }, + { url = "https://files.pythonhosted.org/packages/10/c1/40c9f7c22f5e6ff715f28113ebaba27ab85f9af2660ad6e1dd6425d14c19/jiter-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2218228a077e784c6c8f1a8e5d6b8cb1dea62ce25811c356364848554b2056cd", size = 320548, upload-time = "2025-11-09T20:47:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/6b/1b/efbb68fe87e7711b00d2cfd1f26bb4bfc25a10539aefeaa7727329ffb9cb/jiter-0.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9354ccaa2982bf2188fd5f57f79f800ef622ec67beb8329903abf6b10da7d423", size = 351915, upload-time = "2025-11-09T20:47:05.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/2d/c06e659888c128ad1e838123d0638f0efad90cc30860cb5f74dd3f2fc0b3/jiter-0.12.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f2607185ea89b4af9a604d4c7ec40e45d3ad03ee66998b031134bc510232bb7", size = 368966, upload-time = "2025-11-09T20:47:06.508Z" }, + { url = "https://files.pythonhosted.org/packages/6b/20/058db4ae5fb07cf6a4ab2e9b9294416f606d8e467fb74c2184b2a1eeacba/jiter-0.12.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a585a5e42d25f2e71db5f10b171f5e5ea641d3aa44f7df745aa965606111cc2", size = 482047, upload-time = "2025-11-09T20:47:08.382Z" }, + { url = "https://files.pythonhosted.org/packages/49/bb/dc2b1c122275e1de2eb12905015d61e8316b2f888bdaac34221c301495d6/jiter-0.12.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd9e21d34edff5a663c631f850edcb786719c960ce887a5661e9c828a53a95d9", size = 380835, upload-time = "2025-11-09T20:47:09.81Z" }, + { url = "https://files.pythonhosted.org/packages/23/7d/38f9cd337575349de16da575ee57ddb2d5a64d425c9367f5ef9e4612e32e/jiter-0.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a612534770470686cd5431478dc5a1b660eceb410abade6b1b74e320ca98de6", size = 364587, upload-time = "2025-11-09T20:47:11.529Z" }, + { url = "https://files.pythonhosted.org/packages/f0/a3/b13e8e61e70f0bb06085099c4e2462647f53cc2ca97614f7fedcaa2bb9f3/jiter-0.12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3985aea37d40a908f887b34d05111e0aae822943796ebf8338877fee2ab67725", size = 390492, upload-time = "2025-11-09T20:47:12.993Z" }, + { url = "https://files.pythonhosted.org/packages/07/71/e0d11422ed027e21422f7bc1883c61deba2d9752b720538430c1deadfbca/jiter-0.12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b1207af186495f48f72529f8d86671903c8c10127cac6381b11dddc4aaa52df6", size = 522046, upload-time = "2025-11-09T20:47:14.6Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/59/b968a9aa7102a8375dbbdfbd2aeebe563c7e5dddf0f47c9ef1588a97e224/jiter-0.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef2fb241de583934c9915a33120ecc06d94aa3381a134570f59eed784e87001e", size = 513392, upload-time = "2025-11-09T20:47:16.011Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e4/7df62002499080dbd61b505c5cb351aa09e9959d176cac2aa8da6f93b13b/jiter-0.12.0-cp311-cp311-win32.whl", hash = "sha256:453b6035672fecce8007465896a25b28a6b59cfe8fbc974b2563a92f5a92a67c", size = 206096, upload-time = "2025-11-09T20:47:17.344Z" }, + { url = "https://files.pythonhosted.org/packages/bb/60/1032b30ae0572196b0de0e87dce3b6c26a1eff71aad5fe43dee3082d32e0/jiter-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:ca264b9603973c2ad9435c71a8ec8b49f8f715ab5ba421c85a51cde9887e421f", size = 204899, upload-time = "2025-11-09T20:47:19.365Z" }, + { url = "https://files.pythonhosted.org/packages/49/d5/c145e526fccdb834063fb45c071df78b0cc426bbaf6de38b0781f45d956f/jiter-0.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:cb00ef392e7d684f2754598c02c409f376ddcef857aae796d559e6cacc2d78a5", size = 188070, upload-time = "2025-11-09T20:47:20.75Z" }, + { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" }, + { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" }, + { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, upload-time = "2025-11-09T20:47:27.918Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" }, + { url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" }, + { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" }, + { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a6/97209693b177716e22576ee1161674d1d58029eb178e01866a0422b69224/jiter-0.12.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6cc49d5130a14b732e0612bc76ae8db3b49898732223ef8b7599aa8d9810683e", size = 313658, upload-time = "2025-11-09T20:47:44.424Z" }, + { url = "https://files.pythonhosted.org/packages/06/4d/125c5c1537c7d8ee73ad3d530a442d6c619714b95027143f1b61c0b4dfe0/jiter-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:37f27a32ce36364d2fa4f7fdc507279db604d27d239ea2e044c8f148410defe1", size = 318605, upload-time = "2025-11-09T20:47:45.973Z" }, + { url = "https://files.pythonhosted.org/packages/99/bf/a840b89847885064c41a5f52de6e312e91fa84a520848ee56c97e4fa0205/jiter-0.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc0944aa3d4b4773e348cda635252824a78f4ba44328e042ef1ff3f6080d1cf", size = 349803, upload-time = "2025-11-09T20:47:47.535Z" }, + { url = "https://files.pythonhosted.org/packages/8a/88/e63441c28e0db50e305ae23e19c1d8fae012d78ed55365da392c1f34b09c/jiter-0.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da25c62d4ee1ffbacb97fac6dfe4dcd6759ebdc9015991e92a6eae5816287f44", size = 365120, upload-time = "2025-11-09T20:47:49.284Z" }, + { url = "https://files.pythonhosted.org/packages/0a/7c/49b02714af4343970eb8aca63396bc1c82fa01197dbb1e9b0d274b550d4e/jiter-0.12.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:048485c654b838140b007390b8182ba9774621103bd4d77c9c3f6f117474ba45", size = 479918, upload-time = "2025-11-09T20:47:50.807Z" }, + { url = "https://files.pythonhosted.org/packages/69/ba/0a809817fdd5a1db80490b9150645f3aae16afad166960bcd562be194f3b/jiter-0.12.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:635e737fbb7315bef0037c19b88b799143d2d7d3507e61a76751025226b3ac87", size = 379008, upload-time = "2025-11-09T20:47:52.211Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c3/c9fc0232e736c8877d9e6d83d6eeb0ba4e90c6c073835cc2e8f73fdeef51/jiter-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e017c417b1ebda911bd13b1e40612704b1f5420e30695112efdbed8a4b389ed", size = 361785, upload-time = "2025-11-09T20:47:53.512Z" }, + { url = "https://files.pythonhosted.org/packages/96/61/61f69b7e442e97ca6cd53086ddc1cf59fb830549bc72c0a293713a60c525/jiter-0.12.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:89b0bfb8b2bf2351fba36bb211ef8bfceba73ef58e7f0c68fb67b5a2795ca2f9", size = 386108, upload-time = "2025-11-09T20:47:54.893Z" }, + { url = "https://files.pythonhosted.org/packages/e9/2e/76bb3332f28550c8f1eba3bf6e5efe211efda0ddbbaf24976bc7078d42a5/jiter-0.12.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f5aa5427a629a824a543672778c9ce0c5e556550d1569bb6ea28a85015287626", size = 519937, upload-time = "2025-11-09T20:47:56.253Z" }, + { url = "https://files.pythonhosted.org/packages/84/d6/fa96efa87dc8bff2094fb947f51f66368fa56d8d4fc9e77b25d7fbb23375/jiter-0.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed53b3d6acbcb0fd0b90f20c7cb3b24c357fe82a3518934d4edfa8c6898e498c", size = 510853, upload-time = "2025-11-09T20:47:58.32Z" }, + { url = "https://files.pythonhosted.org/packages/8a/28/93f67fdb4d5904a708119a6ab58a8f1ec226ff10a94a282e0215402a8462/jiter-0.12.0-cp313-cp313-win32.whl", hash = "sha256:4747de73d6b8c78f2e253a2787930f4fffc68da7fa319739f57437f95963c4de", size = 204699, upload-time = "2025-11-09T20:47:59.686Z" }, + { url = "https://files.pythonhosted.org/packages/c4/1f/30b0eb087045a0abe2a5c9c0c0c8da110875a1d3be83afd4a9a4e548be3c/jiter-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:e25012eb0c456fcc13354255d0338cd5397cce26c77b2832b3c4e2e255ea5d9a", size = 204258, upload-time = "2025-11-09T20:48:01.01Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f4/2b4daf99b96bce6fc47971890b14b2a36aef88d7beb9f057fafa032c6141/jiter-0.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:c97b92c54fe6110138c872add030a1f99aea2401ddcdaa21edf74705a646dd60", size = 185503, upload-time = "2025-11-09T20:48:02.35Z" }, + { url = "https://files.pythonhosted.org/packages/39/ca/67bb15a7061d6fe20b9b2a2fd783e296a1e0f93468252c093481a2f00efa/jiter-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53839b35a38f56b8be26a7851a48b89bc47e5d88e900929df10ed93b95fea3d6", size = 317965, upload-time = "2025-11-09T20:48:03.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/af/1788031cd22e29c3b14bc6ca80b16a39a0b10e611367ffd480c06a259831/jiter-0.12.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94f669548e55c91ab47fef8bddd9c954dab1938644e715ea49d7e117015110a4", size = 345831, upload-time = "2025-11-09T20:48:05.55Z" }, + { url = "https://files.pythonhosted.org/packages/05/17/710bf8472d1dff0d3caf4ced6031060091c1320f84ee7d5dcbed1f352417/jiter-0.12.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:351d54f2b09a41600ffea43d081522d792e81dcfb915f6d2d242744c1cc48beb", size = 361272, upload-time = "2025-11-09T20:48:06.951Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/1dcc4618b59761fef92d10bcbb0b038b5160be653b003651566a185f1a5c/jiter-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2a5e90604620f94bf62264e7c2c038704d38217b7465b863896c6d7c902b06c7", size = 204604, upload-time = "2025-11-09T20:48:08.328Z" }, + { url = "https://files.pythonhosted.org/packages/d9/32/63cb1d9f1c5c6632a783c0052cde9ef7ba82688f7065e2f0d5f10a7e3edb/jiter-0.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:88ef757017e78d2860f96250f9393b7b577b06a956ad102c29c8237554380db3", size = 185628, upload-time = "2025-11-09T20:48:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/5339ef1ecaa881c6948669956567a64d2670941925f245c434f494ffb0e5/jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:4739a4657179ebf08f85914ce50332495811004cc1747852e8b2041ed2aab9b8", size = 311144, upload-time = "2025-11-09T20:49:10.503Z" }, + { url = "https://files.pythonhosted.org/packages/27/74/3446c652bffbd5e81ab354e388b1b5fc1d20daac34ee0ed11ff096b1b01a/jiter-0.12.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:41da8def934bf7bec16cb24bd33c0ca62126d2d45d81d17b864bd5ad721393c3", size = 305877, upload-time = "2025-11-09T20:49:12.269Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/f4/ed76ef9043450f57aac2d4fbeb27175aa0eb9c38f833be6ef6379b3b9a86/jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c44ee814f499c082e69872d426b624987dbc5943ab06e9bbaa4f81989fdb79e", size = 340419, upload-time = "2025-11-09T20:49:13.803Z" }, + { url = "https://files.pythonhosted.org/packages/21/01/857d4608f5edb0664aa791a3d45702e1a5bcfff9934da74035e7b9803846/jiter-0.12.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd2097de91cf03eaa27b3cbdb969addf83f0179c6afc41bbc4513705e013c65d", size = 347212, upload-time = "2025-11-09T20:49:15.643Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" }, + { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" }, + { url = "https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" }, +] + +[[package]] +name = "mypy" +version = "1.11.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5c/86/5d7cbc4974fd564550b80fbb8103c05501ea11aa7835edf3351d90095896/mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79", size = 3078806, upload-time = "2024-08-24T22:50:11.357Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/aa/cc56fb53ebe14c64f1fe91d32d838d6f4db948b9494e200d2f61b820b85d/mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385", size = 10859630, upload-time = "2024-08-24T22:49:51.895Z" }, + { url = "https://files.pythonhosted.org/packages/04/c8/b19a760fab491c22c51975cf74e3d253b8c8ce2be7afaa2490fbf95a8c59/mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca", size = 10037973, upload-time = "2024-08-24T22:49:21.428Z" }, + { url = "https://files.pythonhosted.org/packages/88/57/7e7e39f2619c8f74a22efb9a4c4eff32b09d3798335625a124436d121d89/mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104", size = 12416659, upload-time = "2024-08-24T22:49:35.02Z" }, + { url = "https://files.pythonhosted.org/packages/fc/a6/37f7544666b63a27e46c48f49caeee388bf3ce95f9c570eb5cfba5234405/mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4", size = 12897010, upload-time = "2024-08-24T22:49:29.725Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/8b/459a513badc4d34acb31c736a0101c22d2bd0697b969796ad93294165cfb/mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6", size = 9562873, upload-time = "2024-08-24T22:49:40.448Z" }, + { url = "https://files.pythonhosted.org/packages/35/3a/ed7b12ecc3f6db2f664ccf85cb2e004d3e90bec928e9d7be6aa2f16b7cdf/mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318", size = 10990335, upload-time = "2024-08-24T22:49:54.245Z" }, + { url = "https://files.pythonhosted.org/packages/04/e4/1a9051e2ef10296d206519f1df13d2cc896aea39e8683302f89bf5792a59/mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36", size = 10007119, upload-time = "2024-08-24T22:49:03.451Z" }, + { url = "https://files.pythonhosted.org/packages/f3/3c/350a9da895f8a7e87ade0028b962be0252d152e0c2fbaafa6f0658b4d0d4/mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987", size = 12506856, upload-time = "2024-08-24T22:50:08.804Z" }, + { url = "https://files.pythonhosted.org/packages/b6/49/ee5adf6a49ff13f4202d949544d3d08abb0ea1f3e7f2a6d5b4c10ba0360a/mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca", size = 12952066, upload-time = "2024-08-24T22:50:03.89Z" }, + { url = "https://files.pythonhosted.org/packages/27/c0/b19d709a42b24004d720db37446a42abadf844d5c46a2c442e2a074d70d9/mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70", size = 9664000, upload-time = "2024-08-24T22:49:59.703Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/3a/bdf730640ac523229dd6578e8a581795720a9321399de494374afc437ec5/mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12", size = 2619625, upload-time = "2024-08-24T22:50:01.842Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, +] + +[[package]] +name = "openai" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = 
"typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/a2/677f22c4b487effb8a09439fb6134034b5f0a39ca27df8b95fac23a93720/openai-2.17.0.tar.gz", hash = "sha256:47224b74bd20f30c6b0a6a329505243cb2f26d5cf84d9f8d0825ff8b35e9c999", size = 631445, upload-time = "2026-02-05T16:27:40.953Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/97/284535aa75e6e84ab388248b5a323fc296b1f70530130dee37f7f4fbe856/openai-2.17.0-py3-none-any.whl", hash = "sha256:4f393fd886ca35e113aac7ff239bcd578b81d8f104f5aedc7d3693eb2af1d338", size = 1069524, upload-time = "2026-02-05T16:27:38.941Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = 
"2025-12-05T13:52:56.823Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pollux-ai" +version = "1.0.0rc1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-genai" }, + { name = "httpx" }, + { name = "openai" }, + { name = "pydantic" }, + { name = "python-dotenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e7/ec/ea9c607a072bc92a7d66f81b212b45f79e4b00eacafea856e60321680da8/pollux_ai-1.0.0rc1.tar.gz", hash = "sha256:350d6af66f77c9a20f1ada28f95880413d1b5983110a987a7de499b15a24e91c", size = 247487, upload-time = "2026-02-14T13:50:15.126Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/97/ae947b937d0e60ab0f00af872fc7d765a50e6eabaf28a9a6bb266a1d8522/pollux_ai-1.0.0rc1-py3-none-any.whl", hash = "sha256:9494251a8d3c65408b23c9a3b5424f13ac913fc140f4b4e287a0765c5673c5f7", size = 33486, upload-time = "2026-02-14T13:50:13.804Z" }, +] + +[[package]] +name = "pre-commit" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/10/97ee2fa54dff1e9da9badbc5e35d0bbaef0776271ea5907eccf64140f72f/pre_commit-3.8.0.tar.gz", hash = 
"sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af", size = 177815, upload-time = "2024-07-28T19:59:01.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/92/caae8c86e94681b42c246f0bca35c059a2f0529e5b92619f6aba4cf7e7b6/pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f", size = 204643, upload-time = "2024-07-28T19:58:59.335Z" }, +] + +[[package]] +name = "psycopg" +version = "3.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/8e/f176997fd790d3dce9fa0ca695391beaeee39af7ecd6d426c4c063cf6744/psycopg-3.2.1.tar.gz", hash = "sha256:dc8da6dc8729dacacda3cc2f17d2c9397a70a66cf0d2b69c91065d60d5f00cb7", size = 155313, upload-time = "2024-07-01T03:35:50.314Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0e/0f755db36f47f96464463385552f8f132a981731356837c9a30a11ab2d35/psycopg-3.2.1-py3-none-any.whl", hash = "sha256:ece385fb413a37db332f97c49208b36cf030ff02b199d7635ed2fbd378724175", size = 197743, upload-time = "2024-07-01T03:30:14.942Z" }, +] + +[package.optional-dependencies] +binary = [ + { name = "psycopg-binary", marker = "implementation_name != 'pypy'" }, +] + +[[package]] +name = "psycopg-binary" +version = "3.2.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/68/f49dd22dc9f9869597d90fff73dcc8c9754304cdfeefa5f463abb4a1fcce/psycopg_binary-3.2.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:62b1b7b07e00ee490afb39c0a47d8282a9c2822c7cfed9553a04b0058adf7e7f", size = 3388952, upload-time = "2024-07-01T03:31:32.649Z" }, + { url = "https://files.pythonhosted.org/packages/ef/3c/90210e090be228e9876bc210576cfd75e240505f16c92fa8b11839acbf35/psycopg_binary-3.2.1-cp311-cp311-macosx_14_0_arm64.whl", 
hash = "sha256:f8afb07114ea9b924a4a0305ceb15354ccf0ef3c0e14d54b8dbeb03e50182dd7", size = 3506474, upload-time = "2024-07-01T03:31:38.376Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2a/d45ff1f4b8d5b334695f3f5a68c722dbf483b65348f2e2639cf2f45c7b73/psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40bb515d042f6a345714ec0403df68ccf13f73b05e567837d80c886c7c9d3805", size = 4464849, upload-time = "2024-07-01T03:31:47.787Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ce/60562887f1363747ce2e074841548f96b433dd50e78d822c88e7ad6ec817/psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6418712ba63cebb0c88c050b3997185b0ef54173b36568522d5634ac06153040", size = 4263085, upload-time = "2024-07-01T03:31:55.249Z" }, + { url = "https://files.pythonhosted.org/packages/2e/4f/af3cb85b967d2616c9c4e2bea9e865c8d0c38fc83ce5db1ef050ceba2bea/psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:101472468d59c74bb8565fab603e032803fd533d16be4b2d13da1bab8deb32a3", size = 4514411, upload-time = "2024-07-01T03:31:59.738Z" }, + { url = "https://files.pythonhosted.org/packages/1d/00/685055d15f70e57d24cffe59021d53d428cdd7126b87442b5b07c9ffd222/psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa3931f308ab4a479d0ee22dc04bea867a6365cac0172e5ddcba359da043854b", size = 4207636, upload-time = "2024-07-01T03:32:04.407Z" }, + { url = "https://files.pythonhosted.org/packages/72/9f/d6f6c8f60c4ebcc270efda17ab22110b24934f610dc7d5d3e2dc1e9eecbc/psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dc314a47d44fe1a8069b075a64abffad347a3a1d8652fed1bab5d3baea37acb2", size = 3132484, upload-time = "2024-07-01T03:32:09.851Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/e8/742cca374ab3725606f79a9b3b2429bba73917e1d14d52ba39d83dec0a3c/psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cc304a46be1e291031148d9d95c12451ffe783ff0cc72f18e2cc7ec43cdb8c68", size = 3111128, upload-time = "2024-07-01T03:32:16.741Z" }, + { url = "https://files.pythonhosted.org/packages/61/a9/046536ef56a785e12c72c2a2507058473889bd7d625fbce142f1a1662bc2/psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f9e13600647087df5928875559f0eb8f496f53e6278b7da9511b4b3d0aff960", size = 3213088, upload-time = "2024-07-01T03:32:22.139Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/a988739a5d8e72c553a44abba71217c601400e5164a874916e2aa4285139/psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b140182830c76c74d17eba27df3755a46442ce8d4fb299e7f1cf2f74a87c877b", size = 3252404, upload-time = "2024-07-01T03:32:29.682Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/bfefaa5417e05f77c12f1cd099da7a00666fb2c8aef5996014f255a29857/psycopg_binary-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:3c838806eeb99af39f934b7999e35f947a8e577997cc892c12b5053a97a9057f", size = 2925802, upload-time = "2024-07-01T03:32:34.955Z" }, + { url = "https://files.pythonhosted.org/packages/50/5d/51d39aafab4384a744d5e927b7867f3dadd8537249e8173e34aaf894db94/psycopg_binary-3.2.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7066d3dca196ed0dc6172f9777b2d62e4f138705886be656cccff2d555234d60", size = 3359766, upload-time = "2024-07-01T03:32:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/e4/7b/75be686af04e2019b53a9ff22de3aa750db7d34f532e4b949ed15a78b627/psycopg_binary-3.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:28ada5f610468c57d8a4a055a8ea915d0085a43d794266c4f3b9d02f4288f4db", size = 3503325, upload-time = "2024-07-01T03:32:45.464Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/9a/28da916a65fb40fb3e1a97e1ae0a26860d8c1265c6e9766bd6c47abc437b/psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e8213bf50af073b1aa8dc3cff123bfeedac86332a16c1b7274910bc88a847c7", size = 4443593, upload-time = "2024-07-01T03:32:54.341Z" }, + { url = "https://files.pythonhosted.org/packages/b0/9a/3dc1237a2ef3344b347af79e1aad2a60277cfafa2846f54cb13e1cd8c528/psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74d623261655a169bc84a9669890975c229f2fa6e19a7f2d10a77675dcf1a707", size = 4247005, upload-time = "2024-07-01T03:33:01.162Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a9/06491cb0338b6f0868d349d2a526586dc165e508b64daa2ff45f9db7ba4b/psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42781ba94e8842ee98bca5a7d0c44cc9d067500fedca2d6a90fa3609b6d16b42", size = 4484179, upload-time = "2024-07-01T03:33:09.385Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5f/b1116467dd18b4efc1aa7f03c96da751724a43c6a630979c61f60a9fbe5f/psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e6669091d09f8ba36e10ce678a6d9916e110446236a9b92346464a3565635e", size = 4186490, upload-time = "2024-07-01T03:33:15.295Z" }, + { url = "https://files.pythonhosted.org/packages/a4/87/6092d1701d36c5aeb74c35cb54266fd44ee0f7711cafa4c0bffd873bdb61/psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b09e8a576a2ac69d695032ee76f31e03b30781828b5dd6d18c6a009e5a3d1c35", size = 3109385, upload-time = "2024-07-01T03:33:20.174Z" }, + { url = "https://files.pythonhosted.org/packages/62/61/4ad7e29d09202478b6f568fff19efa978a4f2c25cb5efcd73544a4ee8be7/psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8f28ff0cb9f1defdc4a6f8c958bf6787274247e7dfeca811f6e2f56602695fb1", size = 3094397, upload-time = "2024-07-01T03:33:25.285Z" }, + 
{ url = "https://files.pythonhosted.org/packages/b7/dd/0ae42c64bf524d1fcf9bf861ab09d331e693ae00e527ba08131b2d3729a3/psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4c84fcac8a3a3479ac14673095cc4e1fdba2935499f72c436785ac679bec0d1a", size = 3184097, upload-time = "2024-07-01T03:33:31.268Z" }, + { url = "https://files.pythonhosted.org/packages/dd/f0/09329ebb0cd03e2ee5786fc9914ac904f4965b78627f15826f8258fde734/psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:950fd666ec9e9fe6a8eeb2b5a8f17301790e518953730ad44d715b59ffdbc67f", size = 3228517, upload-time = "2024-07-01T03:33:37.824Z" }, + { url = "https://files.pythonhosted.org/packages/60/2f/979228189adbeb59afce626f1e7c3bf73cc7ff94217099a2ddfd6fd132ff/psycopg_binary-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:334046a937bb086c36e2c6889fe327f9f29bfc085d678f70fac0b0618949f674", size = 2911959, upload-time = "2024-07-01T03:33:43.357Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/6e630dff89739fcd427e3f72b3d905ce0acb85a45d4ec3e2678718a3487f/pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b", size = 146586, upload-time = "2026-01-16T18:04:18.534Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf", size = 83371, upload-time = "2026-01-16T18:04:17.174Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = 
"sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + 
+[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, 
upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = 
"2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", 
size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 
2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pypdf" +version = "4.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/65/2ed7c9e1d31d860f096061b3dd2d665f501e09faaa0409a3f0d719d2a16d/pypdf-4.3.1.tar.gz", hash = "sha256:b2f37fe9a3030aa97ca86067a56ba3f9d3565f9a791b305c7355d8392c30d91b", size = 293266, upload-time = "2024-07-21T19:35:20.207Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/60/eccdd92dd4af3e4bea6d6a342f7588c618a15b9bec4b968af581e498bcc4/pypdf-4.3.1-py3-none-any.whl", hash = "sha256:64b31da97eda0771ef22edb1bfecd5deee4b72c3d1736b7df2689805076d6418", size = 295825, upload-time = "2024-07-21T19:35:18.126Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/67/00efc8d11b630c56f15f4ad9c7f9223f1e5ec275aaae3fa9118c6a223ad2/pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857", size = 63042, upload-time = "2024-03-24T20:16:34.856Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/3a/af5b4fa5961d9a1e6237b530eb87dd04aea6eb83da09d2a4073d81b54ccf/pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652", size = 21990, upload-time = "2024-03-24T20:16:32.444Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/2b/137a7db414aeaf3d753d415a2bc3b90aba8c5f61dff7a7a736d84b2ec60d/pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f", size = 28384, upload-time = "2022-10-05T18:52:51.57Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/84/c951790e199cd54ddbf1021965b62a5415b81193ebdb4f4af2659fd06a73/pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b", size = 9275, upload-time = "2022-10-05T18:52:50.191Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115, upload-time = "2024-01-23T06:33:00.505Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "regex" +version = "2026.1.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/c9/0c80c96eab96948363d270143138d671d5731c3a692b417629bf3492a9d6/regex-2026.1.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ae6020fb311f68d753b7efa9d4b9a5d47a5d6466ea0d5e3b5a471a960ea6e4a", size = 488168, upload-time = "2026-01-14T23:14:16.129Z" }, + { url = "https://files.pythonhosted.org/packages/17/f0/271c92f5389a552494c429e5cc38d76d1322eb142fb5db3c8ccc47751468/regex-2026.1.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eddf73f41225942c1f994914742afa53dc0d01a6e20fe14b878a1b1edc74151f", size = 290636, upload-time = "2026-01-14T23:14:17.715Z" }, + { url = "https://files.pythonhosted.org/packages/a0/f9/5f1fd077d106ca5655a0f9ff8f25a1ab55b92128b5713a91ed7134ff688e/regex-2026.1.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e8cd52557603f5c66a548f69421310886b28b7066853089e1a71ee710e1cdc1", size = 288496, upload-time = "2026-01-14T23:14:19.326Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e1/8f43b03a4968c748858ec77f746c286d81f896c2e437ccf050ebc5d3128c/regex-2026.1.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5170907244b14303edc5978f522f16c974f32d3aa92109fabc2af52411c9433b", size = 793503, upload-time = "2026-01-14T23:14:20.922Z" }, + { url = "https://files.pythonhosted.org/packages/8d/4e/a39a5e8edc5377a46a7c875c2f9a626ed3338cb3bb06931be461c3e1a34a/regex-2026.1.15-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2748c1ec0663580b4510bd89941a31560b4b439a0b428b49472a3d9944d11cd8", size = 860535, upload-time = "2026-01-14T23:14:22.405Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/1c/9dce667a32a9477f7a2869c1c767dc00727284a9fa3ff5c09a5c6c03575e/regex-2026.1.15-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2f2775843ca49360508d080eaa87f94fa248e2c946bbcd963bb3aae14f333413", size = 907225, upload-time = "2026-01-14T23:14:23.897Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3c/87ca0a02736d16b6262921425e84b48984e77d8e4e572c9072ce96e66c30/regex-2026.1.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9ea2604370efc9a174c1b5dcc81784fb040044232150f7f33756049edfc9026", size = 800526, upload-time = "2026-01-14T23:14:26.039Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/647d5715aeea7c87bdcbd2f578f47b415f55c24e361e639fe8c0cc88878f/regex-2026.1.15-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0dcd31594264029b57bf16f37fd7248a70b3b764ed9e0839a8f271b2d22c0785", size = 773446, upload-time = "2026-01-14T23:14:28.109Z" }, + { url = "https://files.pythonhosted.org/packages/af/89/bf22cac25cb4ba0fe6bff52ebedbb65b77a179052a9d6037136ae93f42f4/regex-2026.1.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c08c1f3e34338256732bd6938747daa3c0d5b251e04b6e43b5813e94d503076e", size = 783051, upload-time = "2026-01-14T23:14:29.929Z" }, + { url = "https://files.pythonhosted.org/packages/1e/f4/6ed03e71dca6348a5188363a34f5e26ffd5db1404780288ff0d79513bce4/regex-2026.1.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e43a55f378df1e7a4fa3547c88d9a5a9b7113f653a66821bcea4718fe6c58763", size = 854485, upload-time = "2026-01-14T23:14:31.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/8e8560bd78caded8eb137e3e47612430a05b9a772caf60876435192d670a/regex-2026.1.15-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:f82110ab962a541737bd0ce87978d4c658f06e7591ba899192e2712a517badbb", size = 762195, upload-time = "2026-01-14T23:14:32.802Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/6b/61fc710f9aa8dfcd764fe27d37edfaa023b1a23305a0d84fccd5adb346ea/regex-2026.1.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:27618391db7bdaf87ac6c92b31e8f0dfb83a9de0075855152b720140bda177a2", size = 845986, upload-time = "2026-01-14T23:14:34.898Z" }, + { url = "https://files.pythonhosted.org/packages/fd/2e/fbee4cb93f9d686901a7ca8d94285b80405e8c34fe4107f63ffcbfb56379/regex-2026.1.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bfb0d6be01fbae8d6655c8ca21b3b72458606c4aec9bbc932db758d47aba6db1", size = 788992, upload-time = "2026-01-14T23:14:37.116Z" }, + { url = "https://files.pythonhosted.org/packages/ed/14/3076348f3f586de64b1ab75a3fbabdaab7684af7f308ad43be7ef1849e55/regex-2026.1.15-cp311-cp311-win32.whl", hash = "sha256:b10e42a6de0e32559a92f2f8dc908478cc0fa02838d7dbe764c44dca3fa13569", size = 265893, upload-time = "2026-01-14T23:14:38.426Z" }, + { url = "https://files.pythonhosted.org/packages/0f/19/772cf8b5fc803f5c89ba85d8b1870a1ca580dc482aa030383a9289c82e44/regex-2026.1.15-cp311-cp311-win_amd64.whl", hash = "sha256:e9bf3f0bbdb56633c07d7116ae60a576f846efdd86a8848f8d62b749e1209ca7", size = 277840, upload-time = "2026-01-14T23:14:39.785Z" }, + { url = "https://files.pythonhosted.org/packages/78/84/d05f61142709474da3c0853222d91086d3e1372bcdab516c6fd8d80f3297/regex-2026.1.15-cp311-cp311-win_arm64.whl", hash = "sha256:41aef6f953283291c4e4e6850607bd71502be67779586a61472beacb315c97ec", size = 270374, upload-time = "2026-01-14T23:14:41.592Z" }, + { url = "https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398, upload-time = "2026-01-14T23:14:43.741Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339, upload-time = "2026-01-14T23:14:45.183Z" }, + { url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003, upload-time = "2026-01-14T23:14:47.25Z" }, + { url = "https://files.pythonhosted.org/packages/c6/e4/1fc4599450c9f0863d9406e944592d968b8d6dfd0d552a7d569e43bceada/regex-2026.1.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8a154cf6537ebbc110e24dabe53095e714245c272da9c1be05734bdad4a61aa", size = 798656, upload-time = "2026-01-14T23:14:48.77Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e6/59650d73a73fa8a60b3a590545bfcf1172b4384a7df2e7fe7b9aab4e2da9/regex-2026.1.15-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8050ba2e3ea1d8731a549e83c18d2f0999fbc99a5f6bd06b4c91449f55291804", size = 864252, upload-time = "2026-01-14T23:14:50.528Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ab/1d0f4d50a1638849a97d731364c9a80fa304fec46325e48330c170ee8e80/regex-2026.1.15-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf065240704cb8951cc04972cf107063917022511273e0969bdb34fc173456c", size = 912268, upload-time = "2026-01-14T23:14:52.952Z" }, + { url = "https://files.pythonhosted.org/packages/dd/df/0d722c030c82faa1d331d1921ee268a4e8fb55ca8b9042c9341c352f17fa/regex-2026.1.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c32bef3e7aeee75746748643667668ef941d28b003bfc89994ecf09a10f7a1b5", size = 
803589, upload-time = "2026-01-14T23:14:55.182Z" }, + { url = "https://files.pythonhosted.org/packages/66/23/33289beba7ccb8b805c6610a8913d0131f834928afc555b241caabd422a9/regex-2026.1.15-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d5eaa4a4c5b1906bd0d2508d68927f15b81821f85092e06f1a34a4254b0e1af3", size = 775700, upload-time = "2026-01-14T23:14:56.707Z" }, + { url = "https://files.pythonhosted.org/packages/e7/65/bf3a42fa6897a0d3afa81acb25c42f4b71c274f698ceabd75523259f6688/regex-2026.1.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:86c1077a3cc60d453d4084d5b9649065f3bf1184e22992bd322e1f081d3117fb", size = 787928, upload-time = "2026-01-14T23:14:58.312Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f5/13bf65864fc314f68cdd6d8ca94adcab064d4d39dbd0b10fef29a9da48fc/regex-2026.1.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2b091aefc05c78d286657cd4db95f2e6313375ff65dcf085e42e4c04d9c8d410", size = 858607, upload-time = "2026-01-14T23:15:00.657Z" }, + { url = "https://files.pythonhosted.org/packages/a3/31/040e589834d7a439ee43fb0e1e902bc81bd58a5ba81acffe586bb3321d35/regex-2026.1.15-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:57e7d17f59f9ebfa9667e6e5a1c0127b96b87cb9cede8335482451ed00788ba4", size = 763729, upload-time = "2026-01-14T23:15:02.248Z" }, + { url = "https://files.pythonhosted.org/packages/9b/84/6921e8129687a427edf25a34a5594b588b6d88f491320b9de5b6339a4fcb/regex-2026.1.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c6c4dcdfff2c08509faa15d36ba7e5ef5fcfab25f1e8f85a0c8f45bc3a30725d", size = 850697, upload-time = "2026-01-14T23:15:03.878Z" }, + { url = "https://files.pythonhosted.org/packages/8a/87/3d06143d4b128f4229158f2de5de6c8f2485170c7221e61bf381313314b2/regex-2026.1.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf8ff04c642716a7f2048713ddc6278c5fd41faa3b9cab12607c7abecd012c22", size = 789849, upload-time = "2026-01-14T23:15:06.102Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/69/c50a63842b6bd48850ebc7ab22d46e7a2a32d824ad6c605b218441814639/regex-2026.1.15-cp312-cp312-win32.whl", hash = "sha256:82345326b1d8d56afbe41d881fdf62f1926d7264b2fc1537f99ae5da9aad7913", size = 266279, upload-time = "2026-01-14T23:15:07.678Z" }, + { url = "https://files.pythonhosted.org/packages/f2/36/39d0b29d087e2b11fd8191e15e81cce1b635fcc845297c67f11d0d19274d/regex-2026.1.15-cp312-cp312-win_amd64.whl", hash = "sha256:4def140aa6156bc64ee9912383d4038f3fdd18fee03a6f222abd4de6357ce42a", size = 277166, upload-time = "2026-01-14T23:15:09.257Z" }, + { url = "https://files.pythonhosted.org/packages/28/32/5b8e476a12262748851fa8ab1b0be540360692325975b094e594dfebbb52/regex-2026.1.15-cp312-cp312-win_arm64.whl", hash = "sha256:c6c565d9a6e1a8d783c1948937ffc377dd5771e83bd56de8317c450a954d2056", size = 270415, upload-time = "2026-01-14T23:15:10.743Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2e/6870bb16e982669b674cce3ee9ff2d1d46ab80528ee6bcc20fb2292efb60/regex-2026.1.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e69d0deeb977ffe7ed3d2e4439360089f9c3f217ada608f0f88ebd67afb6385e", size = 489164, upload-time = "2026-01-14T23:15:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/9774542e203849b0286badf67199970a44ebdb0cc5fb739f06e47ada72f8/regex-2026.1.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3601ffb5375de85a16f407854d11cca8fe3f5febbe3ac78fb2866bb220c74d10", size = 291218, upload-time = "2026-01-14T23:15:15.647Z" }, + { url = "https://files.pythonhosted.org/packages/b2/87/b0cda79f22b8dee05f774922a214da109f9a4c0eca5da2c9d72d77ea062c/regex-2026.1.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4c5ef43b5c2d4114eb8ea424bb8c9cec01d5d17f242af88b2448f5ee81caadbc", size = 288895, upload-time = "2026-01-14T23:15:17.788Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/6a/0041f0a2170d32be01ab981d6346c83a8934277d82c780d60b127331f264/regex-2026.1.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:968c14d4f03e10b2fd960f1d5168c1f0ac969381d3c1fcc973bc45fb06346599", size = 798680, upload-time = "2026-01-14T23:15:19.342Z" }, + { url = "https://files.pythonhosted.org/packages/58/de/30e1cfcdbe3e891324aa7568b7c968771f82190df5524fabc1138cb2d45a/regex-2026.1.15-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56a5595d0f892f214609c9f76b41b7428bed439d98dc961efafdd1354d42baae", size = 864210, upload-time = "2026-01-14T23:15:22.005Z" }, + { url = "https://files.pythonhosted.org/packages/64/44/4db2f5c5ca0ccd40ff052ae7b1e9731352fcdad946c2b812285a7505ca75/regex-2026.1.15-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf650f26087363434c4e560011f8e4e738f6f3e029b85d4904c50135b86cfa5", size = 912358, upload-time = "2026-01-14T23:15:24.569Z" }, + { url = "https://files.pythonhosted.org/packages/79/b6/e6a5665d43a7c42467138c8a2549be432bad22cbd206f5ec87162de74bd7/regex-2026.1.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18388a62989c72ac24de75f1449d0fb0b04dfccd0a1a7c1c43af5eb503d890f6", size = 803583, upload-time = "2026-01-14T23:15:26.526Z" }, + { url = "https://files.pythonhosted.org/packages/e7/53/7cd478222169d85d74d7437e74750005e993f52f335f7c04ff7adfda3310/regex-2026.1.15-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d220a2517f5893f55daac983bfa9fe998a7dbcaee4f5d27a88500f8b7873788", size = 775782, upload-time = "2026-01-14T23:15:29.352Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b5/75f9a9ee4b03a7c009fe60500fe550b45df94f0955ca29af16333ef557c5/regex-2026.1.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:c9c08c2fbc6120e70abff5d7f28ffb4d969e14294fb2143b4b5c7d20e46d1714", size = 787978, upload-time = "2026-01-14T23:15:31.295Z" }, + { url = "https://files.pythonhosted.org/packages/72/b3/79821c826245bbe9ccbb54f6eadb7879c722fd3e0248c17bfc90bf54e123/regex-2026.1.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7ef7d5d4bd49ec7364315167a4134a015f61e8266c6d446fc116a9ac4456e10d", size = 858550, upload-time = "2026-01-14T23:15:33.558Z" }, + { url = "https://files.pythonhosted.org/packages/4a/85/2ab5f77a1c465745bfbfcb3ad63178a58337ae8d5274315e2cc623a822fa/regex-2026.1.15-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e42844ad64194fa08d5ccb75fe6a459b9b08e6d7296bd704460168d58a388f3", size = 763747, upload-time = "2026-01-14T23:15:35.206Z" }, + { url = "https://files.pythonhosted.org/packages/6d/84/c27df502d4bfe2873a3e3a7cf1bdb2b9cc10284d1a44797cf38bed790470/regex-2026.1.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cfecdaa4b19f9ca534746eb3b55a5195d5c95b88cac32a205e981ec0a22b7d31", size = 850615, upload-time = "2026-01-14T23:15:37.523Z" }, + { url = "https://files.pythonhosted.org/packages/7d/b7/658a9782fb253680aa8ecb5ccbb51f69e088ed48142c46d9f0c99b46c575/regex-2026.1.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:08df9722d9b87834a3d701f3fca570b2be115654dbfd30179f30ab2f39d606d3", size = 789951, upload-time = "2026-01-14T23:15:39.582Z" }, + { url = "https://files.pythonhosted.org/packages/fc/2a/5928af114441e059f15b2f63e188bd00c6529b3051c974ade7444b85fcda/regex-2026.1.15-cp313-cp313-win32.whl", hash = "sha256:d426616dae0967ca225ab12c22274eb816558f2f99ccb4a1d52ca92e8baf180f", size = 266275, upload-time = "2026-01-14T23:15:42.108Z" }, + { url = "https://files.pythonhosted.org/packages/4f/16/5bfbb89e435897bff28cf0352a992ca719d9e55ebf8b629203c96b6ce4f7/regex-2026.1.15-cp313-cp313-win_amd64.whl", hash = "sha256:febd38857b09867d3ed3f4f1af7d241c5c50362e25ef43034995b77a50df494e", size = 277145, upload-time = "2026-01-14T23:15:44.244Z" }, + { 
url = "https://files.pythonhosted.org/packages/56/c1/a09ff7392ef4233296e821aec5f78c51be5e91ffde0d163059e50fd75835/regex-2026.1.15-cp313-cp313-win_arm64.whl", hash = "sha256:8e32f7896f83774f91499d239e24cebfadbc07639c1494bb7213983842348337", size = 270411, upload-time = "2026-01-14T23:15:45.858Z" }, + { url = "https://files.pythonhosted.org/packages/3c/38/0cfd5a78e5c6db00e6782fdae70458f89850ce95baa5e8694ab91d89744f/regex-2026.1.15-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ec94c04149b6a7b8120f9f44565722c7ae31b7a6d2275569d2eefa76b83da3be", size = 492068, upload-time = "2026-01-14T23:15:47.616Z" }, + { url = "https://files.pythonhosted.org/packages/50/72/6c86acff16cb7c959c4355826bbf06aad670682d07c8f3998d9ef4fee7cd/regex-2026.1.15-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40c86d8046915bb9aeb15d3f3f15b6fd500b8ea4485b30e1bbc799dab3fe29f8", size = 292756, upload-time = "2026-01-14T23:15:49.307Z" }, + { url = "https://files.pythonhosted.org/packages/4e/58/df7fb69eadfe76526ddfce28abdc0af09ffe65f20c2c90932e89d705153f/regex-2026.1.15-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:726ea4e727aba21643205edad8f2187ec682d3305d790f73b7a51c7587b64bdd", size = 291114, upload-time = "2026-01-14T23:15:51.484Z" }, + { url = "https://files.pythonhosted.org/packages/ed/6c/a4011cd1cf96b90d2cdc7e156f91efbd26531e822a7fbb82a43c1016678e/regex-2026.1.15-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb740d044aff31898804e7bf1181cc72c03d11dfd19932b9911ffc19a79070a", size = 807524, upload-time = "2026-01-14T23:15:53.102Z" }, + { url = "https://files.pythonhosted.org/packages/1d/25/a53ffb73183f69c3e9f4355c4922b76d2840aee160af6af5fac229b6201d/regex-2026.1.15-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05d75a668e9ea16f832390d22131fe1e8acc8389a694c8febc3e340b0f810b93", size = 873455, upload-time = "2026-01-14T23:15:54.956Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/0b/8b47fc2e8f97d9b4a851736f3890a5f786443aa8901061c55f24c955f45b/regex-2026.1.15-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d991483606f3dbec93287b9f35596f41aa2e92b7c2ebbb935b63f409e243c9af", size = 915007, upload-time = "2026-01-14T23:15:57.041Z" }, + { url = "https://files.pythonhosted.org/packages/c2/fa/97de0d681e6d26fabe71968dbee06dd52819e9a22fdce5dac7256c31ed84/regex-2026.1.15-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:194312a14819d3e44628a44ed6fea6898fdbecb0550089d84c403475138d0a09", size = 812794, upload-time = "2026-01-14T23:15:58.916Z" }, + { url = "https://files.pythonhosted.org/packages/22/38/e752f94e860d429654aa2b1c51880bff8dfe8f084268258adf9151cf1f53/regex-2026.1.15-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe2fda4110a3d0bc163c2e0664be44657431440722c5c5315c65155cab92f9e5", size = 781159, upload-time = "2026-01-14T23:16:00.817Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a7/d739ffaef33c378fc888302a018d7f81080393d96c476b058b8c64fd2b0d/regex-2026.1.15-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:124dc36c85d34ef2d9164da41a53c1c8c122cfb1f6e1ec377a1f27ee81deb794", size = 795558, upload-time = "2026-01-14T23:16:03.267Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c4/542876f9a0ac576100fc73e9c75b779f5c31e3527576cfc9cb3009dcc58a/regex-2026.1.15-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1774cd1981cd212506a23a14dba7fdeaee259f5deba2df6229966d9911e767a", size = 868427, upload-time = "2026-01-14T23:16:05.646Z" }, + { url = "https://files.pythonhosted.org/packages/fc/0f/d5655bea5b22069e32ae85a947aa564912f23758e112cdb74212848a1a1b/regex-2026.1.15-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:b5f7d8d2867152cdb625e72a530d2ccb48a3d199159144cbdd63870882fb6f80", size = 769939, upload-time = "2026-01-14T23:16:07.542Z" }, + { 
url = "https://files.pythonhosted.org/packages/20/06/7e18a4fa9d326daeda46d471a44ef94201c46eaa26dbbb780b5d92cbfdda/regex-2026.1.15-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:492534a0ab925d1db998defc3c302dae3616a2fc3fe2e08db1472348f096ddf2", size = 854753, upload-time = "2026-01-14T23:16:10.395Z" }, + { url = "https://files.pythonhosted.org/packages/3b/67/dc8946ef3965e166f558ef3b47f492bc364e96a265eb4a2bb3ca765c8e46/regex-2026.1.15-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c661fc820cfb33e166bf2450d3dadbda47c8d8981898adb9b6fe24e5e582ba60", size = 799559, upload-time = "2026-01-14T23:16:12.347Z" }, + { url = "https://files.pythonhosted.org/packages/a5/61/1bba81ff6d50c86c65d9fd84ce9699dd106438ee4cdb105bf60374ee8412/regex-2026.1.15-cp313-cp313t-win32.whl", hash = "sha256:99ad739c3686085e614bf77a508e26954ff1b8f14da0e3765ff7abbf7799f952", size = 268879, upload-time = "2026-01-14T23:16:14.049Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/cef7d4c5fb0ea3ac5c775fd37db5747f7378b29526cc83f572198924ff47/regex-2026.1.15-cp313-cp313t-win_amd64.whl", hash = "sha256:32655d17905e7ff8ba5c764c43cb124e34a9245e45b83c22e81041e1071aee10", size = 280317, upload-time = "2026-01-14T23:16:15.718Z" }, + { url = "https://files.pythonhosted.org/packages/b4/52/4317f7a5988544e34ab57b4bde0f04944c4786128c933fb09825924d3e82/regex-2026.1.15-cp313-cp313t-win_arm64.whl", hash = "sha256:b2a13dd6a95e95a489ca242319d18fc02e07ceb28fa9ad146385194d95b3c829", size = 271551, upload-time = "2026-01-14T23:16:17.533Z" }, +] + +[[package]] +name = "requests" +version = "2.31.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/be/10918a2eac4ae9f02f6cfe6414b7a155ccd8f7f9d4380d62fd5b955065c3/requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1", 
size = 110794, upload-time = "2023-05-22T15:12:44.175Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/8e/0e2d847013cb52cd35b38c009bb167a1a26b2ce6cd6965bf26b47bc0bf44/requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f", size = 62574, upload-time = "2023-05-22T15:12:42.313Z" }, +] + +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, +] + +[[package]] +name = "ruff" +version = "0.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a4/55/9f485266e6326cab707369601b13e3e72eb90ba3eee2d6779549a00a0d58/ruff-0.6.4.tar.gz", hash = "sha256:ac3b5bfbee99973f80aa1b7cbd1c9cbce200883bdd067300c22a6cc1c7fba212", size = 2469375, upload-time = "2024-09-05T15:51:36.418Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/78/307591f81d09c8721b5e64539f287c82c81a46f46d16278eb27941ac17f9/ruff-0.6.4-py3-none-linux_armv6l.whl", hash = "sha256:c4b153fc152af51855458e79e835fb6b933032921756cec9af7d0ba2aa01a258", size = 9692673, upload-time = "2024-09-05T15:50:50.469Z" }, + { url = "https://files.pythonhosted.org/packages/69/63/ef398fcacdbd3995618ed30b5a6c809a1ebbf112ba604b3f5b8c3be464cf/ruff-0.6.4-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:bedff9e4f004dad5f7f76a9d39c4ca98af526c9b1695068198b3bda8c085ef60", size = 9481182, upload-time = "2024-09-05T15:50:54.027Z" }, + { url = "https://files.pythonhosted.org/packages/a6/fd/8784e3bbd79bc17de0a62de05fe5165f494ff7d77cb06630d6428c2f10d2/ruff-0.6.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d02a4127a86de23002e694d7ff19f905c51e338c72d8e09b56bfb60e1681724f", size = 9174356, upload-time = "2024-09-05T15:50:56.694Z" }, + { url = "https://files.pythonhosted.org/packages/6d/bc/c69db2d68ac7bfbb222c81dc43a86e0402d0063e20b13e609f7d17d81d3f/ruff-0.6.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7862f42fc1a4aca1ea3ffe8a11f67819d183a5693b228f0bb3a531f5e40336fc", size = 10129365, upload-time = "2024-09-05T15:50:59.674Z" }, + { url = "https://files.pythonhosted.org/packages/3b/10/8ed14ff60a4e5eb08cac0a04a9b4e8590c72d1ce4d29ef22cef97d19536d/ruff-0.6.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eebe4ff1967c838a1a9618a5a59a3b0a00406f8d7eefee97c70411fefc353617", size = 9483351, upload-time = "2024-09-05T15:51:02.296Z" }, + { url = "https://files.pythonhosted.org/packages/a9/69/13316b8d64ffd6a43627cf0753339a7f95df413450c301a60904581bee6e/ruff-0.6.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:932063a03bac394866683e15710c25b8690ccdca1cf192b9a98260332ca93408", size = 10301099, upload-time = "2024-09-05T15:51:04.68Z" }, + { url = "https://files.pythonhosted.org/packages/42/00/9623494087272643e8f02187c266638306c6829189a5bf1446968bbe438b/ruff-0.6.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:50e30b437cebef547bd5c3edf9ce81343e5dd7c737cb36ccb4fe83573f3d392e", size = 11033216, upload-time = "2024-09-05T15:51:07.111Z" }, + { url = "https://files.pythonhosted.org/packages/c5/31/e0c9d881db42ea1267e075c29aafe0db5a8a3024b131f952747f6234f858/ruff-0.6.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c44536df7b93a587de690e124b89bd47306fddd59398a0fb12afd6133c7b3818", size = 10618140, upload-time = "2024-09-05T15:51:10.005Z" }, + { url = "https://files.pythonhosted.org/packages/5b/35/f1d8b746aedd4c8fde4f83397e940cc4c8fc619860ebbe3073340381a34d/ruff-0.6.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ea086601b22dc5e7693a78f3fcfc460cceabfdf3bdc36dc898792aba48fbad6", size = 11606672, upload-time = "2024-09-05T15:51:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/c5/70/899b03cbb3eb48ed0507d4b32b6f7aee562bc618ef9ffda855ec98c0461a/ruff-0.6.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b52387d3289ccd227b62102c24714ed75fbba0b16ecc69a923a37e3b5e0aaaa", size = 10288013, upload-time = "2024-09-05T15:51:15.487Z" }, + { url = "https://files.pythonhosted.org/packages/17/c6/906bf895640521ca5115ccdd857b2bac42bd61facde6620fdc2efc0a4806/ruff-0.6.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0308610470fcc82969082fc83c76c0d362f562e2f0cdab0586516f03a4e06ec6", size = 10109473, upload-time = "2024-09-05T15:51:17.623Z" }, + { url = "https://files.pythonhosted.org/packages/28/da/1284eb04172f8a5d42eb52fce9d643dd747ac59a4ed6c5d42729f72e934d/ruff-0.6.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:803b96dea21795a6c9d5bfa9e96127cc9c31a1987802ca68f35e5c95aed3fc0d", size = 9568817, upload-time = "2024-09-05T15:51:20.771Z" }, + { url = "https://files.pythonhosted.org/packages/6c/e2/f8250b54edbb2e9222e22806e1bcc35a192ac18d1793ea556fa4977a843a/ruff-0.6.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:66dbfea86b663baab8fcae56c59f190caba9398df1488164e2df53e216248baa", size = 9910840, upload-time = "2024-09-05T15:51:23.565Z" }, + { url = "https://files.pythonhosted.org/packages/9c/7c/dcf2c10562346ecdf6f0e5f6669b2ddc9a74a72956c3f419abd6820c2aff/ruff-0.6.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:34d5efad480193c046c86608dbba2bccdc1c5fd11950fb271f8086e0c763a5d1", size = 10354263, upload-time = 
"2024-09-05T15:51:26.604Z" }, + { url = "https://files.pythonhosted.org/packages/f1/94/c39d7ac5729e94788110503d928c98c203488664b0fb92c2b801cb832bec/ruff-0.6.4-py3-none-win32.whl", hash = "sha256:f0f8968feea5ce3777c0d8365653d5e91c40c31a81d95824ba61d871a11b8523", size = 7958602, upload-time = "2024-09-05T15:51:29.563Z" }, + { url = "https://files.pythonhosted.org/packages/6b/d2/2dee8c547bee3d4cfdd897f7b8e38510383acaff2c8130ea783b67631d72/ruff-0.6.4-py3-none-win_amd64.whl", hash = "sha256:549daccee5227282289390b0222d0fbee0275d1db6d514550d65420053021a58", size = 8795059, upload-time = "2024-09-05T15:51:31.994Z" }, + { url = "https://files.pythonhosted.org/packages/07/1a/23280818aa4fa89bd0552aab10857154e1d3b90f27b5b745f09ec1ac6ad8/ruff-0.6.4-py3-none-win_arm64.whl", hash = "sha256:ac4b75e898ed189b3708c9ab3fc70b79a433219e1e87193b4f2b77251d058d14", size = 8239636, upload-time = "2024-09-05T15:51:34.17Z" }, +] + +[[package]] +name = "sgmllib3k" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/bd/3704a8c3e0942d711c1299ebf7b9091930adae6675d7c8f476a7ce48653c/sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9", size = 5750, upload-time = "2010-08-24T14:33:52.445Z" } + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" 
}, +] + +[[package]] +name = "tenacity" +version = "9.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/94/91fccdb4b8110642462e653d5dcb27e7b674742ad68efd146367da7bdb10/tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b", size = 47421, upload-time = "2024-07-29T12:12:27.547Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539", size = 28169, upload-time = "2024-07-29T12:12:25.825Z" }, +] + +[[package]] +name = "tiktoken" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991, upload-time = "2025-02-14T06:03:01.003Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987, upload-time = "2025-02-14T06:02:14.174Z" }, + { url = "https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155, upload-time = "2025-02-14T06:02:15.384Z" }, + { url = "https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898, upload-time = "2025-02-14T06:02:16.666Z" }, + { url = "https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535, upload-time = "2025-02-14T06:02:18.595Z" }, + { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548, upload-time = "2025-02-14T06:02:20.729Z" }, + { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895, upload-time = "2025-02-14T06:02:22.67Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073, upload-time = "2025-02-14T06:02:24.768Z" }, + { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075, upload-time = "2025-02-14T06:02:26.92Z" }, + { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size 
= 1140754, upload-time = "2025-02-14T06:02:28.124Z" }, + { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678, upload-time = "2025-02-14T06:02:29.845Z" }, + { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283, upload-time = "2025-02-14T06:02:33.838Z" }, + { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897, upload-time = "2025-02-14T06:02:36.265Z" }, + { url = "https://files.pythonhosted.org/packages/7a/11/09d936d37f49f4f494ffe660af44acd2d99eb2429d60a57c71318af214e0/tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb", size = 1064919, upload-time = "2025-02-14T06:02:37.494Z" }, + { url = "https://files.pythonhosted.org/packages/80/0e/f38ba35713edb8d4197ae602e80837d574244ced7fb1b6070b31c29816e0/tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63", size = 1007877, upload-time = "2025-02-14T06:02:39.516Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/9197f77421e2a01373e27a79dd36efdd99e6b4115746ecc553318ecafbf0/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01", size = 1140095, upload-time = "2025-02-14T06:02:41.791Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/bb/4513da71cac187383541facd0291c4572b03ec23c561de5811781bbd988f/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139", size = 1195649, upload-time = "2025-02-14T06:02:43Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5c/74e4c137530dd8504e97e3a41729b1103a4ac29036cbfd3250b11fd29451/tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a", size = 1258465, upload-time = "2025-02-14T06:02:45.046Z" }, + { url = "https://files.pythonhosted.org/packages/de/a8/8f499c179ec900783ffe133e9aab10044481679bb9aad78436d239eee716/tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95", size = 894669, upload-time = "2025-02-14T06:02:47.341Z" }, +] + +[[package]] +name = "tomli" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240808" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/08/6f5737f645571b7a0b1ebd2fe8b5cf1ee4ec3e707866ca96042a86fc1d10/types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af", size = 12359, upload-time = "2024-08-08T02:30:32.727Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/ad/ffbad24e2bc8f20bf047ec22af0c0a92f6ce2071eb21c9103df600cda6de/types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = 
"sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35", size = 15298, upload-time = "2024-08-08T02:30:31.101Z" }, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20240907" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/94/b6f90e5f09e1d621d5cd6d1057d5d28d4019d95f06eab205afa743ba1907/types-requests-2.32.0.20240907.tar.gz", hash = "sha256:ff33935f061b5e81ec87997e91050f7b4af4f82027a7a7a9d9aaea04a963fdf8", size = 18004, upload-time = "2024-09-07T02:35:43.592Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/6e/425219be1dfc954c3e129b3ea70407abc78c1bd6414d0c7180df9940ca1f/types_requests-2.32.0.20240907-py3-none-any.whl", hash = "sha256:1d1e79faeaf9d42def77f3c304893dea17a97cae98168ac69f3cb465516ee8da", size = 15828, upload-time = "2024-09-07T02:35:42.152Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = 
"sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.36.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + 
{ name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, + { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = 
"2025-03-05T20:01:59.063Z" }, + { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, + { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" }, + { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" }, + { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, + { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" }, + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = 
"2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = 
"2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url 
= "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +]