diff --git a/.beads/issues.jsonl b/.beads/issues.jsonl index 485c159..0e4945f 100644 --- a/.beads/issues.jsonl +++ b/.beads/issues.jsonl @@ -1,3 +1,4 @@ +{"id":"openadapt-evals-0an","title":"CLI: aws-costs and waa-image delete commands added","notes":"openadapt-evals PR #24: Added aws-costs command, waa-image delete action, changed default to Docker Hub","status":"open","priority":1,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-29T16:17:03.612486-05:00","created_by":"Richard Abrich","updated_at":"2026-01-29T16:17:03.612486-05:00"} {"id":"openadapt-evals-0dt","title":"Add pre-flight check for Windows install issues","description":"Detect product key prompts or stuck installations BEFORE 10-minute timeout. Check container logs for specific error patterns.","status":"open","priority":1,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-20T18:57:42.24338-05:00","created_by":"Richard Abrich","updated_at":"2026-01-20T18:57:42.24338-05:00"} {"id":"openadapt-evals-0ms","title":"Run 20-50 task evaluation","description":"Run WAA benchmark on 20-50 tasks to measure baseline success rate. Target is \u003e80% success rate. This provides quantitative data on agent performance.","notes":"2026-01-29: Azure quota limits parallelization to 2 workers max (10 vCPUs / 4 vCPUs per worker). 10-worker test failed with ClusterCoreQuotaReached. User declined manual portal quota increase. Waiting for api-openai test results before full 154-task run.","status":"open","priority":0,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-20T17:44:26.461765-05:00","created_by":"Richard Abrich","updated_at":"2026-01-29T00:28:02.609085-05:00","dependencies":[{"issue_id":"openadapt-evals-0ms","depends_on_id":"openadapt-evals-c3f","type":"blocks","created_at":"2026-01-20T17:44:26.462904-05:00","created_by":"Richard Abrich"}]} {"id":"openadapt-evals-2ar","title":"Implement permanent fix for Windows unattended install","status":"closed","priority":0,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-20T18:59:36.544113-05:00","created_by":"Richard Abrich","updated_at":"2026-01-20T20:32:06.634857-05:00","closed_at":"2026-01-20T20:32:06.634857-05:00","close_reason":"Duplicate of openadapt-evals-b3l"} @@ -8,5 +9,6 @@ {"id":"openadapt-evals-czj","title":"Docker installation fails on Azure VM - pkgProblemResolver error","description":"vm setup-waa fails to install Docker. Error: pkgProblemResolver::Resolve generated breaks. Need to investigate root cause before attempting fix.","status":"open","priority":0,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-20T22:48:59.527637-05:00","created_by":"Richard Abrich","updated_at":"2026-01-20T22:48:59.527637-05:00"} {"id":"openadapt-evals-dke","title":"SYSTEM: Create knowledge persistence workflow using Beads","description":"Every fix/approach must be logged as a Beads issue with:\n1. Problem description\n2. Attempted solution\n3. Result (worked/failed/partial)\n4. Root cause if known\n5. Files changed\n\nBefore any fix attempt, agent MUST:\n1. Run 'bd list --labels=fix,approach' to see prior attempts\n2. Review what was tried before\n3. Document new attempt BEFORE implementing\n\nAfter context compaction, first action:\n1. Run 'bd ready' for current tasks\n2. Run 'bd list --labels=recurring' for known recurring issues\n3. 
Check docs/RECURRING_ISSUES.md for patterns","status":"open","priority":0,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-20T19:00:18.155796-05:00","created_by":"Richard Abrich","updated_at":"2026-01-20T19:00:18.155796-05:00"} {"id":"openadapt-evals-gna","title":"Test simplified Dockerfile (Azure mode)","description":"Testing Dockerfile.simplified which uses vanilla WAA Azure mode: native OEM mechanism (C:\\oem), InstallFrom element for unattended install, VERSION=11e for no product key. Steps: 1) Delete current VM 2) Create fresh VM 3) Build simplified image 4) Test Windows installation via QEMU screenshots","notes":"2026-01-22: Confirmed the blocker is not just docker pull; even starting the existing 'winarena' container via az vm run-command timed out.\n\n- smoke-live tried to run docker start winarena via run-command and timed out (900s)\n- WAA server remained unreachable at http://172.171.112.41:5000\n- VM was deallocated after the attempt\n\nImplication: VM/docker state is unhealthy or container start is hanging (possibly due to incomplete image extraction / stuck daemon / disk pressure).\nNext: add/run a vm-debug command to capture docker/system logs and determine whether to rebuild VM/image, pin/mirror image (ACR), or adjust docker config.","status":"open","priority":0,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-21T12:47:15.12243-05:00","created_by":"Richard Abrich","updated_at":"2026-01-22T10:32:01.038825-05:00","labels":["testing","waa"],"comments":[{"id":3,"issue_id":"openadapt-evals-gna","author":"Richard Abrich","text":"Session Recovery 2026-01-22 17:58: Previous agents killed during compaction. VM state: Docker/containerd unhealthy, disk /mnt only 32GB (need 47GB+ for vanilla WAA). Git-lfs failing. User feedback: 1) use beads, 2) larger disk, 3) clean up CLI, 4) vanilla WAA config.","created_at":"2026-01-22T18:05:45Z"},{"id":4,"issue_id":"openadapt-evals-gna","author":"Richard Abrich","text":"Launched 3 parallel agents: ae159fc (VM disk upgrade), aabad47 (CLI cleanup), aee4e8a (fix containerd). Check /private/tmp/claude/-Users-abrichr-oa-src-openadapt-ml/tasks/*.output for results.","created_at":"2026-01-22T18:06:18Z"},{"id":5,"issue_id":"openadapt-evals-gna","author":"Richard Abrich","text":"WORKFLOW DOCUMENTED: VM config changes = delete VM -\u003e update code -\u003e relaunch. Added to CLAUDE.md. Default VM size now D8ds_v5 (300GB). Launching fresh VM now.","created_at":"2026-01-22T18:09:12Z"},{"id":6,"issue_id":"openadapt-evals-gna","author":"Richard Abrich","text":"2026-01-22 18:20: VM resources cleaned up, launched agent a9be1f8 to add auto-cleanup to CLI, WAA setup retrying in background (b04fcbe). Workflow documented in CLAUDE.md and STATUS.md.","created_at":"2026-01-22T18:11:56Z"},{"id":7,"issue_id":"openadapt-evals-gna","author":"Richard Abrich","text":"2026-01-22 18:30: VM created with D8s_v3 fallback (D8ds_v5 quota 0), IP 20.120.37.97. Restored waa_deploy symlink. Docker image building. W\u0026B integration agent a21c3ef running.","created_at":"2026-01-22T18:25:29Z"},{"id":8,"issue_id":"openadapt-evals-gna","author":"Richard Abrich","text":"2026-01-22 19:05: WAA Docker image built successfully! Container running. Windows booting. VM: 20.120.37.97, VNC: http://20.120.37.97:8006","created_at":"2026-01-22T18:47:03Z"}]} +{"id":"openadapt-evals-hvm","title":"VL model fix PR #18 ready to merge","notes":"openadapt-ml PR #18: VL model detection, exception handling, assistant_only_loss fix. All tests passing. 
Ready to merge.","status":"open","priority":0,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-29T16:17:03.491938-05:00","created_by":"Richard Abrich","updated_at":"2026-01-29T16:17:03.491938-05:00"}
 {"id":"openadapt-evals-sz4","title":"RCA: Windows product key prompt recurring issue","status":"closed","priority":0,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-20T18:59:36.266286-05:00","created_by":"Richard Abrich","updated_at":"2026-01-20T20:32:06.493102-05:00","closed_at":"2026-01-20T20:32:06.493102-05:00","close_reason":"RCA complete - root cause is VERSION mismatch (CLI=11, Dockerfile=11e). Fix documented in RECURRING_ISSUES.md and WINDOWS_PRODUCT_KEY_RCA.md"}
 {"id":"openadapt-evals-wis","title":"Add pre-flight check to detect Windows install issues","status":"closed","priority":1,"issue_type":"task","owner":"richard.abrich@gmail.com","created_at":"2026-01-20T18:59:36.865052-05:00","created_by":"Richard Abrich","updated_at":"2026-01-20T20:32:06.757261-05:00","closed_at":"2026-01-20T20:32:06.757261-05:00","close_reason":"Duplicate of openadapt-evals-0dt"}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..cfc11e7
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,101 @@
+name: Auto Release
+
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - '**.py'
+      - 'pyproject.toml'
+
+jobs:
+  release:
+    name: Bump version and release
+    runs-on: ubuntu-latest
+    # Skip empty messages and version-bump commits, so the version-bump
+    # commit pushed below does not retrigger this workflow
+    if: |
+      github.event.head_commit.message != '' &&
+      !startsWith(github.event.head_commit.message, 'chore: bump version')
+    permissions:
+      contents: write
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          token: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+
+      - name: Install toml
+        run: pip install toml
+
+      - name: Determine version bump type
+        id: bump-type
+        run: |
+          COMMIT_MSG="${{ github.event.head_commit.message }}"
+          # Extract the type from the conventional commit prefix (feat, fix, etc.)
+ if [[ "$COMMIT_MSG" =~ ^feat ]]; then + echo "type=minor" >> $GITHUB_OUTPUT + elif [[ "$COMMIT_MSG" =~ ^(fix|perf) ]]; then + echo "type=patch" >> $GITHUB_OUTPUT + elif [[ "$COMMIT_MSG" =~ ^(docs|style|refactor|test|chore|ci|build) ]]; then + echo "type=patch" >> $GITHUB_OUTPUT + else + # Default to patch for non-conventional commits + echo "type=patch" >> $GITHUB_OUTPUT + fi + + - name: Bump version + id: bump + run: | + python << 'EOF' + import toml + import os + + # Read current version + with open('pyproject.toml', 'r') as f: + data = toml.load(f) + + current = data['project']['version'] + major, minor, patch = map(int, current.split('.')) + + bump_type = os.environ.get('BUMP_TYPE', 'patch') + + if bump_type == 'major': + major += 1 + minor = 0 + patch = 0 + elif bump_type == 'minor': + minor += 1 + patch = 0 + else: # patch + patch += 1 + + new_version = f"{major}.{minor}.{patch}" + data['project']['version'] = new_version + + with open('pyproject.toml', 'w') as f: + toml.dump(data, f) + + print(f"Bumped {current} -> {new_version}") + + # Set output + with open(os.environ['GITHUB_OUTPUT'], 'a') as f: + f.write(f"version={new_version}\n") + f.write(f"tag=v{new_version}\n") + EOF + env: + BUMP_TYPE: ${{ steps.bump-type.outputs.type }} + + - name: Commit and tag + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git add pyproject.toml + git commit -m "chore: bump version to ${{ steps.bump.outputs.version }}" + git tag ${{ steps.bump.outputs.tag }} + git push origin main --tags diff --git a/README.md b/README.md index f2c8802..477e0a1 100644 --- a/README.md +++ b/README.md @@ -5,41 +5,44 @@ [![Downloads](https://img.shields.io/pypi/dm/openadapt-evals.svg)](https://pypi.org/project/openadapt-evals/) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Python 3.10+](https://img.shields.io/badge/python-3.10%2B-blue)](https://www.python.org/downloads/) -[![Azure Success Rate](https://img.shields.io/badge/Azure%20Success%20Rate-95%25%2B-success)](https://github.com/OpenAdaptAI/openadapt-evals) -[![Cost Savings](https://img.shields.io/badge/Cost%20Savings-67%25-brightgreen)](https://github.com/OpenAdaptAI/openadapt-evals/blob/main/COST_OPTIMIZATION.md) - -Evaluation infrastructure for GUI agent benchmarks. +Evaluation infrastructure for GUI agent benchmarks. **Simplified CLI toolkit for Windows Agent Arena.** ## Overview `openadapt-evals` provides a unified framework for evaluating GUI automation agents across standardized benchmarks like Windows Agent Arena (WAA), OSWorld, WebArena, and others. -## Recent Improvements +## Windows Agent Arena (WAA) - Headline Feature + +> **Status**: Actively running full 154-task evaluation. Results coming soon. + +A **simplified CLI toolkit** for the [Windows Agent Arena](https://github.com/microsoft/WindowsAgentArena) benchmark, providing: +- Easy Azure VM setup and SSH tunnel management +- Agent adapters for Claude, GPT-4o, and custom agents +- Results viewer with per-domain breakdown +- Parallelization support for faster evaluations + +See the [WAA Benchmark Results](#waa-benchmark-results) section below for current status. 
-We've made significant improvements to reliability, cost-efficiency, and observability: +## Roadmap (In Progress) -### Azure Reliability (v0.2.0 - January 2026) -- **95%+ Success Rate Target**: Fixed nested virtualization issues that caused 0% task completion -- **VM Configuration**: Upgraded to `Standard_D4s_v5` with proper nested virtualization support +The following features are under active development: + +### Azure Reliability (`[IN PROGRESS]`) +- **Goal**: 95%+ task completion rate (vs. early issues with 0%) +- **VM Configuration**: Using `Standard_D4s_v5` with nested virtualization (configurable) - **Health Monitoring**: Automatic detection and retry of stuck jobs -- **Fast Failure Detection**: 10-minute timeout instead of 8+ hour hangs -- See [PR #11](https://github.com/OpenAdaptAI/openadapt-evals/pull/11) for details - -### Cost Optimization (v0.2.0 - January 2026) -- **67% Cost Reduction**: From $7.68 to $2.50 per full evaluation (154 tasks) -- **Tiered VM Sizing**: Automatic VM size selection based on task complexity (37% savings) -- **Spot Instance Support**: 70-80% discount on compute costs (64% savings with tiered VMs) -- **Azure Container Registry**: 10x faster image pulls (1-2 min vs 8-12 min) -- **Real-time Cost Tracking**: Monitor costs during evaluation -- See [COST_OPTIMIZATION.md](./COST_OPTIMIZATION.md) and [PR #13](https://github.com/OpenAdaptAI/openadapt-evals/pull/13) for details - -### Screenshot Validation & Viewer (v0.2.0 - January 2026) -- **Real Benchmark Screenshots**: Viewer now displays actual WAA evaluation screenshots + +### Cost Optimization (`[IN PROGRESS]`) +- **Goal**: Reduce per-evaluation cost from ~$7.68 to ~$2.50 (154 tasks) +- **Tiered VM Sizing**: Match VM size to task complexity +- **Spot Instance Support**: Use preemptible VMs for 70-80% discount +- See [COST_OPTIMIZATION.md](./COST_OPTIMIZATION.md) for design + +### Benchmark Viewer (Available) +- **Real Benchmark Screenshots**: Viewer displays actual WAA evaluation screenshots - **Auto-Screenshot Tool**: Automated screenshot generation with Playwright -- **Screenshot Validation**: Manifest-based validation ensuring correctness - **Execution Logs**: Step-by-step logs with search and filtering -- **Live Monitoring**: Real-time Azure ML job monitoring with auto-refresh -- See [PR #6](https://github.com/OpenAdaptAI/openadapt-evals/pull/6) for details +- **Live Monitoring**: Real-time progress tracking ## Installation @@ -79,7 +82,7 @@ adapter = WAALiveAdapter(config) agent = ApiAgent(provider="anthropic") # or "openai" for GPT-5.1 # Run evaluation -results = evaluate_agent_on_benchmark(agent, adapter, task_ids=["notepad_1"]) +results = evaluate_agent_on_benchmark(agent, adapter, task_ids=["notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS"]) # Compute metrics metrics = compute_metrics(results) @@ -262,7 +265,7 @@ The package provides a CLI for running WAA evaluations: python -m openadapt_evals.benchmarks.cli probe --server http://vm-ip:5000 # Run live evaluation against a WAA server -python -m openadapt_evals.benchmarks.cli live --server http://vm-ip:5000 --task-ids notepad_1,notepad_2 +python -m openadapt_evals.benchmarks.cli live --server http://vm-ip:5000 --task-ids notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS,notepad_a7d4b6c5-569b-452e-9e1d-ffdb3d431d15-WOS # Generate HTML viewer for results python -m openadapt_evals.benchmarks.cli view --run-name my_eval_run @@ -298,7 +301,7 @@ if not adapter.check_connection(): print("WAA server not ready") # Run evaluation -results = 
evaluate_agent_on_benchmark(agent, adapter, task_ids=["notepad_1"]) +results = evaluate_agent_on_benchmark(agent, adapter, task_ids=["notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS"]) ``` ### Local WAA Evaluation @@ -318,6 +321,11 @@ results = evaluate_agent_on_benchmark(agent, adapter, task_ids=[t.task_id for t Run WAA at scale using Azure ML compute with optimized costs: +> **⚠️ Quota Requirements**: Parallel evaluation requires sufficient Azure vCPU quota. +> - Default VM: `Standard_D4s_v5` (4 vCPUs per worker) +> - 10 workers = 40 vCPUs required +> - Default quota is typically 10 vCPUs - [request an increase](https://learn.microsoft.com/en-us/azure/quotas/quickstart-increase-quota-portal) before running parallel evaluations + ```bash # Install Azure dependencies pip install openadapt-evals[azure] @@ -358,7 +366,7 @@ results = orchestrator.run_evaluation( ) ``` -**Azure Reliability**: The orchestrator now uses `Standard_D4s_v5` VMs with proper nested virtualization support and automatic health monitoring, achieving 95%+ success rates. +**Azure Reliability**: The orchestrator uses `Standard_D4s_v5` VMs with nested virtualization support and automatic health monitoring. ### Live Monitoring @@ -371,7 +379,7 @@ pip install openadapt-evals[viewer] # Start an Azure evaluation (in terminal 1) python -m openadapt_evals.benchmarks.cli azure \ --workers 1 \ - --task-ids notepad_1,browser_1 \ + --task-ids notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS,chrome_2ae9ba84-3a0d-4d4c-8338-3a1478dc5fe3-wos \ --waa-path /path/to/WAA # Monitor job logs in real-time (in terminal 2) diff --git a/docs/WAA_DOCKER_IMAGE_DESIGN.md b/docs/WAA_DOCKER_IMAGE_DESIGN.md new file mode 100644 index 0000000..c201f1d --- /dev/null +++ b/docs/WAA_DOCKER_IMAGE_DESIGN.md @@ -0,0 +1,219 @@ +# WAA Custom Docker Image Design + +## Problem + +The vanilla `windowsarena/winarena:latest` image does NOT work for unattended WAA evaluation because: + +1. **Outdated base**: Uses old dockurr/windows that doesn't auto-download Windows +2. **No FirstLogonCommands**: Missing patches to autounattend.xml that: + - Run `install.bat` (installs Python, Chrome, dependencies) + - Create scheduled task for WAA server auto-start + - Start WAA server on first boot +3. **Manual intervention required**: Without these patches, user must manually click through Windows setup + +Our custom `waa-auto` Dockerfile (in `openadapt_ml/benchmarks/waa_deploy/Dockerfile`) solves these issues. + +## Current State + +- Custom Dockerfile exists at: `openadapt-ml/openadapt_ml/benchmarks/waa_deploy/Dockerfile` +- Building requires manual Docker commands +- No automated push to registry +- Azure parallelization defaults to vanilla image (broken) + +## Requirements + +1. **Easy CLI**: Single command to build and push custom image +2. **Registry support**: Push to Docker Hub or Azure Container Registry (ACR) +3. **Azure integration**: Parallelization should auto-use custom image +4. 
**Idempotent**: Skip build if image already exists with same hash
+
+## Proposed CLI Commands
+
+```bash
+# Build custom WAA image locally
+uv run python -m openadapt_evals.benchmarks.cli waa-image build
+
+# Push to Docker Hub (requires DOCKER_USERNAME, DOCKER_PASSWORD or login)
+uv run python -m openadapt_evals.benchmarks.cli waa-image push --registry dockerhub
+
+# Push to Azure Container Registry (uses azure-setup credentials)
+uv run python -m openadapt_evals.benchmarks.cli waa-image push --registry acr
+
+# Build and push in one command
+uv run python -m openadapt_evals.benchmarks.cli waa-image build-push --registry acr
+
+# Check if custom image exists in registry
+uv run python -m openadapt_evals.benchmarks.cli waa-image check
+```
+
+## Implementation
+
+### 1. Copy Dockerfile to openadapt-evals
+
+The Dockerfile and supporting files should live in openadapt-evals (the benchmark package), not openadapt-ml:
+
+```
+openadapt_evals/
+  benchmarks/
+    waa_deploy/
+      Dockerfile              # Custom WAA image
+      start_waa_server.bat    # WAA server startup script
+```
+
+### 2. CLI Command: `waa-image`
+
+```python
+def cmd_waa_image(args):
+    action = args.action  # build, push, build-push, check
+    registry = args.registry  # dockerhub, acr
+
+    if action in ("build", "build-push"):
+        build_waa_image()
+
+    if action in ("push", "build-push"):
+        if registry == "dockerhub":
+            push_to_dockerhub()
+        elif registry == "acr":
+            push_to_acr()
+
+    if action == "check":
+        check_image_exists(registry)
+```
+
+### 3. Build Function
+
+```python
+import subprocess
+from pathlib import Path
+
+def build_waa_image(tag: str = "waa-auto:latest") -> bool:
+    """Build custom WAA Docker image."""
+    dockerfile_dir = Path(__file__).parent / "waa_deploy"
+
+    # Check if Dockerfile exists
+    if not (dockerfile_dir / "Dockerfile").exists():
+        raise FileNotFoundError("Dockerfile not found in waa_deploy/")
+
+    # Build image
+    cmd = ["docker", "build", "-t", tag, str(dockerfile_dir)]
+    subprocess.run(cmd, check=True)
+    return True
+```
+
+### 4. Push Functions
+
+```python
+import os
+import subprocess
+
+def push_to_dockerhub(tag: str, repo: str = "openadaptai/waa-auto"):
+    """Push image to Docker Hub."""
+    # Tag for Docker Hub
+    full_tag = f"{repo}:{tag}"
+    subprocess.run(["docker", "tag", f"waa-auto:{tag}", full_tag], check=True)
+    subprocess.run(["docker", "push", full_tag], check=True)
+
+def push_to_acr(tag: str):
+    """Push image to Azure Container Registry."""
+    # Get ACR name from config
+    acr_name = os.getenv("AZURE_ACR_NAME", "openadaptacr")
+    full_tag = f"{acr_name}.azurecr.io/waa-auto:{tag}"
+
+    # Login to ACR
+    subprocess.run(["az", "acr", "login", "--name", acr_name], check=True)
+
+    # Tag and push
+    subprocess.run(["docker", "tag", f"waa-auto:{tag}", full_tag], check=True)
+    subprocess.run(["docker", "push", full_tag], check=True)
+```
+
+### 5. Update Azure Config
+
+Update `azure.py` to use custom image by default:
+
+```python
+@dataclass
+class AzureConfig:
+    # Change default from vanilla to custom
+    docker_image: str = "openadaptai/waa-auto:latest"  # Docker Hub
+    # Or for ACR:
+    # docker_image: str = "{acr_name}.azurecr.io/waa-auto:latest"
+```
+
+### 6. Auto-build on First Use
+
+The `azure` command should check whether the custom image exists and, if not, tell the user how to build it:
+
+```python
+def cmd_azure(args):
+    # Check if custom image exists
+    if not image_exists_in_registry():
+        print("Custom WAA image not found.")
+        print("Run: uv run python -m openadapt_evals.benchmarks.cli waa-image build-push")
+        return 1
+
+    # Continue with Azure evaluation...
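+
+# image_exists_in_registry() is referenced above but not defined in this doc;
+# here is a minimal sketch. It assumes the public Docker Hub v2 tags API and
+# the openadaptai/waa-auto repo named above -- illustrative, not final.
+def image_exists_in_registry(repo: str = "openadaptai/waa-auto",
+                             tag: str = "latest") -> bool:
+    """Return True if repo:tag is visible in the registry."""
+    import urllib.request
+    url = f"https://hub.docker.com/v2/repositories/{repo}/tags/{tag}"
+    try:
+        with urllib.request.urlopen(url, timeout=10) as resp:
+            return resp.status == 200
+    except Exception:
+        return False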
+``` + +## Registry Options + +### Option A: Docker Hub (Recommended for simplicity) +- Public registry, no Azure setup required +- Image: `openadaptai/waa-auto:latest` +- Requires Docker Hub account and `docker login` + +### Option B: Azure Container Registry +- Private registry, integrated with Azure ML +- Faster pulls from Azure compute (same region) +- Requires ACR setup via `azure-setup` command +- Image: `{acr_name}.azurecr.io/waa-auto:latest` + +## Current State (Updated 2026-01-29) + +**waa_deploy/ ALREADY EXISTS in openadapt-evals:** +``` +openadapt_evals/waa_deploy/ +├── Dockerfile # ✅ Complete custom Dockerfile +├── __init__.py # ✅ Package init +├── api_agent.py # ✅ API agent for Claude/GPT +└── start_waa_server.bat # ✅ Server startup script +``` + +The Dockerfile has all critical modifications: +- Uses `dockurr/windows:latest` as modern base (auto-downloads Windows) +- Patches autounattend.xml with FirstLogonCommands +- Runs install.bat, creates scheduled task for WAA server auto-start +- Copies Python 3.9 from vanilla (transformers 4.46.2 compatibility) +- Patches IP addresses (20.20.20.21 → 172.30.0.2) +- Includes api_agent.py for Claude/GPT-4o support + +## Remaining Tasks + +1. ~~Copy `waa_deploy/` from openadapt-ml to openadapt-evals~~ ✅ DONE +2. Add `waa-image` CLI command (build, push, check) +3. Update `azure.py` default docker_image to use custom image +4. Build and push custom image to Docker Hub +5. Test Azure parallelization with custom image + +## Open Questions + +1. **Versioning**: Should we tag images with version numbers (v1.0.0) or just use `latest`? +2. **CI/CD**: Should GitHub Actions auto-build on Dockerfile changes? +3. **Size optimization**: Image is ~25GB - can we reduce? + +## Testing + +```bash +# 1. Build locally +uv run python -m openadapt_evals.benchmarks.cli waa-image build + +# 2. Test locally (requires VM with nested virt) +docker run -d --name test-waa \ + --device=/dev/kvm --cap-add NET_ADMIN \ + -p 8006:8006 -p 5000:5000 \ + -v /tmp/waa-storage:/storage \ + -e VERSION=11e \ + waa-auto:latest + +# 3. Wait for Windows install, verify WAA server starts automatically + +# 4. Push to registry +uv run python -m openadapt_evals.benchmarks.cli waa-image push --registry dockerhub + +# 5. 
Test Azure parallelization +uv run python -m openadapt_evals.benchmarks.cli azure --workers 2 --task-ids notepad_1,notepad_2 +``` diff --git a/docs/cost/COST_OPTIMIZATION.md b/docs/cost/COST_OPTIMIZATION.md index 992a45d..8bb1cf7 100644 --- a/docs/cost/COST_OPTIMIZATION.md +++ b/docs/cost/COST_OPTIMIZATION.md @@ -366,7 +366,7 @@ print(f"Savings: {optimized['savings_percentage']}%") | `AZURE_ENABLE_TIERED_VMS` | Enable automatic VM tier selection | false | | `AZURE_USE_SPOT_INSTANCES` | Use spot instances | false (true for development) | | `AZURE_MAX_SPOT_PRICE` | Maximum hourly spot price | 0.5 | -| `AZURE_DOCKER_IMAGE` | Docker image to use | windowsarena/winarena:latest | +| `AZURE_DOCKER_IMAGE` | Docker image to use | public.ecr.aws/g3w3k7s5/waa-auto:latest | ## API Reference diff --git a/openadapt_evals/agents/retrieval_agent.py b/openadapt_evals/agents/retrieval_agent.py index 400e176..aeb3211 100644 --- a/openadapt_evals/agents/retrieval_agent.py +++ b/openadapt_evals/agents/retrieval_agent.py @@ -27,7 +27,7 @@ --agent retrieval-claude \ --demo-library /path/to/demos \ --server http://vm:5000 \ - --task-ids notepad_1 + --task-ids notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS """ from __future__ import annotations diff --git a/openadapt_evals/benchmarks/azure.py b/openadapt_evals/benchmarks/azure.py index cfd35cc..476dbce 100644 --- a/openadapt_evals/benchmarks/azure.py +++ b/openadapt_evals/benchmarks/azure.py @@ -244,16 +244,24 @@ class AzureConfig: max_spot_price: Maximum hourly price for spot instances (default: 0.5). spot_eviction_policy: What to do when spot instance is evicted (Deallocate or Delete). environment: Deployment environment (production or development). + enable_ssh: Whether to enable SSH access for VNC debugging (default: True). + ssh_public_key_path: Path to SSH public key file (default: ~/.ssh/id_rsa.pub). 
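+
+    Example:
+        # Illustrative; reads the AZURE_* environment variables
+        # documented in from_env() below.
+        config = AzureConfig.from_env()
+        config.enable_ssh = True  # allow VNC debugging over SSH tunnels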
""" subscription_id: str resource_group: str workspace_name: str - vm_size: str = "Standard_D4s_v5" # Better nested virt support than v3 + vm_size: str = "Standard_D4ds_v5" # D4ds_v5 supported by Azure ML compute vm_security_type: str = "Standard" # NOT TrustedLaunch (disables nested virt) enable_nested_virtualization: bool = True idle_timeout_minutes: int = 60 - docker_image: str = "windowsarena/winarena:latest" # Public Docker Hub image + # Custom WAA image with unattended installation support + # Use public.ecr.aws image (not vanilla windowsarena/winarena) because: + # - Modern dockurr/windows base (auto-downloads Windows 11) + # - FirstLogonCommands patches for unattended installation + # - Python 3.9 with transformers 4.46.2 (compatible with navi agent) + # Build with: uv run python -m openadapt_evals.benchmarks.cli waa-image build-push + docker_image: str = "public.ecr.aws/g3w3k7s5/waa-auto:latest" storage_account: str | None = None use_managed_identity: bool = False managed_identity_name: str | None = None @@ -263,6 +271,9 @@ class AzureConfig: max_spot_price: float = 0.5 # Maximum hourly price for spot instances spot_eviction_policy: str = "Deallocate" # Deallocate or Delete environment: str = "production" # production or development + # SSH/VNC access for debugging parallel workers + enable_ssh: bool = True # Enable SSH for VNC access to workers + ssh_public_key_path: str = "~/.ssh/id_rsa.pub" # Path to SSH public key @classmethod def from_env(cls) -> AzureConfig: @@ -274,9 +285,9 @@ def from_env(cls) -> AzureConfig: AZURE_ML_WORKSPACE_NAME Optional env vars: - AZURE_VM_SIZE (default: Standard_D4s_v5) + AZURE_VM_SIZE (default: Standard_D4ds_v5) AZURE_VM_SECURITY_TYPE (default: Standard) - AZURE_DOCKER_IMAGE (default: windowsarena/winarena:latest) + AZURE_DOCKER_IMAGE (default: public.ecr.aws/g3w3k7s5/waa-auto:latest) AZURE_ENABLE_TIERED_VMS (default: false) - Auto-select VM size by task complexity AZURE_USE_SPOT_INSTANCES (default: false) - Use spot instances for cost savings AZURE_MAX_SPOT_PRICE (default: 0.5) - Maximum hourly price for spot instances @@ -321,11 +332,11 @@ def from_env(cls) -> AzureConfig: subscription_id=subscription_id, resource_group=resource_group, workspace_name=workspace_name, - vm_size=os.getenv("AZURE_VM_SIZE", "Standard_D4s_v5"), + vm_size=os.getenv("AZURE_VM_SIZE", "Standard_D4ds_v5"), vm_security_type=os.getenv("AZURE_VM_SECURITY_TYPE", "Standard"), docker_image=os.getenv( "AZURE_DOCKER_IMAGE", - "windowsarena/winarena:latest" + "public.ecr.aws/g3w3k7s5/waa-auto:latest" ), enable_tiered_vms=enable_tiered_vms, use_spot_instances=use_spot_instances, @@ -359,9 +370,10 @@ class WorkerState: error: str | None = None start_time: float | None = None end_time: float | None = None + job_name: str | None = None # Azure ML job name for this worker # Cost tracking vm_tier: str = "medium" # simple, medium, or complex - vm_size: str = "Standard_D4s_v5" # Actual VM size used + vm_size: str = "Standard_D4ds_v5" # Actual VM size used is_spot: bool = False # Whether spot instance was used hourly_cost: float = 0.192 # Actual hourly cost total_cost: float = 0.0 # Total cost for this worker @@ -488,22 +500,22 @@ def _get_credential(self): def create_compute_instance( self, name: str, - startup_script: str | None = None, + startup_script_path: str | None = None, vm_size: str | None = None, use_spot: bool | None = None, ) -> str: - """Create a compute instance. + """Create a compute instance with startup script. Args: name: Compute instance name. 
- startup_script: Optional startup script content (not yet implemented). + startup_script_path: Path to startup script in datastore (e.g., 'Users/me/startup.sh'). vm_size: Override VM size (uses config.vm_size if None). use_spot: Override spot instance setting (uses config.use_spot_instances if None). Returns: Compute instance name. """ - from azure.ai.ml.entities import ComputeInstance + from azure.ai.ml.entities import ComputeInstance, ScriptReference, SetupScripts # Check if already exists try: @@ -520,10 +532,45 @@ def create_compute_instance( # CRITICAL: Use Standard security type for nested virtualization # TrustedLaunch (Azure default since 2024) disables nested virtualization + + # Configure SSH settings for VNC access + ssh_settings = None + if self.config.enable_ssh: + from azure.ai.ml.entities import ComputeInstanceSshSettings + + # Read SSH public key + ssh_key_path = os.path.expanduser(self.config.ssh_public_key_path) + if os.path.exists(ssh_key_path): + with open(ssh_key_path) as f: + ssh_public_key = f.read().strip() + ssh_settings = ComputeInstanceSshSettings( + ssh_public_access="Enabled", + admin_public_key=ssh_public_key, + ) + logger.info(f"SSH enabled for {name} (key: {ssh_key_path})") + else: + logger.warning( + f"SSH key not found at {ssh_key_path}. " + f"SSH access disabled for {name}. " + f"Generate with: ssh-keygen -t rsa -b 4096" + ) + + # Configure startup script to stop conflicting services + setup_scripts = None + if startup_script_path: + startup_script_ref = ScriptReference( + path=startup_script_path, + timeout_minutes=10, + ) + setup_scripts = SetupScripts(startup_script=startup_script_ref) + logger.info(f"Startup script configured: {startup_script_path}") + compute = ComputeInstance( name=name, size=vm_size, idle_time_before_shutdown_minutes=self.config.idle_timeout_minutes, + ssh_settings=ssh_settings, + setup_scripts=setup_scripts, ) # Configure spot instance if enabled @@ -616,6 +663,77 @@ def get_compute_status(self, name: str) -> str: compute = self.client.compute.get(name) return compute.state + def get_compute_ssh_info(self, name: str) -> dict[str, Any] | None: + """Get SSH connection info for a compute instance. + + Args: + name: Compute instance name. + + Returns: + Dict with ssh_host, ssh_port, ssh_user, or None if SSH not available. 
+ Example: {"ssh_host": "10.0.0.4", "ssh_port": 50000, "ssh_user": "azureuser"} + """ + try: + compute = self.client.compute.get(name) + + # Check if SSH settings exist + if not hasattr(compute, "ssh_settings") or compute.ssh_settings is None: + logger.warning(f"SSH not enabled for compute instance {name}") + return None + + # Get SSH connection details from compute properties + # Azure ML provides these in the connectivity_endpoints or ssh_settings + ssh_info = { + "ssh_user": "azureuser", # Azure ML always uses azureuser + "ssh_host": None, + "ssh_port": 50000, # Default SSH port for Azure ML compute + } + + # Try to get IP from various possible locations + if hasattr(compute, "public_ip_address") and compute.public_ip_address: + ssh_info["ssh_host"] = compute.public_ip_address + elif hasattr(compute, "connectivity_endpoints"): + endpoints = compute.connectivity_endpoints + if endpoints and hasattr(endpoints, "public_ip_address"): + ssh_info["ssh_host"] = endpoints.public_ip_address + if endpoints and hasattr(endpoints, "ssh_port"): + ssh_info["ssh_port"] = endpoints.ssh_port + + # Alternative: Check properties dict + if ssh_info["ssh_host"] is None and hasattr(compute, "properties"): + props = compute.properties + if isinstance(props, dict): + if "connectivityEndpoints" in props: + ep = props["connectivityEndpoints"] + ssh_info["ssh_host"] = ep.get("publicIpAddress") + ssh_info["ssh_port"] = ep.get("sshPort", 50000) + + if ssh_info["ssh_host"] is None: + logger.warning(f"Could not determine SSH host for {name}") + return None + + return ssh_info + + except Exception as e: + logger.warning(f"Failed to get SSH info for {name}: {e}") + return None + + def get_all_workers_ssh_info(self, worker_names: list[str]) -> dict[str, dict]: + """Get SSH info for all workers. + + Args: + worker_names: List of compute instance names. + + Returns: + Dict mapping worker name to SSH info. + """ + ssh_info = {} + for name in worker_names: + info = self.get_compute_ssh_info(name) + if info: + ssh_info[name] = info + return ssh_info + def submit_job( self, compute_name: str, @@ -624,49 +742,135 @@ def submit_job( display_name: str | None = None, timeout_hours: float = 4.0, ) -> str: - """Submit a job to a compute instance. + """Submit a job to a compute instance using SDK V1 with Docker. + + This uses the Azure ML SDK V1 approach with DockerConfiguration, + which is required to run WAA because: + 1. Docker runs the job INSIDE the container (not just using it as env) + 2. NET_ADMIN capability is needed for QEMU networking + 3. entry_setup.sh is called to start Windows VM Args: compute_name: Target compute instance. - command: Command to run. + command: Command to run (passed to entry script). environment_variables: Environment variables. display_name: Job display name. - timeout_hours: Maximum job duration in hours (default: 4). The job - will be automatically canceled after this duration. + timeout_hours: Maximum job duration in hours (default: 4). Returns: - Job name/ID. + Job run ID. 
""" - from azure.ai.ml import command as ml_command - from azure.ai.ml.entities import Environment, CommandJobLimits + # Use SDK V1 for DockerConfiguration support + from azureml.core import Workspace, Experiment, Environment + from azureml.core.runconfig import RunConfiguration, DockerConfiguration + from azureml.core.compute import ComputeTarget + from azureml.core import ScriptRunConfig + + # Connect to workspace using V1 SDK + ws = Workspace( + subscription_id=self.config.subscription_id, + resource_group=self.config.resource_group, + workspace_name=self.config.workspace_name, + ) - # Create environment with Docker image - env = Environment( + # Create environment from Docker image + env = Environment.from_docker_image( + name=f"waa-env-{int(time.time())}", image=self.config.docker_image, - name="waa-agent-env", ) - import uuid - timestamp = int(time.time()) - unique_id = str(uuid.uuid4())[:8] - job_name = f"waa-{compute_name}-{timestamp}-{unique_id}" - - # Convert hours to seconds for Azure ML timeout - timeout_seconds = int(timeout_hours * 3600) + # Configure Docker with NET_ADMIN capability (required for QEMU networking) + docker_config = DockerConfiguration( + use_docker=True, + shared_volumes=True, + arguments=["--cap-add", "NET_ADMIN"], + shm_size="16g", # Shared memory for QEMU + ) - job = ml_command( - command=command, - environment=env, - compute=compute_name, - name=job_name, - display_name=display_name or f"waa-job-{compute_name}", - environment_variables=environment_variables or {}, - limits=CommandJobLimits(timeout=timeout_seconds), + # Set up run configuration + run_config = RunConfiguration() + run_config.target = ComputeTarget(workspace=ws, name=compute_name) + run_config.environment = env + run_config.docker = docker_config + run_config.environment_variables = environment_variables or {} + + # Get the azure_files directory (contains run_entry.py) + azure_files_dir = Path(__file__).parent / "azure_files" + + # Parse command to extract arguments for run_entry.py + # Command format: "cd /client && python run.py --agent_name X --model Y ..." + # We need to convert this to arguments for run_entry.py + import shlex + args = self._parse_command_to_args(command) + + # Create script run config + src = ScriptRunConfig( + source_directory=str(azure_files_dir), + script="run_entry.py", + arguments=args, + run_config=run_config, ) - submitted = self.client.jobs.create_or_update(job) - logger.info(f"Job submitted: {submitted.name} (timeout: {timeout_hours}h)") - return submitted.name + # Submit to experiment + exp_name = display_name or f"waa-{compute_name}" + experiment = Experiment(workspace=ws, name=exp_name) + run = experiment.submit(config=src) + + logger.info(f"Job submitted: {run.id} (portal: {run.get_portal_url()})") + return run.id + + def _parse_command_to_args(self, command: str) -> list[str]: + """Parse the WAA command into arguments for run_entry.py. + + Args: + command: WAA command string like: + "cd /client && python run.py --agent_name navi --model gpt-4o ..." 
+ + Returns: + List of arguments for run_entry.py: + [output_path, exp_name, num_workers, worker_id, agent, model, max_steps] + """ + # Default values + output_path = "/outputs" + exp_name = "waa_eval" + num_workers = "1" + worker_id = "0" + agent = "navi" + model = "gpt-4o" + max_steps = "15" + + # Parse command to extract values + if "--worker_id" in command: + match = re.search(r"--worker_id\s+(\d+)", command) + if match: + worker_id = match.group(1) + + if "--num_workers" in command: + match = re.search(r"--num_workers\s+(\d+)", command) + if match: + num_workers = match.group(1) + + if "--agent_name" in command: + match = re.search(r"--agent_name\s+(\w+)", command) + if match: + agent = match.group(1) + + if "--model" in command: + match = re.search(r"--model\s+([\w\-\.]+)", command) + if match: + model = match.group(1) + + if "--max_steps" in command: + match = re.search(r"--max_steps\s+(\d+)", command) + if match: + max_steps = match.group(1) + + if "--result_dir" in command: + match = re.search(r"--result_dir\s+(\S+)", command) + if match: + output_path = match.group(1) + + return [output_path, exp_name, num_workers, worker_id, agent, model, max_steps] def wait_for_job(self, job_name: str, timeout_seconds: int = 3600) -> dict: """Wait for a job to complete. @@ -690,6 +894,65 @@ def wait_for_job(self, job_name: str, timeout_seconds: int = 3600) -> dict: raise TimeoutError(f"Job {job_name} did not complete within {timeout_seconds}s") + def get_job_logs(self, job_name: str, tail: int | None = None) -> str: + """Fetch logs for a job (non-streaming). + + Args: + job_name: Job name/ID. + tail: If specified, return only the last N lines. + + Returns: + Log content as string. + """ + try: + # Use az ml job download to get logs + import tempfile + with tempfile.TemporaryDirectory() as temp_dir: + result = subprocess.run( + [ + "az", + "ml", + "job", + "download", + "--name", + job_name, + "--workspace-name", + self.config.workspace_name, + "--resource-group", + self.config.resource_group, + "--download-path", + temp_dir, + "--outputs", + "logs", + ], + capture_output=True, + text=True, + timeout=60, + ) + + if result.returncode != 0: + logger.warning(f"Failed to download logs: {result.stderr}") + return "" + + # Find and read the user_logs/std_log.txt file + log_dir = Path(temp_dir) + log_files = list(log_dir.rglob("std_log.txt")) + list(log_dir.rglob("stdout.txt")) + + if not log_files: + return "" + + logs = log_files[0].read_text() + + if tail: + lines = logs.split("\n") + logs = "\n".join(lines[-tail:]) + + return logs + + except Exception as e: + logger.warning(f"Error fetching logs for {job_name}: {e}") + return "" + def stream_job_logs( self, job_name: str, @@ -744,6 +1007,172 @@ def _read_logs(): return process +class WorkerVNCManager: + """Manages SSH tunnels for VNC access to parallel workers. + + Provides VNC access to multiple Azure ML compute instances via SSH tunnels. + Each worker gets a unique local port (8006, 8007, 8008, ...) mapped to its + VNC port (8006) via SSH. + + Example: + manager = WorkerVNCManager(ml_client) + manager.start_tunnels(["worker0", "worker1", "worker2"]) + # Access VNC at localhost:8006, localhost:8007, localhost:8008 + + # Get status + print(manager.get_status()) + + # Cleanup + manager.stop_all_tunnels() + """ + + VNC_REMOTE_PORT = 8006 # noVNC port inside Windows container + VNC_BASE_LOCAL_PORT = 8006 # Local ports start at 8006 + + def __init__(self, ml_client: AzureMLClient): + """Initialize VNC manager. 
+ + Args: + ml_client: AzureMLClient instance for getting SSH info. + """ + self.ml_client = ml_client + self.tunnels: dict[str, subprocess.Popen] = {} # worker_name -> tunnel process + self.local_ports: dict[str, int] = {} # worker_name -> local port + + def start_tunnel(self, worker_name: str, local_port: int | None = None) -> int | None: + """Start SSH tunnel for a single worker. + + Args: + worker_name: Compute instance name. + local_port: Local port to use (auto-assigned if None). + + Returns: + Local port number, or None if tunnel failed. + """ + # Get SSH connection info + ssh_info = self.ml_client.get_compute_ssh_info(worker_name) + if not ssh_info: + logger.error(f"Cannot start tunnel for {worker_name}: no SSH info") + return None + + # Assign local port + if local_port is None: + # Find next available port + used_ports = set(self.local_ports.values()) + local_port = self.VNC_BASE_LOCAL_PORT + while local_port in used_ports: + local_port += 1 + + # Build SSH tunnel command + ssh_cmd = [ + "ssh", + "-N", # Don't execute remote command + "-L", f"{local_port}:localhost:{self.VNC_REMOTE_PORT}", + "-p", str(ssh_info["ssh_port"]), + "-o", "StrictHostKeyChecking=no", + "-o", "UserKnownHostsFile=/dev/null", + "-o", "ServerAliveInterval=60", + f"{ssh_info['ssh_user']}@{ssh_info['ssh_host']}", + ] + + try: + # Start tunnel process + process = subprocess.Popen( + ssh_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + + # Give it a moment to establish + time.sleep(1) + + # Check if it's still running + if process.poll() is not None: + stderr = process.stderr.read().decode() if process.stderr else "" + logger.error(f"Tunnel failed for {worker_name}: {stderr}") + return None + + self.tunnels[worker_name] = process + self.local_ports[worker_name] = local_port + logger.info(f"VNC tunnel started: localhost:{local_port} -> {worker_name}:8006") + return local_port + + except Exception as e: + logger.error(f"Failed to start tunnel for {worker_name}: {e}") + return None + + def start_tunnels(self, worker_names: list[str]) -> dict[str, int]: + """Start SSH tunnels for multiple workers. + + Args: + worker_names: List of compute instance names. + + Returns: + Dict mapping worker name to local port. + """ + results = {} + for i, worker_name in enumerate(worker_names): + local_port = self.VNC_BASE_LOCAL_PORT + i + port = self.start_tunnel(worker_name, local_port) + if port: + results[worker_name] = port + return results + + def stop_tunnel(self, worker_name: str) -> None: + """Stop SSH tunnel for a worker. + + Args: + worker_name: Compute instance name. + """ + if worker_name in self.tunnels: + process = self.tunnels[worker_name] + process.terminate() + try: + process.wait(timeout=5) + except subprocess.TimeoutExpired: + process.kill() + del self.tunnels[worker_name] + del self.local_ports[worker_name] + logger.info(f"Tunnel stopped for {worker_name}") + + def stop_all_tunnels(self) -> None: + """Stop all SSH tunnels.""" + for worker_name in list(self.tunnels.keys()): + self.stop_tunnel(worker_name) + + def get_status(self) -> dict[str, dict]: + """Get status of all tunnels. + + Returns: + Dict mapping worker name to status info. 
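+
+        Example entry (illustrative values):
+
+            {"waa-worker-0": {"local_port": 8006,
+                              "vnc_url": "http://localhost:8006",
+                              "running": True, "pid": 12345}}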
+ """ + status = {} + for worker_name, process in self.tunnels.items(): + is_running = process.poll() is None + status[worker_name] = { + "local_port": self.local_ports.get(worker_name), + "vnc_url": f"http://localhost:{self.local_ports.get(worker_name)}", + "running": is_running, + "pid": process.pid, + } + return status + + def print_vnc_urls(self) -> None: + """Print VNC URLs for all active tunnels.""" + status = self.get_status() + if not status: + print("No active VNC tunnels") + return + + print("\n=== VNC Access URLs ===") + for worker_name, info in sorted(status.items()): + if info["running"]: + print(f" {worker_name}: {info['vnc_url']}") + else: + print(f" {worker_name}: (tunnel down)") + print() + + class AzureWAAOrchestrator: """Orchestrates WAA evaluation across multiple Azure VMs. @@ -781,6 +1210,7 @@ def __init__( self.waa_repo_path = Path(waa_repo_path) self.experiment_name = experiment_name self.ml_client = AzureMLClient(config) + self.vnc_manager = WorkerVNCManager(self.ml_client) self._current_run: EvaluationRun | None = None self._cleanup_registered = False self._interrupted = False @@ -791,8 +1221,10 @@ def _setup_signal_handlers(self) -> None: return def signal_handler(sig, frame): - logger.warning("\n⚠️ Interrupted! Cleaning up compute instances...") + logger.warning("\n⚠️ Interrupted! Cleaning up...") self._interrupted = True + # Stop VNC tunnels first + self.vnc_manager.stop_all_tunnels() if self._current_run and self._current_run.workers: self._cleanup_workers(self._current_run.workers) sys.exit(1) @@ -858,6 +1290,7 @@ def run_evaluation( cleanup_on_complete: bool = True, cleanup_stale_on_start: bool = True, timeout_hours: float = 4.0, + enable_vnc: bool = False, ) -> list[BenchmarkResult]: """Run evaluation across multiple Azure VMs. @@ -871,6 +1304,8 @@ def run_evaluation( cleanup_stale_on_start: Whether to cleanup stale instances before starting. timeout_hours: Maximum job duration in hours (default: 4). Jobs are auto-canceled after this duration to prevent runaway costs. + enable_vnc: Whether to start VNC tunnels for debugging (default: False). + When enabled, VNC is accessible at localhost:8006, 8007, etc. Returns: List of BenchmarkResult for all tasks. @@ -957,6 +1392,15 @@ def run_evaluation( self._provision_workers(workers) print(f" VM(s) ready") + # Start VNC tunnels if enabled (for debugging) + if enable_vnc: + print(f"[2.5/5] Starting VNC tunnels for debugging...") + vnc_ports = self.start_vnc_tunnels(workers) + if vnc_ports: + print(f" VNC available at: {', '.join(f'localhost:{p}' for p in vnc_ports.values())}") + else: + print(f" Warning: VNC tunnels could not be established") + # Submit jobs to workers print(f"[3/5] Submitting evaluation jobs...") self._submit_worker_jobs(workers, task_batches, agent, max_steps_per_task, timeout_hours) @@ -997,6 +1441,79 @@ def _distribute_tasks( batches[i % num_workers].append(task) return batches + def start_vnc_tunnels(self, workers: list[WorkerState] | None = None) -> dict[str, int]: + """Start VNC tunnels for workers. + + This can be called during evaluation to enable VNC debugging, + or standalone to connect to existing workers. + + Args: + workers: List of workers to connect to. If None, uses current run's workers. + + Returns: + Dict mapping worker name to local VNC port. 
+ """ + if workers is None and self._current_run: + workers = self._current_run.workers + + if not workers: + logger.warning("No workers to connect to") + return {} + + # Get provisioned worker names + worker_names = [ + w.compute_name for w in workers + if w.status in ("provisioned", "running") + ] + + if not worker_names: + logger.warning("No provisioned workers found") + return {} + + print(f"\n[VNC] Starting tunnels for {len(worker_names)} worker(s)...") + ports = self.vnc_manager.start_tunnels(worker_names) + + if ports: + print("\n=== VNC Access URLs ===") + for name, port in sorted(ports.items()): + print(f" {name}: http://localhost:{port}") + print("\nOpen these URLs in a browser to view Windows VMs") + print("Tip: Arrange browser windows side-by-side for multi-worker view\n") + + return ports + + def start_vnc_for_existing_workers(self, prefix: str = "waa") -> dict[str, int]: + """Start VNC tunnels for existing workers (standalone debugging). + + Useful for connecting to workers from a previous run or from another + terminal while evaluation is in progress. + + Args: + prefix: Worker name prefix to filter. + + Returns: + Dict mapping worker name to local VNC port. + """ + instances = self.ml_client.list_compute_instances(prefix=prefix) + running = [i for i in instances if i.get("state") == "Running"] + + if not running: + print(f"No running compute instances found with prefix '{prefix}'") + return {} + + worker_names = [i["name"] for i in running] + print(f"\nFound {len(worker_names)} running worker(s): {', '.join(worker_names)}") + + ports = self.vnc_manager.start_tunnels(worker_names) + + if ports: + print("\n=== VNC Access URLs ===") + for name, port in sorted(ports.items()): + print(f" {name}: http://localhost:{port}") + print() + + return ports + def _provision_workers(self, workers: list[WorkerState]) -> None: """Provision all worker VMs in parallel with cost-optimized sizing.""" with ThreadPoolExecutor(max_workers=len(workers)) as executor: @@ -1043,29 +1560,41 @@ def _submit_worker_jobs( max_steps: Maximum steps per task. timeout_hours: Maximum job duration in hours. 
""" + # Serialize agent config for remote workers + agent_config = self._serialize_agent_config(agent) + for worker, tasks in zip(workers, task_batches): if worker.status == "failed": continue try: - # Serialize task IDs for this worker - task_ids = [t.task_id for t in tasks] - task_ids_json = json.dumps(task_ids) + # Build command using vanilla WAA run.py + # Uses --worker_id and --num_workers for task distribution + command = self._build_worker_command( + worker_id=worker.worker_id, + num_workers=len(workers), + max_steps=max_steps, + agent_config=agent_config, + ) - # Build command - command = self._build_worker_command(task_ids_json, max_steps, agent) + # Environment variables for WAA runner + # OPENAI_API_KEY is required by vanilla WAA + env_vars = { + "WAA_WORKER_ID": str(worker.worker_id), + "WAA_NUM_WORKERS": str(len(workers)), + "WAA_MAX_STEPS": str(max_steps), + **agent_config.get("env_vars", {}), + } - # Submit job with timeout - self.ml_client.submit_job( + # Submit job with timeout and capture job_name + job_name = self.ml_client.submit_job( compute_name=worker.compute_name, command=command, - environment_variables={ - "WAA_TASK_IDS": task_ids_json, - "WAA_MAX_STEPS": str(max_steps), - }, + environment_variables=env_vars, display_name=f"waa-worker-{worker.worker_id}", timeout_hours=timeout_hours, ) + worker.job_name = job_name worker.status = "running" worker.start_time = time.time() @@ -1074,31 +1603,88 @@ def _submit_worker_jobs( worker.error = str(e) logger.error(f"Failed to submit job for worker {worker.worker_id}: {e}") + def _serialize_agent_config(self, agent: BenchmarkAgent) -> dict[str, Any]: + """Serialize agent configuration for remote execution. + + Extracts API keys and model config that can be passed via environment + variables to remote workers running vanilla WAA. + + Args: + agent: The agent to serialize. + + Returns: + Dict with: + - agent_name: WAA agent name (e.g., "navi") + - model: Model name (e.g., "gpt-4o") + - env_vars: Dict of environment variables to set + """ + config: dict[str, Any] = { + "agent_name": "navi", # Default WAA agent + "model": "gpt-4o", # Default model + "env_vars": {}, + } + + # Check if agent has provider/model info (ApiAgent pattern) + if hasattr(agent, "provider"): + if agent.provider == "openai": + config["model"] = getattr(agent, "model", "gpt-4o") + # Get API key from agent or environment + api_key = getattr(agent, "api_key", None) or os.getenv("OPENAI_API_KEY") + if api_key: + config["env_vars"]["OPENAI_API_KEY"] = api_key + elif agent.provider == "anthropic": + # WAA's navi agent supports Azure OpenAI, but we can use direct OpenAI + # For Claude, we'd need custom agent code on the worker + config["model"] = getattr(agent, "model", "claude-sonnet-4-5-20250929") + api_key = getattr(agent, "api_key", None) or os.getenv("ANTHROPIC_API_KEY") + if api_key: + config["env_vars"]["ANTHROPIC_API_KEY"] = api_key + + # Check for OpenAI API key in environment as fallback + if "OPENAI_API_KEY" not in config["env_vars"]: + openai_key = os.getenv("OPENAI_API_KEY") + if openai_key: + config["env_vars"]["OPENAI_API_KEY"] = openai_key + + return config + def _build_worker_command( self, - task_ids_json: str, + worker_id: int, + num_workers: int, max_steps: int, - agent: BenchmarkAgent, + agent_config: dict[str, Any], ) -> str: """Build the command to run on a worker VM. + Uses vanilla WAA's run.py with --worker_id and --num_workers for + built-in task distribution. This matches Microsoft's official + Azure deployment pattern. 
+ Args: - task_ids_json: JSON string of task IDs for this worker. + worker_id: This worker's ID (0-indexed). + num_workers: Total number of workers. max_steps: Maximum steps per task. - agent: Agent to run (TODO: serialize agent config for remote execution). + agent_config: Serialized agent configuration. + + Returns: + Shell command string to execute in the WAA container. """ - # TODO: Serialize agent config and pass to remote worker - # For now, workers use a default agent configuration - _ = agent # Reserved for agent serialization + agent_name = agent_config.get("agent_name", "navi") + model = agent_config.get("model", "gpt-4o") + # WAA Docker image has client at /client (see Dockerfile-WinArena) - # The run.py script is at /client/run.py (not a module, so use python run.py) + # The run.py script uses --worker_id and --num_workers for task distribution + # Results are written to --result_dir return f""" - cd /client && \ - python run.py \ - --task_ids '{task_ids_json}' \ - --max_steps {max_steps} \ - --output_dir /outputs - """ +cd /client && python run.py \\ + --agent_name {agent_name} \\ + --model {model} \\ + --worker_id {worker_id} \\ + --num_workers {num_workers} \\ + --max_steps {max_steps} \\ + --result_dir /outputs +""" @retry( stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=4, max=60), @@ -1177,7 +1763,11 @@ def _wait_and_collect_results( workers: list[WorkerState], on_worker_complete: Callable[[WorkerState], None] | None, ) -> list[BenchmarkResult]: - """Wait for all workers and collect results.""" + """Wait for all workers and collect results. + + Polls Azure ML job status (not compute status) to determine completion. + Jobs can complete with status: Completed, Failed, Canceled. + """ all_results: list[BenchmarkResult] = [] # Poll workers for completion @@ -1186,14 +1776,22 @@ def _wait_and_collect_results( while pending_workers: for worker in pending_workers[:]: try: - status = self.ml_client.get_compute_status(worker.compute_name) + # Check job status (not compute status) + if not worker.job_name: + logger.warning(f"Worker {worker.worker_id} has no job_name") + worker.status = "failed" + worker.error = "No job submitted" + pending_workers.remove(worker) + continue + + job = self.ml_client.client.jobs.get(worker.job_name) + job_status = job.status - # Check if job completed (simplified - real impl would check job status) - if status in ["Stopped", "Deallocated"]: + if job_status in ["Completed", "Finished"]: worker.status = "completed" worker.end_time = time.time() - # Fetch results from blob storage + # Fetch results from job outputs results = self._fetch_worker_results(worker) worker.results = results all_results.extend(results) @@ -1207,6 +1805,24 @@ def _wait_and_collect_results( f"{len(results)} results" ) + elif job_status in ["Failed", "Canceled"]: + worker.status = "failed" + worker.error = f"Job {job_status}" + worker.end_time = time.time() + + # Still try to fetch any partial results + results = self._fetch_worker_results(worker) + worker.results = results + all_results.extend(results) + + pending_workers.remove(worker) + logger.warning( + f"Worker {worker.worker_id} failed ({job_status}): " + f"{len(results)} partial results" + ) + + # else: still running, continue polling + except Exception as e: logger.warning(f"Error checking worker {worker.worker_id}: {e}") @@ -1216,23 +1832,145 @@ def _wait_and_collect_results( return all_results def _fetch_worker_results(self, worker: WorkerState) -> list[BenchmarkResult]: - """Fetch results from a 
worker's output storage.""" - # In a real implementation, this would download results from blob storage - # For now, return placeholder results + """Fetch results from a worker's output storage. + + Downloads job outputs from Azure ML and parses WAA result files. + WAA writes results in the format: {result_dir}/{domain}/{task_id}/result.txt + + Args: + worker: WorkerState with job_name set. + + Returns: + List of BenchmarkResult for each task. + """ results = [] - for task_id in worker.assigned_tasks: - results.append( - BenchmarkResult( - task_id=task_id, - success=False, # Placeholder - score=0.0, - num_steps=0, + + if not worker.job_name: + logger.warning(f"Worker {worker.worker_id} has no job_name, returning empty results") + for task_id in worker.assigned_tasks: + results.append( + BenchmarkResult( + task_id=task_id, + success=False, + score=0.0, + num_steps=0, + error_message="No job_name available", + ) ) - ) + return results + + try: + # Download job outputs to a temp directory + import tempfile + with tempfile.TemporaryDirectory() as temp_dir: + output_path = Path(temp_dir) + + # Download all outputs from the job + self.ml_client.client.jobs.download( + name=worker.job_name, + download_path=output_path, + output_name="default", # Default output location + ) + + # Parse WAA result files + # WAA writes: {result_dir}/{action_space}/{observation_type}/{model}/{trial_id}/{domain}/{task_id}/result.txt + # But we simplified to: /outputs/{domain}/{task_id}/result.txt + outputs_dir = output_path / "outputs" + if not outputs_dir.exists(): + # Try alternative path structure + outputs_dir = output_path + + results = self._parse_waa_results(outputs_dir, worker.assigned_tasks) + + except Exception as e: + logger.error(f"Failed to fetch results for worker {worker.worker_id}: {e}") + # Return failed results for all tasks + for task_id in worker.assigned_tasks: + results.append( + BenchmarkResult( + task_id=task_id, + success=False, + score=0.0, + num_steps=0, + error_message=f"Failed to fetch results: {e}", + ) + ) + + return results + + def _parse_waa_results( + self, + outputs_dir: Path, + task_ids: list[str], + ) -> list[BenchmarkResult]: + """Parse WAA result files from downloaded outputs. + + WAA writes result.txt files containing a single float (0.0 or 1.0). + + Args: + outputs_dir: Directory containing WAA outputs. + task_ids: List of expected task IDs. + + Returns: + List of BenchmarkResult for each task. 
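+ + Example (illustrative): for task_id notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS, + the score is read from {outputs_dir}/notepad/notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS/result.txt, + whose entire body is "0.0" or "1.0".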
+ """ + results = [] + + for task_id in task_ids: + # WAA task_id format: UUID-WOS/wos (e.g., notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS) + # Result path: {domain}/{task_id}/result.txt + parts = task_id.rsplit("_", 1) + if len(parts) == 2: + domain = parts[0] + else: + domain = task_id + + result_file = outputs_dir / domain / task_id / "result.txt" + + if result_file.exists(): + try: + score = float(result_file.read_text().strip()) + results.append( + BenchmarkResult( + task_id=task_id, + success=score >= 1.0, + score=score, + num_steps=0, # WAA doesn't expose step count in result.txt + ) + ) + except (ValueError, OSError) as e: + logger.warning(f"Failed to parse result for {task_id}: {e}") + results.append( + BenchmarkResult( + task_id=task_id, + success=False, + score=0.0, + num_steps=0, + error_message=f"Failed to parse result: {e}", + ) + ) + else: + # Result file not found - task may have failed + logger.warning(f"Result file not found for {task_id}: {result_file}") + results.append( + BenchmarkResult( + task_id=task_id, + success=False, + score=0.0, + num_steps=0, + error_message="Result file not found", + ) + ) + return results def _cleanup_workers(self, workers: list[WorkerState]) -> None: - """Delete all worker VMs.""" + """Delete all worker VMs and stop VNC tunnels.""" + # Stop VNC tunnels first + if self.vnc_manager.tunnels: + logger.info("Stopping VNC tunnels...") + self.vnc_manager.stop_all_tunnels() + logger.info("Cleaning up worker VMs...") with ThreadPoolExecutor(max_workers=len(workers)) as executor: futures = [ diff --git a/openadapt_evals/benchmarks/azure_files/compute-instance-startup.sh b/openadapt_evals/benchmarks/azure_files/compute-instance-startup.sh new file mode 100644 index 0000000..fc77d1e --- /dev/null +++ b/openadapt_evals/benchmarks/azure_files/compute-instance-startup.sh @@ -0,0 +1,20 @@ +#!/bin/bash +# Startup script for Azure ML Compute Instance +# Prepares the VM for running WAA Docker container with QEMU + +echo "Initializing Compute VM for WAA..." + +# Install dos2unix (for Windows line endings) +sudo apt-get update && sudo apt-get install -y dos2unix + +# Stop services that conflict with WAA networking +# DNS on port 53 (needed by QEMU's built-in DNS) +sudo systemctl stop systemd-resolved 2>/dev/null || true + +# Named DNS service +sudo systemctl stop named.service 2>/dev/null || true + +# Nginx on port 80 (sometimes runs on Azure ML instances) +sudo service nginx stop 2>/dev/null || true + +echo "Compute VM startup complete" diff --git a/openadapt_evals/benchmarks/azure_files/run_entry.py b/openadapt_evals/benchmarks/azure_files/run_entry.py new file mode 100644 index 0000000..a891efe --- /dev/null +++ b/openadapt_evals/benchmarks/azure_files/run_entry.py @@ -0,0 +1,120 @@ +"""Entry script for WAA evaluation on Azure ML. + +This script runs INSIDE the WAA Docker container and: +1. Sets up storage paths +2. Starts the Windows VM via /entry_setup.sh +3. 
Runs the WAA client with the specified configuration + +Based on Microsoft's WindowsAgentArena/scripts/azure_files/run_entry.py +""" + +import os +import subprocess +import sys +import time + + +def main(): +    """Main entry point.""" + print("=" * 60) + print("WAA Azure Entry Script Starting") + print("=" * 60) + + # Parse arguments + if len(sys.argv) < 8: + print(f"Usage: {sys.argv[0]} <output_path> <exp_name> <num_workers> <worker_id> <agent> <model> <max_steps>") + print(f"Got: {sys.argv}") + sys.exit(1) + + output_path = sys.argv[1] + exp_name = sys.argv[2] + num_workers = sys.argv[3] + worker_id = sys.argv[4] + agent = sys.argv[5] + model = sys.argv[6] + max_steps = sys.argv[7] + + print(f"Configuration:") + print(f" Output path: {output_path}") + print(f" Experiment: {exp_name}") + print(f" Workers: {num_workers} (this is worker {worker_id})") + print(f" Agent: {agent}") + print(f" Model: {model}") + print(f" Max steps: {max_steps}") + + # Create result directory + result_dir = os.path.join(output_path, exp_name, f"worker_{worker_id}") + os.makedirs(result_dir, exist_ok=True) + print(f"Result directory: {result_dir}") + + # Write start marker + with open(os.path.join(result_dir, "started.txt"), "w") as f: + f.write(f"Started at {time.ctime()}\n") + + # Check NET_ADMIN capability (required for QEMU networking) + print("\nChecking NET_ADMIN capability...") + try: + subprocess.check_call( + ["ip", "link", "add", "dummy0", "type", "dummy"], + stderr=subprocess.DEVNULL + ) + subprocess.check_call( + ["ip", "link", "del", "dummy0"], + stderr=subprocess.DEVNULL + ) + print(" NET_ADMIN: ENABLED") + except subprocess.CalledProcessError: + print(" NET_ADMIN: DISABLED (WAA may not work correctly)") + + # Start Windows VM via entry_setup.sh + # This script is part of the WAA Docker image and: + # - Starts QEMU with Windows 11 + # - Waits for Windows to boot + # - Starts the WAA server + print("\n" + "=" * 60) + print("Starting Windows VM (this takes 5-15 minutes)...") + print("=" * 60) + + entry_script = "/entry_setup.sh" + if os.path.exists(entry_script): + result = os.system(entry_script) + if result != 0: + print(f"WARNING: entry_setup.sh returned {result}") + else: + print(f"WARNING: {entry_script} not found!") + print("Available files in /:") + os.system("ls -la /") + + # Run the WAA client + print("\n" + "=" * 60) + print("Starting WAA client...") + print("=" * 60) + + client_cmd = ( + f"cd /client && python run.py " + f"--agent_name {agent} " + f"--model {model} " + f"--worker_id {worker_id} " + f"--num_workers {num_workers} " + f"--max_steps {max_steps} " + f"--result_dir {result_dir}" + ) + print(f"Command: {client_cmd}") + + result = os.system(client_cmd) + + # Write completion marker + with open(os.path.join(result_dir, "completed.txt"), "w") as f: + f.write(f"Completed at {time.ctime()}\n") + f.write(f"Exit code: {result}\n") + + print("\n" + "=" * 60) + print(f"WAA client finished with exit code {result}") + print("=" * 60) + + sys.exit(result) + + +if __name__ == "__main__": + main() diff --git a/openadapt_evals/benchmarks/cli.py b/openadapt_evals/benchmarks/cli.py index 9d15d7f..65a8422 100644 --- a/openadapt_evals/benchmarks/cli.py +++ b/openadapt_evals/benchmarks/cli.py @@ -276,8 +276,8 @@ def cmd_run(args: argparse.Namespace) -> int: task_ids = [t.strip() for t in args.tasks.split(",")] else: print("ERROR: Specify --task or --tasks") - print("Example: --task notepad_1") - print("Example: --tasks notepad_1,notepad_2,browser_1") + print("Example: --task notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS") + print("Example: --tasks 
notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS,chrome_2ae9ba84-3a0d-4d4c-8338-3a1478dc5fe3-wos") return 1 # Create agent @@ -468,7 +468,7 @@ def cmd_live(args: argparse.Namespace) -> int: else: # For live evaluation, we need explicit task IDs print("ERROR: --task-ids required for live evaluation") - print("Example: --task-ids notepad_1,notepad_2,browser_1") + print("Example: --task-ids notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS,chrome_2ae9ba84-3a0d-4d4c-8338-3a1478dc5fe3-wos") return 1 # Run evaluation @@ -1494,10 +1494,10 @@ def patch_evaluate_endpoint() -> bool: print(" WARNING: /evaluate endpoint health check failed") print("\nRun a no-API-key smoke test with:") print( - f" uv run python -m openadapt_evals.benchmarks.cli live --server {server_url} --agent noop --task-ids notepad_1" + f" uv run python -m openadapt_evals.benchmarks.cli live --server {server_url} --agent noop --task-ids notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS" ) print("\nOr fully automated (starts + tests + deallocates):") - print(" uv run python -m openadapt_evals.benchmarks.cli smoke-live --task-id notepad_1") + print(" uv run python -m openadapt_evals.benchmarks.cli smoke-live --task-id notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS") return 0 except Exception: pass @@ -1711,6 +1711,174 @@ def cmd_azure_monitor(args: argparse.Namespace) -> int: return 1 +def cmd_azure_setup(args: argparse.Namespace) -> int: + """Set up Azure resources for WAA benchmark evaluation. + + This is a simplified setup that: + 1. Checks Azure CLI is installed and user is logged in + 2. Creates resource group if needed + 3. Creates ML workspace if needed + 4. Writes config to .env file + """ + import shutil + + def run_az(cmd: list[str], check: bool = True) -> tuple[int, str, str]: + """Run az command and return (returncode, stdout, stderr).""" + result = subprocess.run(cmd, capture_output=True, text=True) + return result.returncode, result.stdout.strip(), result.stderr.strip() + + print("\n" + "=" * 50) + print("Azure Setup for WAA Benchmark") + print("=" * 50) + + # Step 1: Check Azure CLI + print("\n[1/5] Checking Azure CLI...") + if not shutil.which("az"): + print(" ERROR: Azure CLI not found!") + print("\n Install Azure CLI:") + print(" macOS: brew install azure-cli") + print(" Windows: winget install Microsoft.AzureCLI") + print(" Linux: curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash") + return 1 + print(" Azure CLI is installed") + + # Step 2: Check login / login if needed + print("\n[2/5] Checking Azure login...") + rc, out, err = run_az(["az", "account", "show", "-o", "json"]) + if rc != 0: + print(" Not logged in. 
Running 'az login'...") + rc, out, err = run_az(["az", "login"], check=False) + if rc != 0: + print(f" ERROR: Login failed: {err}") + return 1 + rc, out, err = run_az(["az", "account", "show", "-o", "json"]) + + account = json.loads(out) + subscription_id = account.get("id") + user_name = account.get("user", {}).get("name", "unknown") + print(f" Logged in as: {user_name}") + print(f" Subscription: {account.get('name')} ({subscription_id[:8]}...)") + + resource_group = args.resource_group + workspace_name = args.workspace + location = args.location + + # Step 3: Create resource group + print(f"\n[3/5] Creating resource group '{resource_group}'...") + rc, out, err = run_az(["az", "group", "show", "--name", resource_group]) + if rc == 0: + print(f" Resource group '{resource_group}' already exists") + else: + rc, out, err = run_az([ + "az", "group", "create", + "--name", resource_group, + "--location", location + ]) + if rc != 0: + print(f" ERROR: Failed to create resource group: {err}") + return 1 + print(f" Resource group '{resource_group}' created") + + # Step 4: Install ML extension if needed and create workspace + print(f"\n[4/5] Creating ML workspace '{workspace_name}'...") + + # Check if ml extension is installed + rc, out, err = run_az(["az", "extension", "list", "-o", "json"]) + if rc == 0: + extensions = json.loads(out) if out else [] + has_ml = any(ext.get("name") == "ml" for ext in extensions) + if not has_ml: + print(" Installing Azure ML CLI extension...") + run_az(["az", "extension", "add", "--name", "ml", "--yes"]) + + # Check if workspace exists + rc, out, err = run_az([ + "az", "ml", "workspace", "show", + "--name", workspace_name, + "--resource-group", resource_group + ]) + if rc == 0: + print(f" ML workspace '{workspace_name}' already exists") + else: + print(f" Creating ML workspace (this takes 1-2 minutes)...") + rc, out, err = run_az([ + "az", "ml", "workspace", "create", + "--name", workspace_name, + "--resource-group", resource_group, + "--location", location + ]) + if rc != 0: + print(f" ERROR: Failed to create workspace: {err}") + return 1 + print(f" ML workspace '{workspace_name}' created") + + # Step 5: Write to .env file + print(f"\n[5/5] Writing config to .env...") + env_path = Path(args.env_file) + + # Read existing content + existing_content = "" + if env_path.exists(): + existing_content = env_path.read_text() + + # Check if Azure section exists + azure_vars = { + "AZURE_SUBSCRIPTION_ID": subscription_id, + "AZURE_ML_RESOURCE_GROUP": resource_group, + "AZURE_ML_WORKSPACE_NAME": workspace_name, + } + + if "AZURE_SUBSCRIPTION_ID=" in existing_content: + print(" Updating existing Azure config in .env") + lines = existing_content.split("\n") + new_lines = [] + for line in lines: + updated = False + for var, value in azure_vars.items(): + if line.startswith(f"{var}="): + new_lines.append(f"{var}={value}") + updated = True + break + if not updated: + new_lines.append(line) + env_path.write_text("\n".join(new_lines)) + else: + # Append Azure section + azure_section = f""" +# ============================================================================= +# Azure Configuration (auto-generated by azure-setup) +# ============================================================================= +AZURE_SUBSCRIPTION_ID={subscription_id} +AZURE_ML_RESOURCE_GROUP={resource_group} +AZURE_ML_WORKSPACE_NAME={workspace_name} +""" + with open(env_path, "a") as f: + f.write(azure_section) + + print(f" Config written to {env_path}") + + # Validation + print("\n" + "=" * 50) + 
print("Setup Complete!") + print("=" * 50) + print(f""" +Resources: + - Subscription: {subscription_id[:8]}... + - Resource Group: {resource_group} + - ML Workspace: {workspace_name} + - Location: {location} + +Next steps: + 1. Run evaluation: + uv run python -m openadapt_evals.benchmarks.cli azure --workers 2 --task-ids notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS,notepad_a7d4b6c5-569b-452e-9e1d-ffdb3d431d15-WOS + + 2. Estimate costs: + uv run python -m openadapt_evals.benchmarks.cli estimate --workers 2 +""") + + return 0 + + def cmd_azure(args: argparse.Namespace) -> int: """Run Azure-based parallel evaluation.""" from openadapt_evals.benchmarks.azure import AzureConfig, AzureWAAOrchestrator @@ -1722,7 +1890,9 @@ def cmd_azure(args: argparse.Namespace) -> int: config = AzureConfig.from_env() except ValueError as e: print(f"ERROR: {e}") - print("\nSet these environment variables:") + print("\nAzure is not configured. Run setup first:") + print(" uv run python -m openadapt_evals.benchmarks.cli azure-setup") + print("\nOr set these environment variables manually:") print(" AZURE_SUBSCRIPTION_ID") print(" AZURE_ML_RESOURCE_GROUP") print(" AZURE_ML_WORKSPACE_NAME") @@ -1788,6 +1958,7 @@ def cmd_azure(args: argparse.Namespace) -> int: cleanup_on_complete=not args.no_cleanup, cleanup_stale_on_start=not args.skip_cleanup_stale, timeout_hours=args.timeout_hours, + enable_vnc=getattr(args, "enable_vnc", False), ) # Report results @@ -1809,6 +1980,682 @@ def cmd_azure(args: argparse.Namespace) -> int: return 1 +def cmd_azure_vnc(args: argparse.Namespace) -> int: + """Start VNC tunnels to Azure ML workers for debugging. + + This command connects to running Azure ML compute instances and creates + SSH tunnels for VNC access. Use this to debug evaluations in progress. + + Example: + # Start tunnels to all running workers + uv run python -m openadapt_evals.benchmarks.cli azure-vnc + + # Keep tunnels open until Ctrl+C + uv run python -m openadapt_evals.benchmarks.cli azure-vnc --keep-alive + + # Filter by prefix + uv run python -m openadapt_evals.benchmarks.cli azure-vnc --prefix waa4704 + """ + from openadapt_evals.benchmarks.azure import AzureConfig, AzureWAAOrchestrator + from pathlib import Path + import time + + print("Connecting to Azure ML workers for VNC access...") + + try: + config = AzureConfig.from_env() + except ValueError as e: + print(f"ERROR: {e}") + print("\nAzure is not configured. Set these environment variables:") + print(" AZURE_SUBSCRIPTION_ID") + print(" AZURE_ML_RESOURCE_GROUP") + print(" AZURE_ML_WORKSPACE_NAME") + return 1 + + # Create orchestrator (with dummy WAA path since we're not running evaluation) + orchestrator = AzureWAAOrchestrator( + config=config, + waa_repo_path=Path("/tmp/dummy"), + ) + + # Start tunnels to existing workers + prefix = getattr(args, "prefix", "waa") + ports = orchestrator.start_vnc_for_existing_workers(prefix=prefix) + + if not ports: + print("\nNo VNC tunnels established. Check that:") + print(" 1. Workers are running (use 'azure' command first)") + print(" 2. SSH is enabled on compute instances") + print(" 3. Your SSH key is configured") + return 1 + + if getattr(args, "keep_alive", False): + print("\nTunnels active. Press Ctrl+C to stop...") + try: + while True: + time.sleep(1) + except KeyboardInterrupt: + print("\nStopping tunnels...") + orchestrator.vnc_manager.stop_all_tunnels() + else: + print("\nTunnels established. 
They will remain open as background processes.") + print("To stop tunnels, use: pkill -f 'ssh -N -L'") + + return 0 + + +def cmd_waa_image(args: argparse.Namespace) -> int: + """Build and push custom WAA Docker image. + + The custom image includes: + - Modern dockurr/windows base (auto-downloads Windows 11) + - FirstLogonCommands patches for unattended installation + - Python 3.9 with transformers 4.46.2 (compatible with navi agent) + - api_agent.py for Claude/GPT support + """ + import subprocess + from pathlib import Path + + # Find waa_deploy directory + waa_deploy_dir = Path(__file__).parent.parent / "waa_deploy" + if not waa_deploy_dir.exists(): + print(f"ERROR: waa_deploy directory not found at {waa_deploy_dir}") + return 1 + + dockerfile = waa_deploy_dir / "Dockerfile" + if not dockerfile.exists(): + print(f"ERROR: Dockerfile not found at {dockerfile}") + return 1 + + action = args.action + tag = args.tag + local_tag = f"waa-auto:{tag}" + + if action == "build": + print(f"Building WAA image: {local_tag}") + print(f"Using Dockerfile: {dockerfile}") + print("Platform: linux/amd64 (required for windowsarena base image)") + try: + result = subprocess.run( + ["docker", "build", "--platform", "linux/amd64", + "-t", local_tag, str(waa_deploy_dir)], + check=True, + ) + print(f"\nSuccessfully built: {local_tag}") + return 0 + except subprocess.CalledProcessError as e: + print(f"ERROR: Docker build failed: {e}") + return 1 + except FileNotFoundError: + print("ERROR: Docker not found. Please install Docker first.") + return 1 + + elif action == "push": + registry = args.registry + + if registry == "ecr": + import json + import os + + # ECR Public repository name + repo_name = "waa-auto" + region = "us-east-1" # ECR Public is only in us-east-1 + + print(f"Pushing to AWS ECR Public...") + + try: + # Check if repository exists, create if not + print("Checking ECR Public repository...") + result = subprocess.run( + ["aws", "ecr-public", "describe-repositories", + "--repository-names", repo_name, + "--region", region], + capture_output=True, + text=True, + ) + + if result.returncode != 0: + print(f"Creating ECR Public repository: {repo_name}") + create_result = subprocess.run( + ["aws", "ecr-public", "create-repository", + "--repository-name", repo_name, + "--region", region, + "--catalog-data", "description=Custom WAA Docker image with unattended Windows installation"], + capture_output=True, + text=True, + check=True, + ) + repo_data = json.loads(create_result.stdout) + repo_uri = repo_data["repository"]["repositoryUri"] + else: + repo_data = json.loads(result.stdout) + repo_uri = repo_data["repositories"][0]["repositoryUri"] + + print(f"Repository URI: {repo_uri}") + full_tag = f"{repo_uri}:{tag}" + + # Login to ECR Public + print("Logging in to ECR Public...") + login_result = subprocess.run( + ["aws", "ecr-public", "get-login-password", "--region", region], + capture_output=True, + text=True, + check=True, + ) + subprocess.run( + ["docker", "login", "--username", "AWS", "--password-stdin", "public.ecr.aws"], + input=login_result.stdout, + check=True, + ) + + # Tag for registry + print(f"Tagging image: {full_tag}") + subprocess.run( + ["docker", "tag", local_tag, full_tag], + check=True, + ) + + # Push + print(f"Pushing image: {full_tag}") + subprocess.run( + ["docker", "push", full_tag], + check=True, + ) + + print(f"\n✓ Successfully pushed: {full_tag}") + print(f"\nTo use this image, set:") + print(f" export AZURE_DOCKER_IMAGE={full_tag}") + return 0 + + except subprocess.CalledProcessError 
as e: + print(f"ERROR: ECR push failed: {e}") + if e.stderr: + print(f"Details: {e.stderr}") + print("\nMake sure AWS CLI is configured: aws configure") + return 1 + except json.JSONDecodeError as e: + print(f"ERROR: Failed to parse AWS response: {e}") + return 1 + + elif registry == "dockerhub": + repo = args.repo or "openadaptai/waa-auto" + full_tag = f"{repo}:{tag}" + print(f"Pushing to Docker Hub: {full_tag}") + + try: + # Tag for registry + subprocess.run( + ["docker", "tag", local_tag, full_tag], + check=True, + ) + # Push + subprocess.run( + ["docker", "push", full_tag], + check=True, + ) + print(f"\nSuccessfully pushed: {full_tag}") + return 0 + except subprocess.CalledProcessError as e: + print(f"ERROR: Docker push failed: {e}") + print("\nMake sure you're logged in: docker login") + return 1 + + elif registry == "acr": + import os + acr_name = os.getenv("AZURE_ACR_NAME", "openadaptacr") + full_tag = f"{acr_name}.azurecr.io/waa-auto:{tag}" + print(f"Pushing to Azure Container Registry: {full_tag}") + + try: + # Login to ACR + subprocess.run( + ["az", "acr", "login", "--name", acr_name], + check=True, + ) + # Tag for registry + subprocess.run( + ["docker", "tag", local_tag, full_tag], + check=True, + ) + # Push + subprocess.run( + ["docker", "push", full_tag], + check=True, + ) + print(f"\nSuccessfully pushed: {full_tag}") + return 0 + except subprocess.CalledProcessError as e: + print(f"ERROR: ACR push failed: {e}") + return 1 + else: + print(f"ERROR: Unknown registry: {registry}") + return 1 + + elif action == "build-push": + # Build first + print(f"Building WAA image: {local_tag}") + print("Platform: linux/amd64 (required for windowsarena base image)") + try: + subprocess.run( + ["docker", "build", "--platform", "linux/amd64", + "-t", local_tag, str(waa_deploy_dir)], + check=True, + ) + print(f"Successfully built: {local_tag}") + except subprocess.CalledProcessError as e: + print(f"ERROR: Docker build failed: {e}") + return 1 + + # Then push + args.action = "push" # Set action to push for the push logic + return cmd_waa_image(args) + + elif action == "check": + registry = args.registry + + if registry == "ecr": + import json + + repo_name = "waa-auto" + region = "us-east-1" + + print(f"Checking AWS ECR Public for waa-auto:{tag}...") + + try: + # Check if repository exists + result = subprocess.run( + ["aws", "ecr-public", "describe-repositories", + "--repository-names", repo_name, + "--region", region], + capture_output=True, + text=True, + ) + + if result.returncode != 0: + print(f"Repository NOT found: {repo_name}") + return 1 + + repo_data = json.loads(result.stdout) + repo_uri = repo_data["repositories"][0]["repositoryUri"] + + # Check if tag exists + tags_result = subprocess.run( + ["aws", "ecr-public", "describe-image-tags", + "--repository-name", repo_name, + "--region", region], + capture_output=True, + text=True, + ) + + if tags_result.returncode == 0: + tags_data = json.loads(tags_result.stdout) + existing_tags = [t.get("imageTag") for t in tags_data.get("imageTagDetails", [])] + if tag in existing_tags: + print(f"✓ Image exists: {repo_uri}:{tag}") + return 0 + + print(f"Image NOT found: {repo_uri}:{tag}") + return 1 + + except subprocess.CalledProcessError as e: + print(f"ERROR: Failed to check ECR: {e}") + return 1 + except json.JSONDecodeError as e: + print(f"ERROR: Failed to parse AWS response: {e}") + return 1 + + elif registry == "dockerhub": + repo = args.repo or "openadaptai/waa-auto" + print(f"Checking Docker Hub for {repo}:{tag}...") + + # Check if image 
exists using Docker Hub API + import urllib.request + import urllib.error + url = f"https://hub.docker.com/v2/repositories/{repo}/tags/{tag}" + try: + urllib.request.urlopen(url, timeout=10) + print(f"Image exists: {repo}:{tag}") + return 0 + except urllib.error.HTTPError as e: + if e.code == 404: + print(f"Image NOT found: {repo}:{tag}") + return 1 + print(f"ERROR: Failed to check: {e}") + return 1 + except urllib.error.URLError as e: + # Network-level failure (DNS, connectivity) rather than an HTTP status + print(f"ERROR: Failed to reach Docker Hub: {e}") + return 1 + + elif registry == "acr": + import os + acr_name = os.getenv("AZURE_ACR_NAME", "openadaptacr") + print(f"Checking ACR for {acr_name}.azurecr.io/waa-auto:{tag}...") + + try: + result = subprocess.run( + ["az", "acr", "repository", "show-tags", + "--name", acr_name, + "--repository", "waa-auto", + "--output", "tsv"], + capture_output=True, + text=True, + check=True,  # raise on az failure instead of silently reporting "not found" + ) + if tag in result.stdout.split("\n"): + print(f"Image exists: {acr_name}.azurecr.io/waa-auto:{tag}") + return 0 + else: + print(f"Image NOT found: {acr_name}.azurecr.io/waa-auto:{tag}") + return 1 + except subprocess.CalledProcessError as e: + print(f"ERROR: Failed to check ACR: {e}") + return 1 + else: + print(f"ERROR: Unknown registry: {registry}") + return 1 + + elif action == "delete": + registry = args.registry + + if registry == "ecr": + repo_name = "waa-auto" + region = "us-east-1" + + print(f"Deleting AWS ECR Public repository: {repo_name}...") + + try: + # Check if repository exists first + result = subprocess.run( + ["aws", "ecr-public", "describe-repositories", + "--repository-names", repo_name, + "--region", region], + capture_output=True, + text=True, + ) + + if result.returncode != 0: + print(f"Repository not found: {repo_name} (already deleted or never created)") + return 0 + + # Delete the repository + delete_result = subprocess.run( + ["aws", "ecr-public", "delete-repository", + "--repository-name", repo_name, + "--region", region, + "--force"], # --force deletes even with images + capture_output=True, + text=True, + check=True, + ) + + print(f"✓ Deleted ECR Public repository: {repo_name}") + return 0 + + except subprocess.CalledProcessError as e: + print(f"ERROR: Failed to delete ECR repository: {e}") + if e.stderr: + print(f"Details: {e.stderr}") + return 1 + + elif registry == "dockerhub": + repo = args.repo or "openadaptai/waa-auto" + print(f"NOTE: Docker Hub repository deletion must be done via web interface") + print(f" 1. Go to: https://hub.docker.com/repository/docker/{repo}/settings") + print(f" 2. Click 'Delete Repository'") + print(f"\nDocker Hub free tier doesn't charge for storage, so deletion is optional.") + return 0 + + elif registry == "acr": + import os + acr_name = os.getenv("AZURE_ACR_NAME", "openadaptacr") + print(f"Deleting ACR repository: {acr_name}.azurecr.io/waa-auto...") + + try: + subprocess.run( + ["az", "acr", "repository", "delete", + "--name", acr_name, + "--repository", "waa-auto", + "--yes"], # Skip confirmation + check=True, + ) + print(f"✓ Deleted ACR repository: waa-auto") + return 0 + except subprocess.CalledProcessError as e: + print(f"ERROR: Failed to delete ACR repository: {e}") + return 1 + + else: + print(f"ERROR: Unknown registry: {registry}") + return 1 + + else: + print(f"ERROR: Unknown action: {action}") + return 1 + + +def cmd_aws_costs(args: argparse.Namespace) -> int: + """Show AWS costs using Cost Explorer API. + + Displays current month's costs (total and by service), historical costs, + and ECR storage costs specifically. 
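+ + Example (illustrative): + uv run python -m openadapt_evals.benchmarks.cli aws-costs --months 6 --json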
+ """ + from datetime import datetime, timedelta + + months = getattr(args, "months", 3) + output_json = getattr(args, "json", False) + + # Calculate date ranges + today = datetime.now() + # Current month: 1st of month to today + current_month_start = today.replace(day=1).strftime("%Y-%m-%d") + current_month_end = today.strftime("%Y-%m-%d") + + # Historical: go back N months + historical_start = (today.replace(day=1) - timedelta(days=months * 31)).replace(day=1) + historical_start_str = historical_start.strftime("%Y-%m-%d") + + results = { + "current_month": {}, + "current_month_by_service": [], + "historical_monthly": [], + "ecr_costs": {}, + } + + # Check AWS CLI availability + try: + result = subprocess.run( + ["aws", "--version"], + capture_output=True, + text=True, + timeout=10, + ) + if result.returncode != 0: + print("ERROR: AWS CLI not available") + return 1 + except (FileNotFoundError, subprocess.TimeoutExpired): + print("ERROR: AWS CLI not installed or not in PATH") + print("Install: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html") + return 1 + + # 1. Get current month total costs + print("Fetching current month costs...") + try: + result = subprocess.run( + [ + "aws", "ce", "get-cost-and-usage", + "--time-period", f"Start={current_month_start},End={current_month_end}", + "--granularity", "MONTHLY", + "--metrics", "UnblendedCost", + "--output", "json", + ], + capture_output=True, + text=True, + timeout=60, + ) + if result.returncode == 0: + data = json.loads(result.stdout) + for period in data.get("ResultsByTime", []): + total = period.get("Total", {}).get("UnblendedCost", {}) + results["current_month"] = { + "amount": float(total.get("Amount", 0)), + "unit": total.get("Unit", "USD"), + "start": period.get("TimePeriod", {}).get("Start"), + "end": period.get("TimePeriod", {}).get("End"), + } + else: + print(f"WARNING: Failed to get current month costs: {result.stderr}") + except (subprocess.TimeoutExpired, json.JSONDecodeError) as e: + print(f"WARNING: Error fetching current month costs: {e}") + + # 2. Get current month costs by service + print("Fetching costs by service...") + try: + result = subprocess.run( + [ + "aws", "ce", "get-cost-and-usage", + "--time-period", f"Start={current_month_start},End={current_month_end}", + "--granularity", "MONTHLY", + "--metrics", "UnblendedCost", + "--group-by", "Type=DIMENSION,Key=SERVICE", + "--output", "json", + ], + capture_output=True, + text=True, + timeout=60, + ) + if result.returncode == 0: + data = json.loads(result.stdout) + services = [] + for period in data.get("ResultsByTime", []): + for group in period.get("Groups", []): + service_name = group.get("Keys", ["Unknown"])[0] + cost_data = group.get("Metrics", {}).get("UnblendedCost", {}) + amount = float(cost_data.get("Amount", 0)) + if amount > 0.001: # Only include services with non-trivial costs + services.append({ + "service": service_name, + "amount": amount, + "unit": cost_data.get("Unit", "USD"), + }) + # Sort by cost descending + services.sort(key=lambda x: x["amount"], reverse=True) + results["current_month_by_service"] = services + else: + print(f"WARNING: Failed to get costs by service: {result.stderr}") + except (subprocess.TimeoutExpired, json.JSONDecodeError) as e: + print(f"WARNING: Error fetching costs by service: {e}") + + # 3. 
Get historical monthly costs + print(f"Fetching historical costs ({months} months)...") + try: + result = subprocess.run( + [ + "aws", "ce", "get-cost-and-usage", + "--time-period", f"Start={historical_start_str},End={current_month_end}", + "--granularity", "MONTHLY", + "--metrics", "UnblendedCost", + "--output", "json", + ], + capture_output=True, + text=True, + timeout=60, + ) + if result.returncode == 0: + data = json.loads(result.stdout) + for period in data.get("ResultsByTime", []): + total = period.get("Total", {}).get("UnblendedCost", {}) + time_period = period.get("TimePeriod", {}) + results["historical_monthly"].append({ + "month": time_period.get("Start", "")[:7], # YYYY-MM + "amount": float(total.get("Amount", 0)), + "unit": total.get("Unit", "USD"), + }) + else: + print(f"WARNING: Failed to get historical costs: {result.stderr}") + except (subprocess.TimeoutExpired, json.JSONDecodeError) as e: + print(f"WARNING: Error fetching historical costs: {e}") + + # 4. Get ECR-specific costs + print("Fetching ECR storage costs...") + try: + result = subprocess.run( + [ + "aws", "ce", "get-cost-and-usage", + "--time-period", f"Start={current_month_start},End={current_month_end}", + "--granularity", "MONTHLY", + "--metrics", "UnblendedCost", + "--filter", json.dumps({ + "Dimensions": { + "Key": "SERVICE", + "Values": ["Amazon EC2 Container Registry (ECR)", "Amazon Elastic Container Registry"] + } + }), + "--output", "json", + ], + capture_output=True, + text=True, + timeout=60, + ) + if result.returncode == 0: + data = json.loads(result.stdout) + ecr_total = 0.0 + for period in data.get("ResultsByTime", []): + total = period.get("Total", {}).get("UnblendedCost", {}) + ecr_total += float(total.get("Amount", 0)) + results["ecr_costs"] = { + "amount": ecr_total, + "unit": "USD", + "period": "current_month", + } + else: + # ECR might not have costs - that's fine + results["ecr_costs"] = {"amount": 0.0, "unit": "USD", "period": "current_month"} + except (subprocess.TimeoutExpired, json.JSONDecodeError) as e: + print(f"WARNING: Error fetching ECR costs: {e}") + results["ecr_costs"] = {"amount": 0.0, "unit": "USD", "period": "current_month"} + + # Output results + if output_json: + print(json.dumps(results, indent=2)) + return 0 + + # Pretty print results + print("\n" + "=" * 60) + print("AWS Cost Report") + print("=" * 60) + + # Current month total + if results["current_month"]: + cm = results["current_month"] + print(f"\nCurrent Month ({cm.get('start', 'N/A')} to {cm.get('end', 'N/A')}):") + print(f" Total: ${cm['amount']:.2f} {cm['unit']}") + else: + print("\nCurrent Month: No data available") + + # By service + if results["current_month_by_service"]: + print("\nCosts by Service (current month):") + for svc in results["current_month_by_service"][:10]: # Top 10 + print(f" {svc['service']:<45} ${svc['amount']:>10.2f}") + if len(results["current_month_by_service"]) > 10: + remaining = sum(s["amount"] for s in results["current_month_by_service"][10:]) + print(f" {'(other services)':<45} ${remaining:>10.2f}") + + # ECR specific + if results["ecr_costs"]: + ecr = results["ecr_costs"] + print(f"\nECR Storage (current month): ${ecr['amount']:.2f}") + + # Historical + if results["historical_monthly"]: + print(f"\nHistorical Monthly Costs (last {months} months):") + for month_data in results["historical_monthly"]: + print(f" {month_data['month']}: ${month_data['amount']:.2f}") + total_historical = sum(m["amount"] for m in results["historical_monthly"]) + avg_monthly = total_historical / 
len(results["historical_monthly"]) if results["historical_monthly"] else 0 + print(f" --------------------------------") + print(f" Average: ${avg_monthly:.2f}/month") + + print("\n" + "=" * 60) + return 0 + + def main() -> int: """Main entry point for CLI.""" parser = argparse.ArgumentParser( @@ -1837,9 +2684,9 @@ def main() -> int: run_parser.add_argument("--agent", type=str, default="api-openai", help="Agent type: noop, mock, api-claude, api-openai") run_parser.add_argument("--task", type=str, - help="Single task ID (e.g., notepad_1)") + help="Single task ID (e.g., notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS)") run_parser.add_argument("--tasks", type=str, - help="Comma-separated task IDs (e.g., notepad_1,notepad_2)") + help="Comma-separated task IDs (e.g., notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS,chrome_2ae9ba84-3a0d-4d4c-8338-3a1478dc5fe3-wos)") run_parser.add_argument("--demo", type=str, help="Demo trajectory file for ApiAgent") run_parser.add_argument("--max-steps", type=int, default=15, @@ -1916,6 +2763,32 @@ def main() -> int: help="Prefix filter for cleanup (default: 'waa')") azure_parser.add_argument("--dry-run", action="store_true", help="List instances without deleting (for --cleanup-only)") + azure_parser.add_argument("--enable-vnc", action="store_true", + help="Start VNC tunnels for debugging (access at localhost:8006, 8007, ...)") + + # Azure VNC - Connect to running Azure ML workers for debugging + azure_vnc_parser = subparsers.add_parser( + "azure-vnc", + help="Start VNC tunnels to Azure ML workers for debugging" + ) + azure_vnc_parser.add_argument("--prefix", type=str, default="waa", + help="Worker name prefix filter (default: 'waa')") + azure_vnc_parser.add_argument("--keep-alive", action="store_true", + help="Keep tunnels open until Ctrl+C (default: exit after printing URLs)") + + # Azure setup command + azure_setup_parser = subparsers.add_parser( + "azure-setup", + help="Set up Azure resources for WAA benchmark (resource group, ML workspace)" + ) + azure_setup_parser.add_argument("--resource-group", "-g", type=str, default="openadapt-agents", + help="Resource group name (default: openadapt-agents)") + azure_setup_parser.add_argument("--workspace", "-w", type=str, default="openadapt-ml", + help="ML workspace name (default: openadapt-ml)") + azure_setup_parser.add_argument("--location", "-l", type=str, default="eastus", + help="Azure region (default: eastus)") + azure_setup_parser.add_argument("--env-file", type=str, default=".env", + help="Path to .env file (default: .env)") # VM management commands vm_start_parser = subparsers.add_parser("vm-start", help="Start an Azure VM") @@ -1982,8 +2855,8 @@ def main() -> int: help="Azure VM name (optional if tagged)") smoke_live_parser.add_argument("--resource-group", type=str, default=None, help="Azure resource group (optional if tagged)") - smoke_live_parser.add_argument("--task-id", type=str, default="notepad_1", - help="Single task ID to run") + smoke_live_parser.add_argument("--task-id", type=str, default="notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS", + help="Single task ID to run (e.g., notepad_366de66e-cbae-4d72-b042-26390db2b145-WOS)") smoke_live_parser.add_argument("--max-steps", type=int, default=15, help="Max steps per task") smoke_live_parser.add_argument("--boot-wait", type=int, default=30, @@ -2091,6 +2964,32 @@ def main() -> int: wandb_log_parser.add_argument("--dry-run", action="store_true", help="Validate data but don't upload") + # WAA Docker image management + waa_image_parser = 
subparsers.add_parser( + "waa-image", + help="Build and push custom WAA Docker image for unattended installation" + ) + waa_image_parser.add_argument("action", type=str, + choices=["build", "push", "build-push", "check", "delete"], + help="Action: build, push, build-push, check, or delete") + waa_image_parser.add_argument("--registry", type=str, default="dockerhub", + choices=["dockerhub", "ecr", "acr"], + help="Registry: dockerhub (default, free), ecr (AWS ECR Public), or acr (Azure)") + waa_image_parser.add_argument("--tag", type=str, default="latest", + help="Image tag (default: latest)") + waa_image_parser.add_argument("--repo", type=str, default=None, + help="Repository override (default: auto-detected per registry)") + + # AWS cost tracking + aws_costs_parser = subparsers.add_parser( + "aws-costs", + help="Show AWS costs (current month, historical, ECR storage)" + ) + aws_costs_parser.add_argument("--months", type=int, default=3, + help="Number of months for historical costs (default: 3)") + aws_costs_parser.add_argument("--json", action="store_true", + help="Output as JSON for programmatic use") + args = parser.parse_args() if args.command is None: @@ -2107,6 +3006,8 @@ def main() -> int: "view": cmd_view, "estimate": cmd_estimate, "azure": cmd_azure, + "azure-vnc": cmd_azure_vnc, + "azure-setup": cmd_azure_setup, "azure-monitor": cmd_azure_monitor, "vm-start": cmd_vm_start, "vm-stop": cmd_vm_stop, @@ -2119,6 +3020,8 @@ def main() -> int: "wandb-demo": cmd_wandb_demo, "wandb-report": cmd_wandb_report, "wandb-log": cmd_wandb_log, + "waa-image": cmd_waa_image, + "aws-costs": cmd_aws_costs, } handler = handlers.get(args.command) diff --git a/openadapt_evals/benchmarks/health_checker.py b/openadapt_evals/benchmarks/health_checker.py index 7b90060..76d66a7 100644 --- a/openadapt_evals/benchmarks/health_checker.py +++ b/openadapt_evals/benchmarks/health_checker.py @@ -279,16 +279,8 @@ def _get_job_logs(self, job_name: str, last_n_lines: int | None = None) -> str: Log content as string. 
""" try: - # Use Azure ML SDK to get logs - # Note: This is a simplified implementation - # Real implementation would use ml_client.jobs.get_logs() or similar - job = self.ml_client.client.jobs.get(job_name) - - # For now, return empty string as placeholder - # In production, this would fetch actual logs - # TODO: Implement proper log fetching via Azure ML SDK - return "" - + # Use the ml_client's get_job_logs method + return self.ml_client.get_job_logs(job_name, tail=last_n_lines) except Exception as e: logger.warning(f"Failed to fetch logs for {job_name}: {e}") return "" diff --git a/openadapt_evals/cli/vm.py b/openadapt_evals/cli/vm.py index e66306d..2175b07 100644 --- a/openadapt_evals/cli/vm.py +++ b/openadapt_evals/cli/vm.py @@ -132,6 +132,8 @@ def cmd_setup(args: argparse.Namespace) -> int: sudo systemctl start docker sudo systemctl enable docker sudo usermod -aG docker $USER +# TODO: Switch to openadaptai/waa-auto:latest once it's published to Docker Hub +# The vanilla image requires manual setup; waa-auto has unattended installation sudo docker pull windowsarena/winarena:latest echo "WAA image pulled successfully" """ diff --git a/pyproject.toml b/pyproject.toml index 0aab7ec..cc06097 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,8 @@ azure = [ # Azure ML dependencies for distributed WAA evaluation "azure-ai-ml>=1.12.0", "azure-identity>=1.15.0", + # SDK V1 for DockerConfiguration (required for WAA Docker containers) + "azureml-core>=1.55.0", ] retrieval = [ # For RetrievalAugmentedAgent with automatic demo selection diff --git a/uv.lock b/uv.lock index f6e1b80..69c1c5e 100644 --- a/uv.lock +++ b/uv.lock @@ -1,8 +1,25 @@ version = 1 requires-python = ">=3.10" resolution-markers = [ - "python_full_version < '3.12'", - "python_full_version >= '3.12'", + "python_full_version < '3.12' and sys_platform == 'linux'", + "python_full_version < '3.12' and sys_platform != 'linux'", + "python_full_version >= '3.12' and sys_platform == 'linux'", + "python_full_version >= '3.12' and sys_platform != 'linux'", +] + +[[package]] +name = "adal" +version = "1.2.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyjwt" }, + { name = "python-dateutil" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/d7/a829bc5e8ff28f82f9e2dc9b363f3b7b9c1194766d5a75105e3885bfa9a8/adal-1.2.7.tar.gz", hash = "sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1", size = 35196 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/8d/58008a9a86075827f99aa8bb75d8db515bb9c34654f95e647cda31987db7/adal-1.2.7-py2.py3-none-any.whl", hash = "sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d", size = 55539 }, ] [[package]] @@ -47,6 +64,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592 }, ] +[[package]] +name = "argcomplete" +version = "3.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/38/61/0b9ae6399dd4a58d8c1b1dc5a27d6f2808023d0b5dd3104bb99f45a33ff6/argcomplete-3.6.3.tar.gz", hash = "sha256:62e8ed4fd6a45864acc8235409461b72c9a28ee785a2011cc5eb78318786c89c", size = 73754 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/74/f5/9373290775639cb67a2fce7f629a1c240dce9f12fe927bc32b2736e16dfc/argcomplete-3.6.3-py3-none-any.whl", hash = "sha256:f5007b3a600ccac5d25bbce33089211dfd49eab4a7718da3f10e3082525a92ce", size = 43846 }, +] + [[package]] name = "asgiref" version = "3.11.0" @@ -131,6 +157,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/5e/97a471f66935e7f89f521d0e11ae49c7f0871ca38f5c319dccae2155c8d8/azure_core_tracing_opentelemetry-1.0.0b12-py3-none-any.whl", hash = "sha256:38fd42709f1cc4bbc4f2797008b1c30a6a01617e49910c05daa3a0d0c65053ac", size = 11962 }, ] +[[package]] +name = "azure-graphrbac" +version = "0.61.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-common" }, + { name = "msrest" }, + { name = "msrestazure" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f9/bd/421cc9c208a47983adfaa63ce045d241c3b4580ddd217a4caa9a71c15775/azure_graphrbac-0.61.2.tar.gz", hash = "sha256:fb25b03307e17f739c81ad6bd3e9b57c57843686031f0f214b65158447c773dd", size = 47651 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/22/3f502f6439780fbb1d365730405a95944847fbfa3faed49e5675c4a854bd/azure_graphrbac-0.61.2-py2.py3-none-any.whl", hash = "sha256:7a204554aa933684b09df3e17669fe31aa307d33b9bfb44595fe0ad5b99900d4", size = 142453 }, +] + [[package]] name = "azure-identity" version = "1.25.1" @@ -147,6 +187,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/83/7b/5652771e24fff12da9dde4c20ecf4682e606b104f26419d139758cc935a6/azure_identity-1.25.1-py3-none-any.whl", hash = "sha256:e9edd720af03dff020223cd269fa3a61e8f345ea75443858273bcb44844ab651", size = 191317 }, ] +[[package]] +name = "azure-mgmt-authorization" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-common" }, + { name = "azure-mgmt-core" }, + { name = "isodate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/ab/e79874f166eed24f4456ce4d532b29a926fb4c798c2c609eefd916a3f73d/azure-mgmt-authorization-4.0.0.zip", hash = "sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4", size = 1134795 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/b3/8ec1268082f4d20cc8bf723a1a8e6b9e330bcc338a4dbcee9c7737e9dc1c/azure_mgmt_authorization-4.0.0-py3-none-any.whl", hash = "sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a", size = 1072620 }, +] + +[[package]] +name = "azure-mgmt-containerregistry" +version = "14.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-common" }, + { name = "azure-mgmt-core" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f0/4e/bae97dfe20cd172d91b832fc1554f21f55403f2e2cb8eafbfa725491921e/azure_mgmt_containerregistry-14.0.0.tar.gz", hash = "sha256:7383f1c5547bcffe76e411edd8250d55c34cddf5ef6804d5875c163cca66c4b5", size = 1181541 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/51/558350cc79c1516509fd56922aea75f7cb374e0f8d9be0bc7933cd510c9c/azure_mgmt_containerregistry-14.0.0-py3-none-any.whl", hash = "sha256:224e823beffe317c59ca4b482df5ee0b2977c3f44cdae59b165513fb47b32b8e", size = 1697813 }, +] + [[package]] name = "azure-mgmt-core" version = "1.6.0" @@ -159,6 +228,64 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/26/c79f962fd3172b577b6f38685724de58b6b4337a51d3aad316a43a4558c6/azure_mgmt_core-1.6.0-py3-none-any.whl", hash = 
"sha256:0460d11e85c408b71c727ee1981f74432bc641bb25dfcf1bb4e90a49e776dbc4", size = 29310 }, ] +[[package]] +name = "azure-mgmt-keyvault" +version = "11.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-common" }, + { name = "azure-mgmt-core" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/d3/9e8d31aaedfb37efd20c8a9ac420b07cdb5c3d2f19c3452c9cdcb082dad6/azure_mgmt_keyvault-11.0.0.tar.gz", hash = "sha256:fcfb1366852926f2a311e1bc6e6a786eb8a8a1fd46e6025d4c114ede2cb4642e", size = 187547 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/24/87c592517bbf97daf1897a271c8713ad1afd888b05db1fb48d7428cc8ee7/azure_mgmt_keyvault-11.0.0-py3-none-any.whl", hash = "sha256:abff0023a1c1b8033f3d4800cac996bf2b5470bd9c5ab71470c3184d7e8f0654", size = 308775 }, +] + +[[package]] +name = "azure-mgmt-network" +version = "30.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-mgmt-core" }, + { name = "msrest" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/27/d767e68c48b46ecfa59b20c14a1a7fc32d32614e35ea6efc154bd015f53f/azure_mgmt_network-30.0.0.tar.gz", hash = "sha256:117ee4ea18668c1318c869eff2445bbad06f0f4094f83c533e0191b41dc6dd1f", size = 689658 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/f0/572d40e3bc052a18c56ec9d8ce257fcb282577212f3dd4b6004096147c35/azure_mgmt_network-30.0.0-py3-none-any.whl", hash = "sha256:df93640f9782a734a1c217bd79817eb025bf8e3b6dea5ac5ef508ed08a8dc8e1", size = 614013 }, +] + +[[package]] +name = "azure-mgmt-resource" +version = "24.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-common" }, + { name = "azure-mgmt-core" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/4c/b27a3dfbedebbcc8e346a956a803528bd94a19fdf14b1de4bd781b03a6cc/azure_mgmt_resource-24.0.0.tar.gz", hash = "sha256:cf6b8995fcdd407ac9ff1dd474087129429a1d90dbb1ac77f97c19b96237b265", size = 3030022 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/18/f047cb553dad6fdb65c625c4fe48552e043c4e9a859416a70c5047d07475/azure_mgmt_resource-24.0.0-py3-none-any.whl", hash = "sha256:27b32cd223e2784269f5a0db3c282042886ee4072d79cedc638438ece7cd0df4", size = 3613790 }, +] + +[[package]] +name = "azure-mgmt-storage" +version = "24.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-mgmt-core" }, + { name = "msrest" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/42/b01a1c451417ac05229d986f5755a411bec7922c5eb5170d54642c2118df/azure_mgmt_storage-24.0.0.tar.gz", hash = "sha256:b1ae225ef87ada85f29c02e406140ab5895285ca64de2bcfe50b631c4818a337", size = 212908 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/9d/621a8e25d8a085d4d13fbb85339ba550e76a89c82e047efe6f40ac754669/azure_mgmt_storage-24.0.0-py3-none-any.whl", hash = "sha256:d1e35c07e8e3a70c3ba56b1adb21cfd87c25143876446697eaf61efa5b029978", size = 290914 }, +] + [[package]] name = "azure-monitor-opentelemetry" version = "1.8.4" @@ -244,6 +371,141 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/37/fb/101a69754bea38a40fb93387ee63bde3802014bdad6f2ecbd123a599e90c/azure_storage_file_share-12.24.0-py3-none-any.whl", hash = "sha256:d2b67788c48318fc69003ceda03ecdd00fa1f5061be7e28256b0e559cd8d9fe7", 
size = 314825 }, ] +[[package]] +name = "azureml-core" +version = "1.61.0.post1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "adal" }, + { name = "argcomplete" }, + { name = "azure-common" }, + { name = "azure-core" }, + { name = "azure-graphrbac" }, + { name = "azure-mgmt-authorization" }, + { name = "azure-mgmt-containerregistry" }, + { name = "azure-mgmt-keyvault" }, + { name = "azure-mgmt-network" }, + { name = "azure-mgmt-resource" }, + { name = "azure-mgmt-storage" }, + { name = "backports-tempfile" }, + { name = "contextlib2" }, + { name = "docker" }, + { name = "humanfriendly" }, + { name = "jmespath" }, + { name = "jsonpickle" }, + { name = "knack" }, + { name = "msal" }, + { name = "msal-extensions" }, + { name = "msrest" }, + { name = "msrestazure" }, + { name = "ndg-httpsclient" }, + { name = "packaging" }, + { name = "paramiko" }, + { name = "pathspec" }, + { name = "pkginfo" }, + { name = "pyjwt" }, + { name = "pyopenssl" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "requests", extra = ["socks"] }, + { name = "secretstorage" }, + { name = "urllib3" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/cd/2a2b53989c9c59853ad2b71e9f6bfe0f065f5881c154d83ff8ae38e5fb0f/azureml_core-1.61.0.post1-py3-none-any.whl", hash = "sha256:a7cb96d7f36830b2ca7f21669716731d087cb139eea3a98ebe28aa79b2c7b1a7", size = 3315924 }, +] + +[[package]] +name = "backports-tempfile" +version = "1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-weakref" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/f9/fa432ec3d185a63d39c20c49e37e163633aaae4009cc79574aecc064d2d0/backports.tempfile-1.0.tar.gz", hash = "sha256:1c648c452e8770d759bdc5a5e2431209be70d25484e1be24876cf2168722c762", size = 10262 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/5c/077f910632476281428fe254807952eb47ca78e720d059a46178c541e669/backports.tempfile-1.0-py2.py3-none-any.whl", hash = "sha256:05aa50940946f05759696156a8c39be118169a0e0f94a49d0bb106503891ff54", size = 4398 }, +] + +[[package]] +name = "backports-weakref" +version = "1.0.post1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/ab/cf35cf43a4a6215e3255cf2e49c77d5ba1e9c733af2aa3ec1ca9c4d02592/backports.weakref-1.0.post1.tar.gz", hash = "sha256:bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2", size = 10574 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ec/f598b633c3d5ffe267aaada57d961c94fdfa183c5c3ebda2b6d151943db6/backports.weakref-1.0.post1-py2.py3-none-any.whl", hash = "sha256:81bc9b51c0abc58edc76aefbbc68c62a787918ffe943a37947e162c3f8e19e82", size = 5237 }, +] + +[[package]] +name = "bcrypt" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806 }, + { url = 
"https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626 }, + { url = "https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853 }, + { url = "https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793 }, + { url = "https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930 }, + { url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194 }, + { url = "https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381 }, + { url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750 }, + { url = "https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757 }, + { url = "https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740 }, + { url = "https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197 }, + { url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974 }, + { url = "https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498 }, + { url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853 }, + { 
url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626 }, + { url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862 }, + { url = "https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544 }, + { url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787 }, + { url = "https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753 }, + { url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587 }, + { url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178 }, + { url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295 }, + { url = "https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700 }, + { url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034 }, + { url = "https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766 }, + { url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449 }, + { url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310 }, + { 
url = "https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761 }, + { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553 }, + { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009 }, + { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029 }, + { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907 }, + { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500 }, + { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412 }, + { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486 }, + { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940 }, + { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776 }, + { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922 }, + { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367 }, + { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187 }, + { url = 
"https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752 }, + { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881 }, + { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931 }, + { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313 }, + { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290 }, + { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253 }, + { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084 }, + { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185 }, + { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656 }, + { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662 }, + { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240 }, + { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152 }, + { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284 }, + { url = 
"https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643 }, + { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698 }, + { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725 }, + { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912 }, + { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953 }, + { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180 }, + { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791 }, + { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746 }, + { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375 }, +] + [[package]] name = "blinker" version = "1.9.0" @@ -454,6 +716,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, ] +[[package]] +name = "contextlib2" +version = "21.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/13/37ea7805ae3057992e96ecb1cffa2fa35c2ef4498543b846f90dd2348d8f/contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869", size = 43795 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/56/6d6872f79d14c0cb02f1646cbb4592eef935857c0951a105874b7b62a0c3/contextlib2-21.6.0-py2.py3-none-any.whl", hash = "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f", size = 13277 }, +] + [[package]] name = "cryptography" version = "46.0.3" @@ -528,6 +799,20 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, ] +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, +] + [[package]] name = "docstring-parser" version = "0.17.0" @@ -635,6 +920,30 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ab/6e/81d47999aebc1b155f81eca4477a616a70f238a2549848c38983f3c22a82/ftfy-6.3.1-py3-none-any.whl", hash = "sha256:7c70eb532015cd2f9adb53f101fb6c7945988d023a085d127d1573dc49dd0083", size = 44821 }, ] +[[package]] +name = "gitdb" +version = "4.0.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "smmap" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794 }, +] + +[[package]] +name = "gitpython" +version = "3.1.46" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gitdb" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/b5/59d16470a1f0dfe8c793f9ef56fd3826093fc52b3bd96d6b9d6c26c7e27b/gitpython-3.1.46.tar.gz", hash = "sha256:400124c7d0ef4ea03f7310ac2fbf7151e09ff97f2a3288d64a440c584a29c37f", size = 215371 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/09/e21df6aef1e1ffc0c816f0522ddc3f6dcded766c3261813131c78a704470/gitpython-3.1.46-py3-none-any.whl", hash = "sha256:79812ed143d9d25b6d176a10bb511de0f9c67b1fa641d82097b0ab90398a2058", size = 208620 }, +] + [[package]] name = "h11" version = "0.16.0" @@ -720,6 +1029,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/bd/1a875e0d592d447cbc02805fd3fe0f497714d6a2583f59d14fa9ebad96eb/huggingface_hub-0.36.0-py3-none-any.whl", hash = "sha256:7bcc9ad17d5b3f07b57c78e79d527102d08313caa278a641993acddcb894548d", size = 566094 }, ] +[[package]] +name = "humanfriendly" +version = "10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyreadline3", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 }, +] + [[package]] name = "idna" version = "3.11" @@ -768,6 +1089,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234 }, ] +[[package]] +name = "jeepney" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010 }, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -877,6 +1207,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110 }, ] +[[package]] +name = "jmespath" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/59/322338183ecda247fb5d1763a6cbe46eff7222eaeebafd9fa65d4bf5cb11/jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d", size = 27377 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64", size = 20419 }, +] + +[[package]] +name = "jsonpickle" +version = "4.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/a6/d07afcfdef402900229bcca795f80506b207af13a838d4d99ad45abf530c/jsonpickle-4.1.1.tar.gz", hash = "sha256:f86e18f13e2b96c1c1eede0b7b90095bbb61d99fedc14813c44dc2f361dbbae1", size = 316885 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/73/04df8a6fa66d43a9fd45c30f283cc4afff17da671886e451d52af60bdc7e/jsonpickle-4.1.1-py3-none-any.whl", hash = "sha256:bb141da6057898aa2438ff268362b126826c812a1721e31cf08a6e142910dc91", size = 47125 }, +] + [[package]] name = "jsonschema" version = "4.26.0" @@ -904,6 +1252,23 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437 }, ] +[[package]] +name = "knack" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argcomplete" }, + { name = "jmespath" }, + { name = "packaging" }, + { name = "pygments" }, + { name = "pyyaml" }, + { name = "tabulate" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/bb/eb/1f26c9112a4ee84cfa4a0a81bdf844207496a476fa026cfc98545bb702db/knack-0.12.0.tar.gz", hash = "sha256:71f2a6b42ae9a302e43243320fa05edb09b19339fcf1f331f5b6d07bf97f5291", size = 72401 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/28/dc87d6014ad6dbfeaf0ee79e62d225ae80f537dade0ac690d67b11e2c948/knack-0.12.0-py3-none-any.whl", hash = "sha256:c1c3e8555f5aa974880f580ad7c862502b6ef274b1c9891ae0cc17f8eaa5c8b5", size = 60843 }, +] + [[package]] name = "markupsafe" version = "3.0.3" @@ -1052,6 +1417,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/15/cf/f2966a2638144491f8696c27320d5219f48a072715075d168b31d3237720/msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32", size = 85384 }, ] +[[package]] +name = "msrestazure" +version = "0.6.4.post1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "adal" }, + { name = "msrest" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5d/86/06a086e4ed3523765a1917665257b1828f1bf882130768445f082a4c3484/msrestazure-0.6.4.post1.tar.gz", hash = "sha256:39842007569e8c77885ace5c46e4bf2a9108fcb09b1e6efdf85b6e2c642b55d4", size = 47728 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/7e/620e883def84ae56b8a9da382d960f7f801e37518fe930085cf72c148dae/msrestazure-0.6.4.post1-py2.py3-none-any.whl", hash = "sha256:2264493b086c2a0a82ddf5fd87b35b3fffc443819127fed992ac5028354c151e", size = 40789 }, +] + +[[package]] +name = "ndg-httpsclient" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, + { name = "pyopenssl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/f8/8f49278581cb848fb710a362bfc3028262a82044167684fb64ad068dbf92/ndg_httpsclient-0.5.1.tar.gz", hash = "sha256:d72faed0376ab039736c2ba12e30695e2788c4aa569c9c3e3d72131de2592210", size = 26665 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/67/c2f508c00ed2a6911541494504b7cac16fe0b0473912568df65fd1801132/ndg_httpsclient-0.5.1-py3-none-any.whl", hash = "sha256:dd174c11d971b6244a891f7be2b32ca9853d3797a72edb34fa5d7b07d8fff7d4", size = 34042 }, +] + [[package]] name = "networkx" version = "3.4.2" @@ -1302,28 +1694,34 @@ wheels = [ [[package]] name = "openadapt-evals" -version = "0.1.0" +version = "0.1.1" source = { editable = "." 
} dependencies = [ + { name = "httpx" }, { name = "open-clip-torch" }, { name = "pillow" }, { name = "python-dotenv" }, + { name = "requests" }, + { name = "tenacity" }, ] [package.optional-dependencies] all = [ { name = "azure-ai-ml" }, { name = "azure-identity" }, + { name = "azureml-core" }, { name = "flask" }, { name = "flask-cors" }, { name = "openadapt-retrieval" }, { name = "pytest" }, { name = "requests" }, { name = "ruff" }, + { name = "wandb" }, ] azure = [ { name = "azure-ai-ml" }, { name = "azure-identity" }, + { name = "azureml-core" }, ] dev = [ { name = "pytest" }, @@ -1342,22 +1740,30 @@ viewer = [ waa = [ { name = "requests" }, ] +wandb = [ + { name = "wandb" }, +] [package.metadata] requires-dist = [ { name = "anthropic", marker = "extra == 'test'", specifier = ">=0.76.0" }, { name = "azure-ai-ml", marker = "extra == 'azure'", specifier = ">=1.12.0" }, { name = "azure-identity", marker = "extra == 'azure'", specifier = ">=1.15.0" }, + { name = "azureml-core", marker = "extra == 'azure'", specifier = ">=1.55.0" }, { name = "flask", marker = "extra == 'viewer'", specifier = ">=3.0.0" }, { name = "flask-cors", marker = "extra == 'viewer'", specifier = ">=4.0.0" }, + { name = "httpx", specifier = ">=0.25.0" }, { name = "open-clip-torch", specifier = ">=2.20.0" }, - { name = "openadapt-evals", extras = ["dev", "waa", "azure", "retrieval", "viewer"], marker = "extra == 'all'" }, + { name = "openadapt-evals", extras = ["dev", "waa", "azure", "retrieval", "viewer", "wandb"], marker = "extra == 'all'" }, { name = "openadapt-retrieval", marker = "extra == 'retrieval'", specifier = ">=0.1.0" }, { name = "pillow", specifier = ">=10.0.0" }, { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, { name = "python-dotenv", specifier = ">=1.2.1" }, + { name = "requests", specifier = ">=2.28.0" }, { name = "requests", marker = "extra == 'waa'", specifier = ">=2.28.0" }, { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.1.0" }, + { name = "tenacity", specifier = ">=8.2.0" }, + { name = "wandb", marker = "extra == 'wandb'", specifier = ">=0.16.0" }, ] [[package]] @@ -1618,6 +2024,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 }, ] +[[package]] +name = "paramiko" +version = "3.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "bcrypt" }, + { name = "cryptography" }, + { name = "pynacl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/15/ad6ce226e8138315f2451c2aeea985bf35ee910afb477bae7477dc3a8f3b/paramiko-3.5.1.tar.gz", hash = "sha256:b2c665bc45b2b215bd7d7f039901b14b067da00f3a11e6640995fd58f2664822", size = 1566110 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/f8/c7bd0ef12954a81a1d3cea60a13946bd9a49a0036a5927770c461eade7ae/paramiko-3.5.1-py3-none-any.whl", hash = "sha256:43b9a0501fc2b5e70680388d9346cf252cfb7d00b0667c39e80eb43a408b8f61", size = 227298 }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, +] + [[package]] name = "pillow" version = "12.1.0" @@ -1716,6 +2145,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2d/71/64e9b1c7f04ae0027f788a248e6297d7fcc29571371fe7d45495a78172c0/pillow-12.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:75af0b4c229ac519b155028fa1be632d812a519abba9b46b20e50c6caa184f19", size = 7029809 }, ] +[[package]] +name = "pkginfo" +version = "1.12.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/03/e26bf3d6453b7fda5bd2b84029a426553bb373d6277ef6b5ac8863421f87/pkginfo-1.12.1.2.tar.gz", hash = "sha256:5cd957824ac36f140260964eba3c6be6442a8359b8c48f4adf90210f33a04b7b", size = 451828 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/3d/f4f2ba829efb54b6cd2d91349c7463316a9cc55a43fc980447416c88540f/pkginfo-1.12.1.2-py3-none-any.whl", hash = "sha256:c783ac885519cab2c34927ccfa6bf64b5a704d7c69afaea583dd9b7afe969343", size = 32717 }, +] + +[[package]] +name = "platformdirs" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731 }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -1725,6 +2172,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, ] +[[package]] +name = "protobuf" +version = "6.33.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/b8/cda15d9d46d03d4aa3a67cb6bffe05173440ccf86a9541afaf7ac59a1b6b/protobuf-6.33.4.tar.gz", hash = "sha256:dc2e61bca3b10470c1912d166fe0af67bfc20eb55971dcef8dfa48ce14f0ed91", size = 444346 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/be/24ef9f3095bacdf95b458543334d0c4908ccdaee5130420bf064492c325f/protobuf-6.33.4-cp310-abi3-win32.whl", hash = "sha256:918966612c8232fc6c24c78e1cd89784307f5814ad7506c308ee3cf86662850d", size = 425612 }, + { url = "https://files.pythonhosted.org/packages/31/ad/e5693e1974a28869e7cd244302911955c1cebc0161eb32dfa2b25b6e96f0/protobuf-6.33.4-cp310-abi3-win_amd64.whl", hash = "sha256:8f11ffae31ec67fc2554c2ef891dcb561dae9a2a3ed941f9e134c2db06657dbc", size = 436962 }, + { url = "https://files.pythonhosted.org/packages/66/15/6ee23553b6bfd82670207ead921f4d8ef14c107e5e11443b04caeb5ab5ec/protobuf-6.33.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2fe67f6c014c84f655ee06f6f66213f9254b3a8b6bda6cda0ccd4232c73c06f0", size = 427612 }, + { url = "https://files.pythonhosted.org/packages/2b/48/d301907ce6d0db75f959ca74f44b475a9caa8fcba102d098d3c3dd0f2d3f/protobuf-6.33.4-cp39-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:757c978f82e74d75cba88eddec479df9b99a42b31193313b75e492c06a51764e", size = 324484 }, + { url = "https://files.pythonhosted.org/packages/92/1c/e53078d3f7fe710572ab2dcffd993e1e3b438ae71cfc031b71bae44fcb2d/protobuf-6.33.4-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c7c64f259c618f0bef7bee042075e390debbf9682334be2b67408ec7c1c09ee6", size = 339256 }, + { url = "https://files.pythonhosted.org/packages/e8/8e/971c0edd084914f7ee7c23aa70ba89e8903918adca179319ee94403701d5/protobuf-6.33.4-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:3df850c2f8db9934de4cf8f9152f8dc2558f49f298f37f90c517e8e5c84c30e9", size = 323311 }, + { url = "https://files.pythonhosted.org/packages/75/b1/1dc83c2c661b4c62d56cc081706ee33a4fc2835bd90f965baa2663ef7676/protobuf-6.33.4-py3-none-any.whl", hash = "sha256:1fe3730068fcf2e595816a6c34fe66eeedd37d51d0400b72fabc848811fdc1bc", size = 170532 }, +] + [[package]] name = "psutil" version = "7.2.1" @@ -1753,6 +2215,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3e/73/2ce007f4198c80fcf2cb24c169884f833fe93fbc03d55d302627b094ee91/psutil-7.2.1-cp37-abi3-win_arm64.whl", hash = "sha256:0d67c1822c355aa6f7314d92018fb4268a76668a536f133599b91edd48759442", size = 133836 }, ] +[[package]] +name = "pyasn1" +version = "0.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/6e630dff89739fcd427e3f72b3d905ce0acb85a45d4ec3e2678718a3487f/pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b", size = 146586 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf", size = 83371 }, +] + [[package]] name = "pycparser" version = "2.23" @@ -1930,6 +2401,72 @@ crypto = [ { name = "cryptography" }, ] +[[package]] +name = "pynacl" +version = "1.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d9/9a/4019b524b03a13438637b11538c82781a5eda427394380381af8f04f467a/pynacl-1.6.2.tar.gz", hash = "sha256:018494d6d696ae03c7e656e5e74cdfd8ea1326962cc401bcf018f1ed8436811c", size = 3511692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/79/0e3c34dc3c4671f67d251c07aa8eb100916f250ee470df230b0ab89551b4/pynacl-1.6.2-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:622d7b07cc5c02c666795792931b50c91f3ce3c2649762efb1ef0d5684c81594", size = 390064 }, + { url = "https://files.pythonhosted.org/packages/eb/1c/23a26e931736e13b16483795c8a6b2f641bf6a3d5238c22b070a5112722c/pynacl-1.6.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d071c6a9a4c94d79eb665db4ce5cedc537faf74f2355e4d502591d850d3913c0", size = 809370 }, + { url = "https://files.pythonhosted.org/packages/87/74/8d4b718f8a22aea9e8dcc8b95deb76d4aae380e2f5b570cc70b5fd0a852d/pynacl-1.6.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe9847ca47d287af41e82be1dd5e23023d3c31a951da134121ab02e42ac218c9", size = 1408304 }, + { url = "https://files.pythonhosted.org/packages/fd/73/be4fdd3a6a87fe8a4553380c2b47fbd1f7f58292eb820902f5c8ac7de7b0/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:04316d1fc625d860b6c162fff704eb8426b1a8bcd3abacea11142cbd99a6b574", size = 844871 }, + 
{ url = "https://files.pythonhosted.org/packages/55/ad/6efc57ab75ee4422e96b5f2697d51bbcf6cdcc091e66310df91fbdc144a8/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44081faff368d6c5553ccf55322ef2819abb40e25afaec7e740f159f74813634", size = 1446356 }, + { url = "https://files.pythonhosted.org/packages/78/b7/928ee9c4779caa0a915844311ab9fb5f99585621c5d6e4574538a17dca07/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:a9f9932d8d2811ce1a8ffa79dcbdf3970e7355b5c8eb0c1a881a57e7f7d96e88", size = 826814 }, + { url = "https://files.pythonhosted.org/packages/f7/a9/1bdba746a2be20f8809fee75c10e3159d75864ef69c6b0dd168fc60e485d/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:bc4a36b28dd72fb4845e5d8f9760610588a96d5a51f01d84d8c6ff9849968c14", size = 1411742 }, + { url = "https://files.pythonhosted.org/packages/f3/2f/5e7ea8d85f9f3ea5b6b87db1d8388daa3587eed181bdeb0306816fdbbe79/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bffb6d0f6becacb6526f8f42adfb5efb26337056ee0831fb9a7044d1a964444", size = 801714 }, + { url = "https://files.pythonhosted.org/packages/06/ea/43fe2f7eab5f200e40fb10d305bf6f87ea31b3bbc83443eac37cd34a9e1e/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2fef529ef3ee487ad8113d287a593fa26f48ee3620d92ecc6f1d09ea38e0709b", size = 1372257 }, + { url = "https://files.pythonhosted.org/packages/4d/54/c9ea116412788629b1347e415f72195c25eb2f3809b2d3e7b25f5c79f13a/pynacl-1.6.2-cp314-cp314t-win32.whl", hash = "sha256:a84bf1c20339d06dc0c85d9aea9637a24f718f375d861b2668b2f9f96fa51145", size = 231319 }, + { url = "https://files.pythonhosted.org/packages/ce/04/64e9d76646abac2dccf904fccba352a86e7d172647557f35b9fe2a5ee4a1/pynacl-1.6.2-cp314-cp314t-win_amd64.whl", hash = "sha256:320ef68a41c87547c91a8b58903c9caa641ab01e8512ce291085b5fe2fcb7590", size = 244044 }, + { url = "https://files.pythonhosted.org/packages/33/33/7873dc161c6a06f43cda13dec67b6fe152cb2f982581151956fa5e5cdb47/pynacl-1.6.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d29bfe37e20e015a7d8b23cfc8bd6aa7909c92a1b8f41ee416bbb3e79ef182b2", size = 188740 }, + { url = "https://files.pythonhosted.org/packages/be/7b/4845bbf88e94586ec47a432da4e9107e3fc3ce37eb412b1398630a37f7dd/pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465", size = 388458 }, + { url = "https://files.pythonhosted.org/packages/1e/b4/e927e0653ba63b02a4ca5b4d852a8d1d678afbf69b3dbf9c4d0785ac905c/pynacl-1.6.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8845c0631c0be43abdd865511c41eab235e0be69c81dc66a50911594198679b0", size = 800020 }, + { url = "https://files.pythonhosted.org/packages/7f/81/d60984052df5c97b1d24365bc1e30024379b42c4edcd79d2436b1b9806f2/pynacl-1.6.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:22de65bb9010a725b0dac248f353bb072969c94fa8d6b1f34b87d7953cf7bbe4", size = 1399174 }, + { url = "https://files.pythonhosted.org/packages/68/f7/322f2f9915c4ef27d140101dd0ed26b479f7e6f5f183590fd32dfc48c4d3/pynacl-1.6.2-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46065496ab748469cdd999246d17e301b2c24ae2fdf739132e580a0e94c94a87", size = 835085 }, + { url = "https://files.pythonhosted.org/packages/3e/d0/f301f83ac8dbe53442c5a43f6a39016f94f754d7a9815a875b65e218a307/pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c", size = 1437614 }, + { url = "https://files.pythonhosted.org/packages/c4/58/fc6e649762b029315325ace1a8c6be66125e42f67416d3dbd47b69563d61/pynacl-1.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:26bfcd00dcf2cf160f122186af731ae30ab120c18e8375684ec2670dccd28130", size = 818251 }, + { url = "https://files.pythonhosted.org/packages/c9/a8/b917096b1accc9acd878819a49d3d84875731a41eb665f6ebc826b1af99e/pynacl-1.6.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c8a231e36ec2cab018c4ad4358c386e36eede0319a0c41fed24f840b1dac59f6", size = 1402859 }, + { url = "https://files.pythonhosted.org/packages/85/42/fe60b5f4473e12c72f977548e4028156f4d340b884c635ec6b063fe7e9a5/pynacl-1.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:68be3a09455743ff9505491220b64440ced8973fe930f270c8e07ccfa25b1f9e", size = 791926 }, + { url = "https://files.pythonhosted.org/packages/fa/f9/e40e318c604259301cc091a2a63f237d9e7b424c4851cafaea4ea7c4834e/pynacl-1.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b097553b380236d51ed11356c953bf8ce36a29a3e596e934ecabe76c985a577", size = 1363101 }, + { url = "https://files.pythonhosted.org/packages/48/47/e761c254f410c023a469284a9bc210933e18588ca87706ae93002c05114c/pynacl-1.6.2-cp38-abi3-win32.whl", hash = "sha256:5811c72b473b2f38f7e2a3dc4f8642e3a3e9b5e7317266e4ced1fba85cae41aa", size = 227421 }, + { url = "https://files.pythonhosted.org/packages/41/ad/334600e8cacc7d86587fe5f565480fde569dfb487389c8e1be56ac21d8ac/pynacl-1.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:62985f233210dee6548c223301b6c25440852e13d59a8b81490203c3227c5ba0", size = 239754 }, + { url = "https://files.pythonhosted.org/packages/29/7d/5945b5af29534641820d3bd7b00962abbbdfee84ec7e19f0d5b3175f9a31/pynacl-1.6.2-cp38-abi3-win_arm64.whl", hash = "sha256:834a43af110f743a754448463e8fd61259cd4ab5bbedcf70f9dabad1d28a394c", size = 184801 }, +] + +[[package]] +name = "pyopenssl" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/be/97b83a464498a79103036bc74d1038df4a7ef0e402cfaf4d5e113fb14759/pyopenssl-25.3.0.tar.gz", hash = "sha256:c981cb0a3fd84e8602d7afc209522773b94c1c2446a3c710a75b06fe1beae329", size = 184073 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/81/ef2b1dfd1862567d573a4fdbc9f969067621764fbb74338496840a1d2977/pyopenssl-25.3.0-py3-none-any.whl", hash = "sha256:1fda6fc034d5e3d179d39e59c1895c9faeaf40a79de5fc4cbbfbe0d36f4a77b6", size = 57268 }, +] + +[[package]] +name = "pyreadline3" +version = "3.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 }, +] + +[[package]] +name = "pysocks" +version = "1.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0", size = 284429 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725 }, +] + [[package]] name = "pytest" version = "9.0.2" @@ -1969,6 +2506,37 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230 }, ] +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432 }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103 }, + { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557 }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031 }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308 }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930 }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543 }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = 
"sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040 }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102 }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700 }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700 }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318 }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714 }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800 }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540 }, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -2183,6 +2751,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 }, ] +[package.optional-dependencies] +socks = [ + { name = "pysocks" }, +] + [[package]] name = "requests-oauthlib" version = "2.0.0" @@ -2370,6 +2943,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/58/5b/632a58724221ef03d78ab65062e82a1010e1bef8e8e0b9d7c6d7b8044841/safetensors-0.7.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:473b32699f4200e69801bf5abf93f1a4ecd432a70984df164fc22ccf39c4a6f3", size = 531885 }, ] +[[package]] +name = "secretstorage" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "jeepney" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/46/f5af3402b579fd5e11573ce652019a67074317e18c1935cc0b4ba9b35552/secretstorage-3.5.0-py3-none-any.whl", hash = "sha256:0ce65888c0725fcb2c5bc0fdb8e5438eece02c523557ea40ce0703c266248137", size = 15554 }, +] + +[[package]] +name = "sentry-sdk" +version = "2.51.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = 
"urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/9f/094bbb6be5cf218ab6712c6528310687f3d3fe8818249fcfe1d74192f7c5/sentry_sdk-2.51.0.tar.gz", hash = "sha256:b89d64577075fd8c13088bc3609a2ce77a154e5beb8cba7cc16560b0539df4f7", size = 407447 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/da/df379404d484ca9dede4ad8abead5de828cdcff35623cd44f0351cf6869c/sentry_sdk-2.51.0-py2.py3-none-any.whl", hash = "sha256:e21016d318a097c2b617bb980afd9fc737e1efc55f9b4f0cdc819982c9717d5f", size = 431426 }, +] + [[package]] name = "setuptools" version = "80.9.0" @@ -2388,6 +2987,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, ] +[[package]] +name = "smmap" +version = "5.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303 }, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -2421,6 +3029,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353 }, ] +[[package]] +name = "tabulate" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 }, +] + +[[package]] +name = "tenacity" +version = "9.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248 }, +] + [[package]] name = "timm" version = "1.0.24" @@ -2705,6 +3331,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584 }, ] +[[package]] +name = "wandb" +version = "0.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "gitpython" }, + { name = 
"packaging" }, + { name = "platformdirs" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "sentry-sdk" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/7e/aad6e943012ea4d88f3a037f1a5a7c6898263c60fbef8c9cdb95a8ff9fd9/wandb-0.24.0.tar.gz", hash = "sha256:4715a243b3d460b6434b9562e935dfd9dfdf5d6e428cfb4c3e7ce4fd44460ab3", size = 44197947 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/8a/efec186dcc5dcf3c806040e3f33e58997878b2d30b87aa02b26f046858b6/wandb-0.24.0-py3-none-macosx_12_0_arm64.whl", hash = "sha256:aa9777398ff4b0f04c41359f7d1b95b5d656cb12c37c63903666799212e50299", size = 21464901 }, + { url = "https://files.pythonhosted.org/packages/ed/84/fadf0d5f1d86c3ba662d2b33a15d2b1f08ff1e4e196c77e455f028b0fda2/wandb-0.24.0-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:0423fbd58c3926949724feae8aab89d20c68846f9f4f596b80f9ffe1fc298130", size = 22697817 }, + { url = "https://files.pythonhosted.org/packages/6e/5f/e3124e68d02b30c62856175ce714e07904730be06eecb00f66bb1a59aacf/wandb-0.24.0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:2b25fc0c123daac97ed32912ac55642c65013cc6e3a898e88ca2d917fc8eadc0", size = 21118798 }, + { url = "https://files.pythonhosted.org/packages/22/a1/8d68a914c030e897c306c876d47c73aa5d9ca72be608971290d3a5749570/wandb-0.24.0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:9485344b4667944b5b77294185bae8469cfa4074869bec0e74f54f8492234cc2", size = 22849954 }, + { url = "https://files.pythonhosted.org/packages/e9/f8/3e68841a4282a4fb6a8935534e6064acc6c9708e8fb76953ec73bbc72a5e/wandb-0.24.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:51b2b9a9d7d6b35640f12a46a48814fd4516807ad44f586b819ed6560f8de1fd", size = 21160339 }, + { url = "https://files.pythonhosted.org/packages/16/e5/d851868ce5b4b437a7cc90405979cd83809790e4e2a2f1e454f63f116e52/wandb-0.24.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:11f7e7841f31eff82c82a677988889ad3aa684c6de61ff82145333b5214ec860", size = 22936978 }, + { url = "https://files.pythonhosted.org/packages/d2/34/43b7f18870051047ce6fe18e7eb24ba7ebdc71663a8f1c58e31e855eb8ac/wandb-0.24.0-py3-none-win32.whl", hash = "sha256:42af348998b00d4309ae790c5374040ac6cc353ab21567f4e29c98c9376dee8e", size = 22118243 }, + { url = "https://files.pythonhosted.org/packages/a1/92/909c81173cf1399111f57f9ca5399a8f165607b024e406e080178c878f70/wandb-0.24.0-py3-none-win_amd64.whl", hash = "sha256:32604eddcd362e1ed4a2e2ce5f3a239369c4a193af223f3e66603481ac91f336", size = 22118246 }, + { url = "https://files.pythonhosted.org/packages/87/85/a845aefd9c2285f98261fa6ffa0a14466366c1ac106d35bc84b654c0ad7f/wandb-0.24.0-py3-none-win_arm64.whl", hash = "sha256:e0f2367552abfca21b0f3a03405fbf48f1e14de9846e70f73c6af5da57afd8ef", size = 20077678 }, +] + [[package]] name = "wcwidth" version = "0.2.14"