diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..4150dd3 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +.git +.context +node_modules +dist +tests +coverage +pnpm-debug.log +*.local diff --git a/.env.example b/.env.example index bc70500..b5c9511 100644 --- a/.env.example +++ b/.env.example @@ -1,2 +1,29 @@ -# Used in testing +# Core DWS API NUTRIENT_DWS_API_KEY=your-nutrient-dws-api-key +DWS_API_BASE_URL=https://api.nutrient.io + +# Transport +MCP_TRANSPORT=stdio +PORT=3000 +MCP_HOST=127.0.0.1 +MCP_ALLOWED_HOSTS= +MCP_DEBUG_LOGGING=false + +# HTTP auth mode (static|jwt) +AUTH_MODE=static + +# Static auth options (HTTP mode) +MCP_BEARER_TOKEN= +MCP_BEARER_CLIENT_ID=default-client +MCP_BEARER_SCOPES=mcp:invoke +MCP_BEARER_ALLOWED_TOOLS= +MCP_BEARER_TOKENS_JSON= + +# OAuth/JWT mode options +RESOURCE_URL=https://mcp.nutrient.io/mcp +AUTH_SERVER_URL=https://api.nutrient.io +JWKS_URL= +ISSUER=https://api.nutrient.io + +# Optional sandbox root +SANDBOX_PATH= diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..9619acb --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,44 @@ +name: Tests + +on: + push: + branches: + - '**' + pull_request: + +concurrency: + group: tests-${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test: + name: Lint, Build, Test + runs-on: ubuntu-latest + timeout-minutes: 20 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: pnpm + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Lint + run: pnpm exec eslint src tests + + - name: Build + run: pnpm run build + + - name: Run tests + run: pnpm run test:ci diff --git a/Dockerfile b/Dockerfile index 007c75b..95c820b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,5 +23,6 @@ COPY 
--from=builder /app/dist ./dist RUN chown -R appuser:appgroup /app USER appuser -# MCP runs over stdio +# MCP supports stdio and HTTP transports +EXPOSE 3000 ENTRYPOINT ["node", "dist/index.js"] diff --git a/README.md b/README.md index 463d4c7..c6046cf 100644 --- a/README.md +++ b/README.md @@ -16,20 +16,20 @@ A Model Context Protocol (MCP) server that connects AI assistants to the [Nutrie Once configured, you (or your AI agent) can process documents through natural language: -**You:** *"Merge report-q1.pdf and report-q2.pdf into a single document"* -**AI:** *"Done! I've merged both reports into combined-report.pdf (24 pages total)."* +**You:** _"Merge report-q1.pdf and report-q2.pdf into a single document"_ +**AI:** _"Done! I've merged both reports into combined-report.pdf (24 pages total)."_ -**You:** *"Redact all social security numbers and email addresses from application.pdf"* -**AI:** *"I found and redacted 5 SSNs and 3 email addresses. The redacted version is saved as application-redacted.pdf."* +**You:** _"Redact all social security numbers and email addresses from application.pdf"_ +**AI:** _"I found and redacted 5 SSNs and 3 email addresses. The redacted version is saved as application-redacted.pdf."_ -**You:** *"Digitally sign this contract with a visible signature on page 3"* -**AI:** *"I've applied a PAdES-compliant digital signature to contract.pdf. The signed document is saved as contract-signed.pdf."* +**You:** _"Digitally sign this contract with a visible signature on page 3"_ +**AI:** _"I've applied a PAdES-compliant digital signature to contract.pdf. 
The signed document is saved as contract-signed.pdf."_ -**You:** *"Convert this PDF to markdown"* -**AI:** *"Here's the markdown content extracted from your document..."* +**You:** _"Convert this PDF to markdown"_ +**AI:** _"Here's the markdown content extracted from your document..."_ -**You:** *"OCR this scanned document in German and extract the text"* -**AI:** *"I've processed the scan with German OCR. Here's the extracted text..."* +**You:** _"OCR this scanned document in German and extract the text"_ +**AI:** _"I've processed the scan with German OCR. Here's the extracted text..."_ ## Quick Start @@ -57,11 +57,11 @@ Open Settings → Developer → Edit Config, then add: "args": ["-y", "@nutrient-sdk/dws-mcp-server"], "env": { "NUTRIENT_DWS_API_KEY": "YOUR_API_KEY_HERE", - "SANDBOX_PATH": "/your/sandbox/directory" + "SANDBOX_PATH": "/your/sandbox/directory" - } - } - } + } + } + } } ``` @@ -80,13 +80,14 @@ Create `.cursor/mcp.json` in your project root: "args": ["-y", "@nutrient-sdk/dws-mcp-server"], "env": { "NUTRIENT_DWS_API_KEY": "YOUR_API_KEY_HERE", - "SANDBOX_PATH": "/your/project/documents" + "SANDBOX_PATH": "/your/project/documents" - } - } - } + } + } + } } ``` +
@@ -102,13 +103,14 @@ Add to `~/.codeium/windsurf/mcp_config.json`: "args": ["-y", "@nutrient-sdk/dws-mcp-server"], "env": { "NUTRIENT_DWS_API_KEY": "YOUR_API_KEY_HERE", - "SANDBOX_PATH": "/your/sandbox/directory" + "SANDBOX_PATH": "/your/sandbox/directory" - } - } - } + } + } + } } ``` +
@@ -132,6 +134,7 @@ Add to `.vscode/settings.json` in your project: } } ``` +
@@ -142,6 +145,7 @@ Any MCP-compatible client can connect using stdio transport: ```bash NUTRIENT_DWS_API_KEY=your_key SANDBOX_PATH=/your/path npx @nutrient-sdk/dws-mcp-server ``` +
### 3. Restart Your AI Client @@ -154,26 +158,26 @@ Drop documents into your sandbox directory and start giving instructions! ## Available Tools -| Tool | Description | -|------|-------------| +| Tool | Description | +| ---------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | **document_processor** | All-in-one document processing: merge PDFs, convert formats, apply OCR, watermark, rotate, redact, flatten annotations, extract text/tables/key-value pairs, and more | -| **document_signer** | Digitally sign PDFs with PAdES-compliant CMS or CAdES signatures, with customizable visible/invisible signature appearances | -| **sandbox_file_tree** | Browse files in the sandbox directory (when sandbox mode is enabled) | -| **directory_tree** | Browse directory contents (when sandbox mode is disabled) | +| **document_signer** | Digitally sign PDFs with PAdES-compliant CMS or CAdES signatures, with customizable visible/invisible signature appearances | +| **sandbox_file_tree** | Browse files in the sandbox directory (when sandbox mode is enabled) | +| **directory_tree** | Browse directory contents (when sandbox mode is disabled) | ### Document Processor Capabilities -| Feature | Description | -|---------|-------------| -| Document Creation | Merge PDFs, Office docs (DOCX, XLSX, PPTX), and images into a single document | -| Format Conversion | PDF ↔ DOCX, images (PNG, JPEG, WebP), PDF/A, PDF/UA, HTML, Markdown | -| Editing | Watermark (text/image), rotate pages, flatten annotations | -| Security | Redact sensitive data (SSNs, credit cards, emails, etc.), password protection, permission control | -| Data Extraction | Extract text, tables, or key-value pairs as structured JSON | -| OCR | Multi-language optical character recognition for scanned documents | -| Optimization | Compress and linearize PDFs without quality loss | -| Annotations | 
Import XFDF annotations, flatten annotations | -| Digital Signing | PAdES-compliant CMS and CAdES digital signatures (via document_signer tool) | +| Feature | Description | +| ----------------- | ------------------------------------------------------------------------------------------------- | +| Document Creation | Merge PDFs, Office docs (DOCX, XLSX, PPTX), and images into a single document | +| Format Conversion | PDF ↔ DOCX, images (PNG, JPEG, WebP), PDF/A, PDF/UA, HTML, Markdown | +| Editing | Watermark (text/image), rotate pages, flatten annotations | +| Security | Redact sensitive data (SSNs, credit cards, emails, etc.), password protection, permission control | +| Data Extraction | Extract text, tables, or key-value pairs as structured JSON | +| OCR | Multi-language optical character recognition for scanned documents | +| Optimization | Compress and linearize PDFs without quality loss | +| Annotations | Import XFDF annotations, flatten annotations | +| Digital Signing | PAdES-compliant CMS and CAdES digital signatures (via document_signer tool) | ## Use with AI Agent Frameworks @@ -212,6 +216,7 @@ npx @nutrient-sdk/dws-mcp-server ``` When sandbox mode is enabled: + - Relative paths resolve relative to the sandbox directory - All input file paths are validated to ensure they reside in the sandbox - Processed files are saved within the sandbox @@ -222,25 +227,42 @@ When sandbox mode is enabled: Processed files are saved to a location determined by the AI. To guide output placement, use natural language (e.g., "save the result to `output/result.pdf`") or create an `output` directory in your sandbox. 
+### Authentication + +The server authenticates to the Nutrient DWS API (`https://api.nutrient.io`) using one of: + +| Method | When | Config | +|--------|------|--------| +| **API key** | `NUTRIENT_DWS_API_KEY` is set | Static key passed as Bearer token to DWS API | +| **OAuth browser flow** | No API key set | Opens browser for Nutrient OAuth consent, caches token locally | + +When no API key is configured, the server opens a browser-based OAuth flow on the first tool call (similar to `gh auth login`). Tokens are cached at `~/.nutrient/credentials.json` and refreshed automatically. + ### Environment Variables -| Variable | Required | Description | -|----------|----------|-------------| -| `NUTRIENT_DWS_API_KEY` | Yes | Your Nutrient DWS API key ([get one free](https://dashboard.nutrient.io/sign_up/)) | -| `SANDBOX_PATH` | Recommended | Directory to restrict file operations to | +| Variable | Required | Description | +| ---------------------- | ----------- | ---------------------------------------------------------------------------------- | +| `NUTRIENT_DWS_API_KEY` | No* | Nutrient DWS API key ([get one free](https://dashboard.nutrient.io/sign_up/)) | +| `SANDBOX_PATH` | Recommended | Directory to restrict file operations to | +| `LOG_LEVEL` | No | Console log level for Winston logger (`debug` default) | + +\* If omitted, the server uses an OAuth browser flow to authenticate with the Nutrient API. 
## Troubleshooting **Server not appearing in Claude Desktop?** + - Ensure Node.js 18+ is installed (`node --version`) - Check the config file path is correct for your OS - Restart Claude Desktop completely (check Task Manager/Activity Monitor) **"API key invalid" errors?** + - Verify your API key at [dashboard.nutrient.io](https://dashboard.nutrient.io) - Ensure the key is set correctly in the `env` section (no extra spaces) **Files not found?** + - Check that `SANDBOX_PATH` points to an existing directory - Ensure your documents are inside the sandbox directory - Use the `sandbox_file_tree` tool to verify visible files diff --git a/docs/testing.md b/docs/testing.md new file mode 100644 index 0000000..1660f74 --- /dev/null +++ b/docs/testing.md @@ -0,0 +1,172 @@ +# DWS MCP Server — Local Testing + +This guide covers local testing against both production DWS (`api.nutrient.io`) and local DWS debug builds. Docker/deployment steps are intentionally omitted. + +## Prerequisites + +- Node.js 18+ +- pnpm +- Project dependencies installed: + +```bash +pnpm install +``` + +## Run Commands + +- `pnpm run dev`: hot reload for local development (recommended) +- `pnpm run build && pnpm start`: production-like local run from `dist/` + +--- + +## stdio Transport + +### With API key + +```bash +export NUTRIENT_DWS_API_KEY=your_dws_api_key +pnpm run dev +``` + +### With OAuth browser flow + +When no API key is set, the server opens a browser for Nutrient OAuth consent on the first tool call. Tokens are cached at `~/.nutrient/credentials.json`. + +```bash +pnpm run dev +``` + +To test against a local DWS auth server instead of production: + +```bash +export AUTH_SERVER_URL=http://localhost:4000 +export DWS_API_BASE_URL=http://localhost:4000 +pnpm run dev +``` + +The OAuth flow will use `{AUTH_SERVER_URL}/oauth/authorize` and `{AUTH_SERVER_URL}/oauth/token`. The `CLIENT_ID` env var can override the default client ID (`nutrient-dws-mcp-server`). 
+ +--- + +## HTTP Transport (JWT/OAuth) + +HTTP transport validates OAuth access tokens via JWKS. The user's token is forwarded directly to the DWS API — no `NUTRIENT_DWS_API_KEY` needed. + +### Against production DWS + +All auth/JWKS settings default to `api.nutrient.io`, so minimal config is: + +```bash +export MCP_TRANSPORT=http +export MCP_DEBUG_LOGGING=true + +pnpm run dev +``` + +The MCP client (Claude Code, MCP Inspector) discovers the auth server via `/.well-known/oauth-protected-resource`, registers via DCR, and redirects the user to sign in at `api.nutrient.io`. The user's OAuth access token is forwarded directly to the DWS API. + +### Against local DWS debug build + +For testing against the Louisville `hosted` app (port 4000): + +```bash +export MCP_TRANSPORT=http +export PORT=3000 +export MCP_HOST=127.0.0.1 + +export DWS_API_BASE_URL=http://localhost:4000 +export AUTH_SERVER_URL=http://localhost:4000 +export JWKS_URL=http://localhost:4000/.well-known/jwks.json +export ISSUER=http://localhost:4000 + +export MCP_DEBUG_LOGGING=true + +pnpm run dev +``` + +This requires the local DWS instance to expose: + +- `/.well-known/oauth-authorization-server` — OAuth server metadata +- `/.well-known/jwks.json` — JWKS for token validation +- `/oauth/register` — Dynamic Client Registration (DCR) +- `/oauth/token` — token endpoint + +The DWS `hosted` app seeds a default MCP client (`dws-mcp-server`) but in JWT-forward mode no server-side client credentials are needed — the user's OAuth token is passed through directly. + +### Verify + +```bash +# OAuth discovery +curl http://127.0.0.1:3000/.well-known/oauth-protected-resource + +# Unauthenticated (expect 401 + WWW-Authenticate) +curl -X POST http://127.0.0.1:3000/mcp \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -d '{"jsonrpc":"2.0","id":1,"method":"tools/list","params":{}}' + +# Authenticated (use a valid JWT from your auth server) +export ACCESS_TOKEN=eyJ... 
+curl -X POST http://127.0.0.1:3000/mcp \ + -H "Authorization: Bearer $ACCESS_TOKEN" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -d '{"jsonrpc":"2.0","id":1,"method":"tools/list","params":{}}' +``` + +--- + +## Environment Variable Reference + +### Common + +| Variable | Default | Description | +|----------------------|----------------------------|------------------------------------------| +| `MCP_TRANSPORT` | `stdio` | `stdio` or `http` | +| `PORT` | `3000` | HTTP port | +| `MCP_HOST` | `127.0.0.1` | HTTP bind host | +| `MCP_ALLOWED_HOSTS` | — | Comma/space-separated allowed hostnames | +| `DWS_API_BASE_URL` | `https://api.nutrient.io` | DWS API base URL | +| `NUTRIENT_DWS_API_KEY` | — | DWS API key (optional in stdio+OAuth mode) | +| `SANDBOX_PATH` | — | Filesystem sandbox root | +| `MCP_DEBUG_LOGGING` | — | Request/response logging (`true`/`1`) | +| `LOG_LEVEL` | `debug` | Winston logger level | + +### HTTP auth (JWT/OAuth) + +| Variable | Default | Description | +|-----------------------|------------------------------------------------------|------------------------------------| +| `AUTH_SERVER_URL` | `https://api.nutrient.io` | Authorization server base URL | +| `JWKS_URL` | `https://api.nutrient.io/.well-known/jwks.json` | JWKS endpoint | +| `RESOURCE_URL` | `http://localhost:3000/mcp` | Protected resource URL | +| `ISSUER` | `AUTH_SERVER_URL` | JWT issuer | +| `CLIENT_ID` | `nutrient-dws-mcp-server` | OAuth client ID (stdio OAuth flow) | + +### Audience matching (JWT mode) + +Accepted audience values: `dws-mcp`, plus `RESOURCE_URL` variants (origin, path, with/without trailing slash). + +--- + +## MCP Inspector + +The [MCP Inspector](https://github.com/modelcontextprotocol/inspector) is a browser-based tool for interactively testing and debugging MCP servers. + +```bash +npx @modelcontextprotocol/inspector +``` + +Opens at `http://localhost:6274`. Point it at `http://localhost:3000/mcp`. 
+ +--- + +## Common Failures + +| Error | Cause | Fix | +|-------|-------|-----| +| `Cannot POST /` | Client points to `/` instead of `/mcp` | Use `http://localhost:3000/mcp` | +| `401 invalid_token` | Missing/invalid bearer or JWT | Check token value or JWKS config | +| `unexpected "aud" claim value` | Token audience mismatch | Check `RESOURCE_URL` matches your MCP endpoint | +| `401` from DWS on tool calls | Forwarded OAuth token not accepted by DWS | Ensure auth server issues DWS-compatible tokens | +| `Protected resource does not match` | `RESOURCE_URL` doesn't match client's URL | Use public URL (e.g. ngrok), not localhost | +| Browser doesn't open (stdio OAuth) | Running in headless/CI | Set `NUTRIENT_DWS_API_KEY` instead | diff --git a/package.json b/package.json index 11bda74..1f76805 100644 --- a/package.json +++ b/package.json @@ -38,27 +38,39 @@ "LICENSE" ], "scripts": { + "dev": "tsx watch src/index.ts", + "start": "node dist/index.js", "build": "tsc && shx chmod +x dist/index.js", "format": "prettier --write .", "lint": "eslint .", "pretest": "tsc --project tsconfig.test.json --noEmit", "test": "vitest run", + "test:ci": "vitest run --exclude tests/build-api-examples.test.ts --exclude tests/signing-api-examples.test.ts", + "test:integration": "vitest run tests/build-api-examples.test.ts tests/signing-api-examples.test.ts", "test:watch": "vitest", "clean": "shx rm -rf dist" }, "dependencies": { "@modelcontextprotocol/sdk": "^1.25.2", "axios": "^1.13.2", + "express": "^5.2.1", "form-data": "^4.0.5", + "jose": "^6.2.1", + "open": "^11.0.0", + "winston": "^3.19.0", "zod": "^3.25.76" }, "devDependencies": { "@eslint/js": "^9.39.2", + "@types/express": "^5.0.6", "@types/node": "^22.19.5", + "@types/supertest": "^7.2.0", "dotenv": "^16.6.1", "eslint": "^9.39.2", "prettier": "^3.7.4", "shx": "^0.4.0", + "supertest": "^7.2.2", + "tsx": "^4.21.0", "typescript": "^5.9.3", "typescript-eslint": "^8.52.0", "vitest": "^4.0.16" diff --git a/pnpm-lock.yaml 
b/pnpm-lock.yaml index 7a601ce..7e9165b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14,9 +14,21 @@ importers: axios: specifier: ^1.13.2 version: 1.13.2 + express: + specifier: ^5.2.1 + version: 5.2.1 form-data: specifier: ^4.0.5 version: 4.0.5 + jose: + specifier: ^6.2.1 + version: 6.2.1 + open: + specifier: ^11.0.0 + version: 11.0.0 + winston: + specifier: ^3.19.0 + version: 3.19.0 zod: specifier: ^3.25.76 version: 3.25.76 @@ -24,9 +36,15 @@ importers: '@eslint/js': specifier: ^9.39.2 version: 9.39.2 + '@types/express': + specifier: ^5.0.6 + version: 5.0.6 '@types/node': specifier: ^22.19.5 version: 22.19.5 + '@types/supertest': + specifier: ^7.2.0 + version: 7.2.0 dotenv: specifier: ^16.6.1 version: 16.6.1 @@ -39,6 +57,12 @@ importers: shx: specifier: ^0.4.0 version: 0.4.0 + supertest: + specifier: ^7.2.2 + version: 7.2.2 + tsx: + specifier: ^4.21.0 + version: 4.21.0 typescript: specifier: ^5.9.3 version: 5.9.3 @@ -47,10 +71,17 @@ importers: version: 8.52.0(eslint@9.39.2)(typescript@5.9.3) vitest: specifier: ^4.0.16 - version: 4.0.16(@types/node@22.19.5) + version: 4.0.16(@types/node@22.19.5)(tsx@4.21.0) packages: + '@colors/colors@1.6.0': + resolution: {integrity: sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==} + engines: {node: '>=0.1.90'} + + '@dabh/diagnostics@2.0.8': + resolution: {integrity: sha512-R4MSXTVnuMzGD7bzHdW2ZhhdPC/igELENcq5IjEverBvq5hn1SXCWcsi6eSsdWP0/Ur+SItRRjAktmdoX/8R/Q==} + '@esbuild/aix-ppc64@0.27.2': resolution: {integrity: sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==} engines: {node: '>=18'} @@ -280,6 +311,10 @@ packages: '@cfworker/json-schema': optional: true + '@noble/hashes@1.8.0': + resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==} + engines: {node: ^14.21.3 || >=16} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: 
sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -292,6 +327,9 @@ packages: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} + '@paralleldrive/cuid2@2.3.1': + resolution: {integrity: sha512-XO7cAxhnTZl0Yggq6jOgjiOHhbgcO4NqFqwSmQpjK3b6TEE6Uj/jfSk6wzYyemh3+I0sHirKSetjQwn5cZktFw==} + '@rollup/rollup-android-arm-eabi@4.55.1': resolution: {integrity: sha512-9R0DM/ykwfGIlNu6+2U09ga0WXeZ9MRC2Ter8jnz8415VbuIykVuc6bhdrbORFZANDmTDvq26mJrEVTl8TdnDg==} cpu: [arm] @@ -417,24 +455,69 @@ packages: cpu: [x64] os: [win32] + '@so-ric/colorspace@1.1.6': + resolution: {integrity: sha512-/KiKkpHNOBgkFJwu9sh48LkHSMYGyuTcSFK/qMBdnOAlrRJzRSXAOFB5qwzaVQuDl8wAvHVMkaASQDReTahxuw==} + '@standard-schema/spec@1.1.0': resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + '@types/body-parser@1.19.6': + resolution: {integrity: sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==} + '@types/chai@5.2.3': resolution: {integrity: sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==} + '@types/connect@3.4.38': + resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==} + + '@types/cookiejar@2.1.5': + resolution: {integrity: sha512-he+DHOWReW0nghN24E1WUqM0efK4kI9oTqDm6XmK8ZPe2djZ90BSNdGnIyCLzCPw7/pogPlGbzI2wHGGmi4O/Q==} + '@types/deep-eql@4.0.2': resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==} '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + '@types/express-serve-static-core@5.1.1': + resolution: {integrity: 
sha512-v4zIMr/cX7/d2BpAEX3KNKL/JrT1s43s96lLvvdTmza1oEvDudCqK9aF/djc/SWgy8Yh0h30TZx5VpzqFCxk5A==} + + '@types/express@5.0.6': + resolution: {integrity: sha512-sKYVuV7Sv9fbPIt/442koC7+IIwK5olP1KWeD88e/idgoJqDm3JV/YUiPwkoKK92ylff2MGxSz1CSjsXelx0YA==} + + '@types/http-errors@2.0.5': + resolution: {integrity: sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==} + '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + '@types/methods@1.1.4': + resolution: {integrity: sha512-ymXWVrDiCxTBE3+RIrrP533E70eA+9qu7zdWoHuOmGujkYtzf4HQF96b8nwHLqhuf4ykX61IGRIB38CC6/sImQ==} + '@types/node@22.19.5': resolution: {integrity: sha512-HfF8+mYcHPcPypui3w3mvzuIErlNOh2OAG+BCeBZCEwyiD5ls2SiCwEyT47OELtf7M3nHxBdu0FsmzdKxkN52Q==} + '@types/qs@6.15.0': + resolution: {integrity: sha512-JawvT8iBVWpzTrz3EGw9BTQFg3BQNmwERdKE22vlTxawwtbyUSlMppvZYKLZzB5zgACXdXxbD3m1bXaMqP/9ow==} + + '@types/range-parser@1.2.7': + resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==} + + '@types/send@1.2.1': + resolution: {integrity: sha512-arsCikDvlU99zl1g69TcAB3mzZPpxgw0UQnaHeC1Nwb015xp8bknZv5rIfri9xTOcMuaVgvabfIRA7PSZVuZIQ==} + + '@types/serve-static@2.2.0': + resolution: {integrity: sha512-8mam4H1NHLtu7nmtalF7eyBH14QyOASmcxHhSfEoRyr0nP/YdoesEtU+uSRvMe96TW/HPTtkoKqQLl53N7UXMQ==} + + '@types/superagent@8.1.9': + resolution: {integrity: sha512-pTVjI73witn+9ILmoJdajHGW2jkSaOzhiFYF1Rd3EQ94kymLqB9PjD9ISg7WaALC7+dCHT0FGe9T2LktLq/3GQ==} + + '@types/supertest@7.2.0': + resolution: {integrity: sha512-uh2Lv57xvggst6lCqNdFAmDSvoMG7M/HDtX4iUCquxQ5EGPtaPM5PL5Hmi7LCvOG8db7YaCPNJEeoI8s/WzIQw==} + + '@types/triple-beam@1.3.5': + resolution: {integrity: sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==} + '@typescript-eslint/eslint-plugin@8.52.0': resolution: {integrity: 
sha512-okqtOgqu2qmZJ5iN4TWlgfF171dZmx2FzdOv2K/ixL2LZWDStL8+JgQerI2sa8eAEfoydG9+0V96m7V+P8yE1Q==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -558,10 +641,16 @@ packages: argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + asap@2.0.6: + resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} + assertion-error@2.0.1: resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} engines: {node: '>=12'} + async@3.2.6: + resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} + asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} @@ -585,6 +674,10 @@ packages: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} + bundle-name@4.1.0: + resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==} + engines: {node: '>=18'} + bytes@3.1.2: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} @@ -613,13 +706,32 @@ packages: resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} engines: {node: '>=7.0.0'} + color-convert@3.1.3: + resolution: {integrity: sha512-fasDH2ont2GqF5HpyO4w0+BcewlhHEZOFn9c1ckZdHpJ56Qb7MHhH/IcJZbBGgvdtwdwNbLvxiBEdg336iA9Sg==} + engines: {node: '>=14.6'} + color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + color-name@2.1.0: + resolution: {integrity: 
sha512-1bPaDNFm0axzE4MEAzKPuqKWeRaT43U/hyxKPBdqTfmPF+d6n7FSoTFxLVULUJOmiLp01KjhIPPH+HrXZJN4Rg==} + engines: {node: '>=12.20'} + + color-string@2.1.4: + resolution: {integrity: sha512-Bb6Cq8oq0IjDOe8wJmi4JeNn763Xs9cfrBcaylK1tPypWzyoy2G3l90v9k64kjphl/ZJjPIShFztenRomi8WTg==} + engines: {node: '>=18'} + + color@5.0.3: + resolution: {integrity: sha512-ezmVcLR3xAVp8kYOm4GS45ZLLgIE6SPAFoduLr6hTDajwb3KZ2F46gulK3XpcwRFb5KKGCSezCBAY4Dw4HsyXA==} + engines: {node: '>=18'} + combined-stream@1.0.8: resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} engines: {node: '>= 0.8'} + component-emitter@1.3.1: + resolution: {integrity: sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ==} + concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} @@ -639,6 +751,9 @@ packages: resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} engines: {node: '>= 0.6'} + cookiejar@2.1.4: + resolution: {integrity: sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==} + cors@2.8.5: resolution: {integrity: sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==} engines: {node: '>= 0.10'} @@ -663,6 +778,18 @@ packages: deep-is@0.1.4: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + default-browser-id@5.0.1: + resolution: {integrity: sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==} + engines: {node: '>=18'} + + default-browser@5.5.0: + resolution: {integrity: sha512-H9LMLr5zwIbSxrmvikGuI/5KGhZ8E2zH3stkMgM5LpOWDutGM2JZaj460Udnf1a+946zc7YBgrqEWwbk7zHvGw==} + engines: {node: '>=18'} + + define-lazy-prop@3.0.0: + resolution: {integrity: 
sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==} + engines: {node: '>=12'} + delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} @@ -671,6 +798,9 @@ packages: resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} engines: {node: '>= 0.8'} + dezalgo@1.0.4: + resolution: {integrity: sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==} + dotenv@16.6.1: resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==} engines: {node: '>=12'} @@ -682,6 +812,9 @@ packages: ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} + enabled@2.0.0: + resolution: {integrity: sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==} + encodeurl@2.0.0: resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==} engines: {node: '>= 0.8'} @@ -808,6 +941,9 @@ packages: fast-levenshtein@2.0.6: resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + fast-safe-stringify@2.1.1: + resolution: {integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==} + fast-uri@3.1.0: resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} @@ -823,6 +959,9 @@ packages: picomatch: optional: true + fecha@4.2.3: + resolution: {integrity: sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==} + file-entry-cache@8.0.0: resolution: {integrity: 
sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} engines: {node: '>=16.0.0'} @@ -846,6 +985,9 @@ packages: flatted@3.3.3: resolution: {integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==} + fn.name@1.1.0: + resolution: {integrity: sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==} + follow-redirects@1.15.11: resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} engines: {node: '>=4.0'} @@ -859,6 +1001,10 @@ packages: resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==} engines: {node: '>= 6'} + formidable@3.5.4: + resolution: {integrity: sha512-YikH+7CUTOtP44ZTnUhR7Ic2UASBPOqmaRkRKxRbywPTe5VxF7RRCck4af9wutiZ/QKM5nME9Bie2fFaPz5Gug==} + engines: {node: '>=14.0.0'} + forwarded@0.2.0: resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} engines: {node: '>= 0.6'} @@ -887,6 +1033,9 @@ packages: resolution: {integrity: sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==} engines: {node: '>=6'} + get-tsconfig@4.13.6: + resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==} + glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} @@ -962,6 +1111,11 @@ packages: resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==} engines: {node: '>= 0.4'} + is-docker@3.0.0: + resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + hasBin: true + 
is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} @@ -970,6 +1124,15 @@ packages: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} + is-in-ssh@1.0.0: + resolution: {integrity: sha512-jYa6Q9rH90kR1vKB6NM7qqd1mge3Fx4Dhw5TVlK1MUBqhEOuCagrEHMevNuCcbECmXZ0ThXkRm+Ymr51HwEPAw==} + engines: {node: '>=20'} + + is-inside-container@1.0.0: + resolution: {integrity: sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==} + engines: {node: '>=14.16'} + hasBin: true + is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} @@ -981,11 +1144,19 @@ packages: resolution: {integrity: sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==} engines: {node: '>=0.10.0'} + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + + is-wsl@3.1.1: + resolution: {integrity: sha512-e6rvdUCiQCAuumZslxRJWR/Doq4VpPR82kqclvcS0efgt430SlGIk05vdCN58+VrzgtIcfNODjozVielycD4Sw==} + engines: {node: '>=16'} + isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - jose@6.1.3: - resolution: {integrity: sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ==} + jose@6.2.1: + resolution: {integrity: sha512-jUaKr1yrbfaImV7R2TN/b3IcZzsw38/chqMpo2XJ7i2F8AfM/lA4G1goC3JVEwg0H7UldTmSt3P68nt31W7/mw==} js-yaml@4.1.1: resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} @@ -1009,6 +1180,9 @@ packages: keyv@4.5.4: resolution: 
{integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + kuler@2.0.0: + resolution: {integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==} + levn@0.4.1: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} @@ -1020,6 +1194,10 @@ packages: lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + logform@2.7.0: + resolution: {integrity: sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ==} + engines: {node: '>= 12.0.0'} + magic-string@0.30.21: resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} @@ -1039,6 +1217,10 @@ packages: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} + methods@1.1.2: + resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} + engines: {node: '>= 0.6'} + micromatch@4.0.8: resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} @@ -1059,6 +1241,11 @@ packages: resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==} engines: {node: '>=18'} + mime@2.6.0: + resolution: {integrity: sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==} + engines: {node: '>=4.0.0'} + hasBin: true + minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} @@ -1109,6 +1296,13 @@ packages: once@1.4.0: resolution: {integrity: 
sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + one-time@1.0.0: + resolution: {integrity: sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==} + + open@11.0.0: + resolution: {integrity: sha512-smsWv2LzFjP03xmvFoJ331ss6h+jixfA4UUV/Bsiyuu4YJPfN+FIQGOIiv4w9/+MoHkfkJ22UIaQWRVFRfH6Vw==} + engines: {node: '>=20'} + optionator@0.9.4: resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} engines: {node: '>= 0.8.0'} @@ -1173,6 +1367,10 @@ packages: resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} + powershell-utils@0.1.0: + resolution: {integrity: sha512-dM0jVuXJPsDN6DvRpea484tCUaMiXWjuCn++HGTqUWzGDjv5tZkEZldAJ/UMlqRYGFrD/etByo4/xOuC/snX2A==} + engines: {node: '>=20'} + prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} @@ -1211,6 +1409,10 @@ packages: resolution: {integrity: sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==} engines: {node: '>= 0.10'} + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + rechoir@0.6.2: resolution: {integrity: sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==} engines: {node: '>= 0.10'} @@ -1223,6 +1425,9 @@ packages: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} + resolve-pkg-maps@1.0.0: + resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + resolve@1.22.11: resolution: {integrity: 
sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==} engines: {node: '>= 0.4'} @@ -1241,9 +1446,20 @@ packages: resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} engines: {node: '>= 18'} + run-applescript@7.1.0: + resolution: {integrity: sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==} + engines: {node: '>=18'} + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safe-stable-stringify@2.5.0: + resolution: {integrity: sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==} + engines: {node: '>=10'} + safer-buffer@2.1.2: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} @@ -1319,6 +1535,9 @@ packages: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} engines: {node: '>=0.10.0'} + stack-trace@0.0.10: + resolution: {integrity: sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==} + stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} @@ -1329,6 +1548,9 @@ packages: std-env@3.10.0: resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + strip-eof@1.0.0: resolution: {integrity: 
sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==} engines: {node: '>=0.10.0'} @@ -1337,6 +1559,14 @@ packages: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} + superagent@10.3.0: + resolution: {integrity: sha512-B+4Ik7ROgVKrQsXTV0Jwp2u+PXYLSlqtDAhYnkkD+zn3yg8s/zjA2MeGayPoY/KICrbitwneDHrjSotxKL+0XQ==} + engines: {node: '>=14.18.0'} + + supertest@7.2.2: + resolution: {integrity: sha512-oK8WG9diS3DlhdUkcFn4tkNIiIbBx9lI2ClF8K+b2/m8Eyv47LSawxUzZQSNKUrVb2KsqeTDCcjAAVPYaSLVTA==} + engines: {node: '>=14.18.0'} + supports-color@7.2.0: resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} engines: {node: '>=8'} @@ -1345,6 +1575,9 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} + text-hex@1.0.0: + resolution: {integrity: sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==} + tinybench@2.9.0: resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} @@ -1368,12 +1601,21 @@ packages: resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} engines: {node: '>=0.6'} + triple-beam@1.4.1: + resolution: {integrity: sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==} + engines: {node: '>= 14.0.0'} + ts-api-utils@2.4.0: resolution: {integrity: sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==} engines: {node: '>=18.12'} peerDependencies: typescript: '>=4.8.4' + tsx@4.21.0: + resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==} + engines: {node: 
'>=18.0.0'} + hasBin: true + type-check@0.4.0: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} @@ -1404,6 +1646,9 @@ packages: uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + vary@1.1.2: resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} engines: {node: '>= 0.8'} @@ -1496,6 +1741,14 @@ packages: engines: {node: '>=8'} hasBin: true + winston-transport@4.9.0: + resolution: {integrity: sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A==} + engines: {node: '>= 12.0.0'} + + winston@3.19.0: + resolution: {integrity: sha512-LZNJgPzfKR+/J3cHkxcpHKpKKvGfDZVPS4hfJCc4cCG0CgYzvlD6yE/S3CIL/Yt91ak327YCpiF/0MyeZHEHKA==} + engines: {node: '>= 12.0.0'} + word-wrap@1.2.5: resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} engines: {node: '>=0.10.0'} @@ -1503,6 +1756,10 @@ packages: wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + wsl-utils@0.3.1: + resolution: {integrity: sha512-g/eziiSUNBSsdDJtCLB8bdYEUMj4jR7AGeUo96p/3dTafgjHhpF4RiCFPiRILwjQoDXx5MqkBr4fwWtR3Ky4Wg==} + engines: {node: '>=20'} + yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} @@ -1517,6 +1774,14 @@ packages: snapshots: + '@colors/colors@1.6.0': {} + + '@dabh/diagnostics@2.0.8': + dependencies: + '@so-ric/colorspace': 1.1.6 + enabled: 2.0.0 + kuler: 2.0.0 + '@esbuild/aix-ppc64@0.27.2': optional: true @@ 
-1670,7 +1935,7 @@ snapshots: eventsource-parser: 3.0.6 express: 5.2.1 express-rate-limit: 7.5.1(express@5.2.1) - jose: 6.1.3 + jose: 6.2.1 json-schema-typed: 8.0.2 pkce-challenge: 5.0.1 raw-body: 3.0.2 @@ -1680,6 +1945,8 @@ snapshots: - hono - supports-color + '@noble/hashes@1.8.0': {} + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -1692,6 +1959,10 @@ snapshots: '@nodelib/fs.scandir': 2.1.5 fastq: 1.20.1 + '@paralleldrive/cuid2@2.3.1': + dependencies: + '@noble/hashes': 1.8.0 + '@rollup/rollup-android-arm-eabi@4.55.1': optional: true @@ -1767,23 +2038,83 @@ snapshots: '@rollup/rollup-win32-x64-msvc@4.55.1': optional: true + '@so-ric/colorspace@1.1.6': + dependencies: + color: 5.0.3 + text-hex: 1.0.0 + '@standard-schema/spec@1.1.0': {} + '@types/body-parser@1.19.6': + dependencies: + '@types/connect': 3.4.38 + '@types/node': 22.19.5 + '@types/chai@5.2.3': dependencies: '@types/deep-eql': 4.0.2 assertion-error: 2.0.1 + '@types/connect@3.4.38': + dependencies: + '@types/node': 22.19.5 + + '@types/cookiejar@2.1.5': {} + '@types/deep-eql@4.0.2': {} '@types/estree@1.0.8': {} + '@types/express-serve-static-core@5.1.1': + dependencies: + '@types/node': 22.19.5 + '@types/qs': 6.15.0 + '@types/range-parser': 1.2.7 + '@types/send': 1.2.1 + + '@types/express@5.0.6': + dependencies: + '@types/body-parser': 1.19.6 + '@types/express-serve-static-core': 5.1.1 + '@types/serve-static': 2.2.0 + + '@types/http-errors@2.0.5': {} + '@types/json-schema@7.0.15': {} + '@types/methods@1.1.4': {} + '@types/node@22.19.5': dependencies: undici-types: 6.21.0 + '@types/qs@6.15.0': {} + + '@types/range-parser@1.2.7': {} + + '@types/send@1.2.1': + dependencies: + '@types/node': 22.19.5 + + '@types/serve-static@2.2.0': + dependencies: + '@types/http-errors': 2.0.5 + '@types/node': 22.19.5 + + '@types/superagent@8.1.9': + dependencies: + '@types/cookiejar': 2.1.5 + '@types/methods': 1.1.4 + '@types/node': 22.19.5 + form-data: 4.0.5 + + '@types/supertest@7.2.0': + 
dependencies: + '@types/methods': 1.1.4 + '@types/superagent': 8.1.9 + + '@types/triple-beam@1.3.5': {} + '@typescript-eslint/eslint-plugin@8.52.0(@typescript-eslint/parser@8.52.0(eslint@9.39.2)(typescript@5.9.3))(eslint@9.39.2)(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.2 @@ -1884,13 +2215,13 @@ snapshots: chai: 6.2.2 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.16(vite@7.3.1(@types/node@22.19.5))': + '@vitest/mocker@4.0.16(vite@7.3.1(@types/node@22.19.5)(tsx@4.21.0))': dependencies: '@vitest/spy': 4.0.16 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 7.3.1(@types/node@22.19.5) + vite: 7.3.1(@types/node@22.19.5)(tsx@4.21.0) '@vitest/pretty-format@4.0.16': dependencies: @@ -1949,8 +2280,12 @@ snapshots: argparse@2.0.1: {} + asap@2.0.6: {} + assertion-error@2.0.1: {} + async@3.2.6: {} + asynckit@0.4.0: {} axios@1.13.2: @@ -1990,6 +2325,10 @@ snapshots: dependencies: fill-range: 7.1.1 + bundle-name@4.1.0: + dependencies: + run-applescript: 7.1.0 + bytes@3.1.2: {} call-bind-apply-helpers@1.0.2: @@ -2015,12 +2354,29 @@ snapshots: dependencies: color-name: 1.1.4 + color-convert@3.1.3: + dependencies: + color-name: 2.1.0 + color-name@1.1.4: {} + color-name@2.1.0: {} + + color-string@2.1.4: + dependencies: + color-name: 2.1.0 + + color@5.0.3: + dependencies: + color-convert: 3.1.3 + color-string: 2.1.4 + combined-stream@1.0.8: dependencies: delayed-stream: 1.0.0 + component-emitter@1.3.1: {} + concat-map@0.0.1: {} content-disposition@1.0.1: {} @@ -2031,6 +2387,8 @@ snapshots: cookie@0.7.2: {} + cookiejar@2.1.4: {} + cors@2.8.5: dependencies: object-assign: 4.1.1 @@ -2056,10 +2414,24 @@ snapshots: deep-is@0.1.4: {} + default-browser-id@5.0.1: {} + + default-browser@5.5.0: + dependencies: + bundle-name: 4.1.0 + default-browser-id: 5.0.1 + + define-lazy-prop@3.0.0: {} + delayed-stream@1.0.0: {} depd@2.0.0: {} + dezalgo@1.0.4: + dependencies: + asap: 2.0.6 + wrappy: 1.0.2 + dotenv@16.6.1: {} dunder-proto@1.0.1: @@ -2070,6 +2442,8 
@@ snapshots: ee-first@1.1.1: {} + enabled@2.0.0: {} + encodeurl@2.0.0: {} end-of-stream@1.4.5: @@ -2267,6 +2641,8 @@ snapshots: fast-levenshtein@2.0.6: {} + fast-safe-stringify@2.1.1: {} + fast-uri@3.1.0: {} fastq@1.20.1: @@ -2277,6 +2653,8 @@ snapshots: optionalDependencies: picomatch: 4.0.3 + fecha@4.2.3: {} + file-entry-cache@8.0.0: dependencies: flat-cache: 4.0.1 @@ -2308,6 +2686,8 @@ snapshots: flatted@3.3.3: {} + fn.name@1.1.0: {} + follow-redirects@1.15.11: {} form-data@4.0.5: @@ -2318,6 +2698,12 @@ snapshots: hasown: 2.0.2 mime-types: 2.1.35 + formidable@3.5.4: + dependencies: + '@paralleldrive/cuid2': 2.3.1 + dezalgo: 1.0.4 + once: 1.4.0 + forwarded@0.2.0: {} fresh@2.0.0: {} @@ -2349,6 +2735,10 @@ snapshots: dependencies: pump: 3.0.3 + get-tsconfig@4.13.6: + dependencies: + resolve-pkg-maps: 1.0.0 + glob-parent@5.1.2: dependencies: is-glob: 4.0.3 @@ -2408,21 +2798,35 @@ snapshots: dependencies: hasown: 2.0.2 + is-docker@3.0.0: {} + is-extglob@2.1.1: {} is-glob@4.0.3: dependencies: is-extglob: 2.1.1 + is-in-ssh@1.0.0: {} + + is-inside-container@1.0.0: + dependencies: + is-docker: 3.0.0 + is-number@7.0.0: {} is-promise@4.0.0: {} is-stream@1.1.0: {} + is-stream@2.0.1: {} + + is-wsl@3.1.1: + dependencies: + is-inside-container: 1.0.0 + isexe@2.0.0: {} - jose@6.1.3: {} + jose@6.2.1: {} js-yaml@4.1.1: dependencies: @@ -2442,6 +2846,8 @@ snapshots: dependencies: json-buffer: 3.0.1 + kuler@2.0.0: {} + levn@0.4.1: dependencies: prelude-ls: 1.2.1 @@ -2453,6 +2859,15 @@ snapshots: lodash.merge@4.6.2: {} + logform@2.7.0: + dependencies: + '@colors/colors': 1.6.0 + '@types/triple-beam': 1.3.5 + fecha: 4.2.3 + ms: 2.1.3 + safe-stable-stringify: 2.5.0 + triple-beam: 1.4.1 + magic-string@0.30.21: dependencies: '@jridgewell/sourcemap-codec': 1.5.5 @@ -2465,6 +2880,8 @@ snapshots: merge2@1.4.1: {} + methods@1.1.2: {} + micromatch@4.0.8: dependencies: braces: 3.0.3 @@ -2482,6 +2899,8 @@ snapshots: dependencies: mime-db: 1.54.0 + mime@2.6.0: {} + minimatch@3.1.2: 
dependencies: brace-expansion: 1.1.12 @@ -2520,6 +2939,19 @@ snapshots: dependencies: wrappy: 1.0.2 + one-time@1.0.0: + dependencies: + fn.name: 1.1.0 + + open@11.0.0: + dependencies: + default-browser: 5.5.0 + define-lazy-prop: 3.0.0 + is-in-ssh: 1.0.0 + is-inside-container: 1.0.0 + powershell-utils: 0.1.0 + wsl-utils: 0.3.1 + optionator@0.9.4: dependencies: deep-is: 0.1.4 @@ -2571,6 +3003,8 @@ snapshots: picocolors: 1.1.1 source-map-js: 1.2.1 + powershell-utils@0.1.0: {} + prelude-ls@1.2.1: {} prettier@3.7.4: {} @@ -2604,6 +3038,12 @@ snapshots: iconv-lite: 0.7.2 unpipe: 1.0.0 + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + rechoir@0.6.2: dependencies: resolve: 1.22.11 @@ -2612,6 +3052,8 @@ snapshots: resolve-from@4.0.0: {} + resolve-pkg-maps@1.0.0: {} + resolve@1.22.11: dependencies: is-core-module: 2.16.1 @@ -2661,10 +3103,16 @@ snapshots: transitivePeerDependencies: - supports-color + run-applescript@7.1.0: {} + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 + safe-buffer@5.2.1: {} + + safe-stable-stringify@2.5.0: {} + safer-buffer@2.1.2: {} semver@5.7.2: {} @@ -2756,22 +3204,52 @@ snapshots: source-map-js@1.2.1: {} + stack-trace@0.0.10: {} + stackback@0.0.2: {} statuses@2.0.2: {} std-env@3.10.0: {} + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + strip-eof@1.0.0: {} strip-json-comments@3.1.1: {} + superagent@10.3.0: + dependencies: + component-emitter: 1.3.1 + cookiejar: 2.1.4 + debug: 4.4.3 + fast-safe-stringify: 2.1.1 + form-data: 4.0.5 + formidable: 3.5.4 + methods: 1.1.2 + mime: 2.6.0 + qs: 6.14.1 + transitivePeerDependencies: + - supports-color + + supertest@7.2.2: + dependencies: + cookie-signature: 1.2.2 + methods: 1.1.2 + superagent: 10.3.0 + transitivePeerDependencies: + - supports-color + supports-color@7.2.0: dependencies: has-flag: 4.0.0 supports-preserve-symlinks-flag@1.0.0: {} + text-hex@1.0.0: {} + tinybench@2.9.0: {} tinyexec@1.0.2: {} @@ -2789,10 +3267,19 @@ 
snapshots: toidentifier@1.0.1: {} + triple-beam@1.4.1: {} + ts-api-utils@2.4.0(typescript@5.9.3): dependencies: typescript: 5.9.3 + tsx@4.21.0: + dependencies: + esbuild: 0.27.2 + get-tsconfig: 4.13.6 + optionalDependencies: + fsevents: 2.3.3 + type-check@0.4.0: dependencies: prelude-ls: 1.2.1 @@ -2824,9 +3311,11 @@ snapshots: dependencies: punycode: 2.3.1 + util-deprecate@1.0.2: {} + vary@1.1.2: {} - vite@7.3.1(@types/node@22.19.5): + vite@7.3.1(@types/node@22.19.5)(tsx@4.21.0): dependencies: esbuild: 0.27.2 fdir: 6.5.0(picomatch@4.0.3) @@ -2837,11 +3326,12 @@ snapshots: optionalDependencies: '@types/node': 22.19.5 fsevents: 2.3.3 + tsx: 4.21.0 - vitest@4.0.16(@types/node@22.19.5): + vitest@4.0.16(@types/node@22.19.5)(tsx@4.21.0): dependencies: '@vitest/expect': 4.0.16 - '@vitest/mocker': 4.0.16(vite@7.3.1(@types/node@22.19.5)) + '@vitest/mocker': 4.0.16(vite@7.3.1(@types/node@22.19.5)(tsx@4.21.0)) '@vitest/pretty-format': 4.0.16 '@vitest/runner': 4.0.16 '@vitest/snapshot': 4.0.16 @@ -2858,7 +3348,7 @@ snapshots: tinyexec: 1.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 - vite: 7.3.1(@types/node@22.19.5) + vite: 7.3.1(@types/node@22.19.5)(tsx@4.21.0) why-is-node-running: 2.3.0 optionalDependencies: '@types/node': 22.19.5 @@ -2888,10 +3378,35 @@ snapshots: siginfo: 2.0.0 stackback: 0.0.2 + winston-transport@4.9.0: + dependencies: + logform: 2.7.0 + readable-stream: 3.6.2 + triple-beam: 1.4.1 + + winston@3.19.0: + dependencies: + '@colors/colors': 1.6.0 + '@dabh/diagnostics': 2.0.8 + async: 3.2.6 + is-stream: 2.0.1 + logform: 2.7.0 + one-time: 1.0.0 + readable-stream: 3.6.2 + safe-stable-stringify: 2.5.0 + stack-trace: 0.0.10 + triple-beam: 1.4.1 + winston-transport: 4.9.0 + word-wrap@1.2.5: {} wrappy@1.0.2: {} + wsl-utils@0.3.1: + dependencies: + is-wsl: 3.1.1 + powershell-utils: 0.1.0 + yocto-queue@0.1.0: {} zod-to-json-schema@3.25.1(zod@3.25.76): diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index df94eba..32d3990 100644 --- a/pnpm-workspace.yaml +++ 
b/pnpm-workspace.yaml @@ -2,6 +2,7 @@ supportedArchitectures: os: - win32 - darwin + - linux cpu: - x64 - arm64 diff --git a/src/auth/nutrient-oauth.ts b/src/auth/nutrient-oauth.ts new file mode 100644 index 0000000..66f0c04 --- /dev/null +++ b/src/auth/nutrient-oauth.ts @@ -0,0 +1,391 @@ +import { createServer, type IncomingMessage, type ServerResponse } from 'node:http' +import { randomBytes, createHash } from 'node:crypto' +import { readFile, writeFile, mkdir } from 'node:fs/promises' +import { homedir } from 'node:os' +import { join } from 'node:path' +import { logger } from '../logger.js' + +/** Fixed callback port for OAuth redirect URI. Must match the registered redirect_uri on the auth server. */ +const DEFAULT_CALLBACK_PORT = 19423 + +export type NutrientOAuthConfig = { + /** Nutrient OAuth authorize endpoint. */ + authorizeUrl: string + /** Nutrient OAuth token endpoint. */ + tokenUrl: string + /** OAuth client ID. If omitted, the server registers via DCR using `registrationUrl`. */ + clientId?: string + /** OAuth Dynamic Client Registration endpoint. Required when `clientId` is not set. */ + registrationUrl?: string + /** Human-readable client name sent during DCR. */ + clientName?: string + /** OAuth scopes to request. */ + scopes: string[] + /** Path to cache credentials. Defaults to `~/.nutrient/credentials.json`. */ + credentialsPath?: string + /** Path to cache DCR client registration. Defaults to `~/.nutrient/client.json`. */ + clientRegistrationPath?: string + /** Fixed port for the OAuth callback server. Defaults to 19423. */ + callbackPort?: number + /** OAuth resource parameter (RFC 8707). Identifies the target API. 
*/ + resource?: string +} + +type CachedCredentials = { + accessToken: string + refreshToken?: string + expiresAt?: number +} + +type CachedClientRegistration = { + clientId: string + registrationUrl: string + registeredAt: string +} + +const DEFAULT_CREDENTIALS_PATH = join(homedir(), '.nutrient', 'credentials.json') +const DEFAULT_CLIENT_REGISTRATION_PATH = join(homedir(), '.nutrient', 'client.json') + +function generateCodeVerifier(): string { + return randomBytes(32).toString('base64url') +} + +function generateCodeChallenge(verifier: string): string { + return createHash('sha256').update(verifier).digest('base64url') +} + +async function readCachedCredentials(credentialsPath: string): Promise { + try { + const content = await readFile(credentialsPath, 'utf-8') + return JSON.parse(content) as CachedCredentials + } catch { + return null + } +} + +async function writeCachedCredentials(credentialsPath: string, credentials: CachedCredentials): Promise { + const dir = join(credentialsPath, '..') + await mkdir(dir, { recursive: true, mode: 0o700 }) + await writeFile(credentialsPath, JSON.stringify(credentials, null, 2), { mode: 0o600 }) +} + +async function readCachedClientRegistration(path: string): Promise { + try { + const content = await readFile(path, 'utf-8') + return JSON.parse(content) as CachedClientRegistration + } catch { + return null + } +} + +async function writeCachedClientRegistration(path: string, registration: CachedClientRegistration): Promise { + const dir = join(path, '..') + await mkdir(dir, { recursive: true, mode: 0o700 }) + await writeFile(path, JSON.stringify(registration, null, 2), { mode: 0o600 }) +} + +async function registerClient(config: NutrientOAuthConfig): Promise { + if (!config.registrationUrl) { + throw new Error('DCR requires registrationUrl when clientId is not configured') + } + + const callbackPort = config.callbackPort ?? 
DEFAULT_CALLBACK_PORT + const redirectUri = `http://localhost:${callbackPort}/callback` + + const registrationPayload = { + client_name: config.clientName ?? 'Nutrient DWS MCP Server', + redirect_uris: [redirectUri], + grant_types: ['authorization_code', 'refresh_token'], + response_types: ['code'], + token_endpoint_auth_method: 'none', + } + + logger.info('Registering OAuth client via DCR', { registrationUrl: config.registrationUrl }) + logger.debug('DCR payload', registrationPayload) + + const response = await fetch(config.registrationUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(registrationPayload), + }) + + if (!response.ok) { + const errorText = await response.text() + logger.error('DCR failed', { status: response.status, body: errorText }) + throw new Error(`Dynamic client registration failed (${response.status}): ${errorText}`) + } + + const data = (await response.json()) as { client_id: string } + + if (!data.client_id) { + throw new Error('DCR response missing client_id') + } + + logger.info('OAuth client registered', { clientId: data.client_id }) + return data.client_id +} + +/** + * Resolves the OAuth client ID — either from config, cached DCR registration, or by registering a new client. + */ +async function resolveClientId(config: NutrientOAuthConfig): Promise { + if (config.clientId) { + return config.clientId + } + + const registrationPath = config.clientRegistrationPath ?? 
DEFAULT_CLIENT_REGISTRATION_PATH + + const cached = await readCachedClientRegistration(registrationPath) + if (cached && cached.registrationUrl === config.registrationUrl) { + logger.debug('Using cached DCR client', { clientId: cached.clientId }) + return cached.clientId + } + + const clientId = await registerClient(config) + + await writeCachedClientRegistration(registrationPath, { + clientId, + registrationUrl: config.registrationUrl!, + registeredAt: new Date().toISOString(), + }) + + return clientId +} + +function isTokenExpired(credentials: CachedCredentials): boolean { + if (!credentials.expiresAt) { + return false + } + // Consider expired 60 seconds early to avoid edge cases + return Date.now() >= (credentials.expiresAt - 60_000) +} + +async function refreshAccessToken( + config: NutrientOAuthConfig, + clientId: string, + refreshToken: string, +): Promise { + try { + logger.debug('Attempting token refresh', { tokenUrl: config.tokenUrl, clientId }) + const response = await fetch(config.tokenUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + body: new URLSearchParams({ + grant_type: 'refresh_token', + client_id: clientId, + refresh_token: refreshToken, + }), + }) + + if (!response.ok) { + logger.warn('Token refresh failed', { status: response.status, statusText: response.statusText }) + return null + } + + const data = (await response.json()) as { + access_token: string + refresh_token?: string + expires_in?: number + } + + return { + accessToken: data.access_token, + refreshToken: data.refresh_token ?? refreshToken, + expiresAt: data.expires_in ? 
Date.now() + data.expires_in * 1000 : undefined, + } + } catch { + return null + } +} + +async function exchangeCodeForToken( + config: NutrientOAuthConfig, + clientId: string, + code: string, + codeVerifier: string, + redirectUri: string, +): Promise { + const response = await fetch(config.tokenUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + body: new URLSearchParams({ + grant_type: 'authorization_code', + client_id: clientId, + code, + redirect_uri: redirectUri, + code_verifier: codeVerifier, + }), + }) + + if (!response.ok) { + const errorText = await response.text() + logger.error('Token exchange failed', { status: response.status, body: errorText }) + throw new Error(`Token exchange failed (${response.status}): ${errorText}`) + } + + const data = (await response.json()) as { + access_token: string + refresh_token?: string + expires_in?: number + } + + return { + accessToken: data.access_token, + refreshToken: data.refresh_token, + expiresAt: data.expires_in ? 
Date.now() + data.expires_in * 1000 : undefined, + } +} + +function buildAuthorizeUrl( + config: NutrientOAuthConfig, + clientId: string, + redirectUri: string, + codeChallenge: string, + state: string, +): string { + const url = new URL(config.authorizeUrl) + url.searchParams.set('response_type', 'code') + url.searchParams.set('client_id', clientId) + url.searchParams.set('redirect_uri', redirectUri) + url.searchParams.set('code_challenge', codeChallenge) + url.searchParams.set('code_challenge_method', 'S256') + url.searchParams.set('state', state) + + if (config.scopes.length > 0) { + url.searchParams.set('scope', config.scopes.join(' ')) + } + + if (config.resource) { + url.searchParams.set('resource', config.resource) + } + + return url.toString() +} + +async function performBrowserOAuthFlow(config: NutrientOAuthConfig, clientId: string): Promise { + const codeVerifier = generateCodeVerifier() + const codeChallenge = generateCodeChallenge(codeVerifier) + const state = randomBytes(16).toString('hex') + + return new Promise((resolve, reject) => { + const server = createServer(async (req: IncomingMessage, res: ServerResponse) => { + try { + const url = new URL(req.url ?? '/', `http://localhost`) + if (url.pathname !== '/callback') { + res.writeHead(404) + res.end('Not found') + return + } + + const error = url.searchParams.get('error') + if (error) { + const description = url.searchParams.get('error_description') ?? error + res.writeHead(400, { 'Content-Type': 'text/html' }) + res.end(`

Authorization Failed

${description}

You can close this tab.

`) + server.close() + reject(new Error(`OAuth authorization failed: ${description}`)) + return + } + + const returnedState = url.searchParams.get('state') + if (returnedState !== state) { + res.writeHead(400, { 'Content-Type': 'text/html' }) + res.end('

Invalid State

OAuth state mismatch. Please try again.

') + server.close() + reject(new Error('OAuth state mismatch')) + return + } + + const code = url.searchParams.get('code') + if (!code) { + res.writeHead(400, { 'Content-Type': 'text/html' }) + res.end('

Missing Code

No authorization code received.

') + server.close() + reject(new Error('No authorization code received')) + return + } + + const cbPort = config.callbackPort ?? DEFAULT_CALLBACK_PORT + const redirectUri = `http://localhost:${cbPort}/callback` + + const credentials = await exchangeCodeForToken(config, clientId, code, codeVerifier, redirectUri) + + res.writeHead(200, { 'Content-Type': 'text/html' }) + res.end('

Authenticated!

You can close this tab and return to your terminal.

') + server.close() + resolve(credentials) + } catch (err) { + res.writeHead(500, { 'Content-Type': 'text/html' }) + res.end('

Error

Something went wrong during authentication.

') + server.close() + reject(err) + } + }) + + const callbackPort = config.callbackPort ?? DEFAULT_CALLBACK_PORT + + server.listen(callbackPort, '127.0.0.1', async () => { + const redirectUri = `http://localhost:${callbackPort}/callback` + const authorizeUrl = buildAuthorizeUrl(config, clientId, redirectUri, codeChallenge, state) + + logger.info('OAuth callback server listening', { port: callbackPort, redirectUri }) + logger.debug('Authorize URL', { authorizeUrl }) + + // Dynamic import to avoid bundling issues — `open` is an ESM-only package + const { default: open } = await import('open') + logger.info('Opening browser for Nutrient authentication...') + await open(authorizeUrl) + }) + + server.on('error', reject) + }) +} + +/** + * Returns a valid Nutrient DWS API access token. + * + * Checks cached credentials first, attempts token refresh if expired, + * and falls back to a browser-based OAuth flow if no valid token is available. + */ +export async function getToken(config: NutrientOAuthConfig): Promise { + const credentialsPath = config.credentialsPath ?? DEFAULT_CREDENTIALS_PATH + + // 0. Resolve client ID (from config, cached DCR, or fresh DCR registration) + const clientId = await resolveClientId(config) + + logger.debug('getToken called', { clientId, credentialsPath }) + + // 1. Check cached token + const cached = await readCachedCredentials(credentialsPath) + + if (cached) { + // 2. Valid token — return it + if (!isTokenExpired(cached)) { + logger.debug('Using cached token (not expired)') + return cached.accessToken + } + + logger.debug('Cached token expired', { expiresAt: cached.expiresAt ? new Date(cached.expiresAt).toISOString() : 'unknown' }) + + // 3. 
Expired but has refresh token — try refresh + if (cached.refreshToken) { + logger.info('Attempting token refresh') + const refreshed = await refreshAccessToken(config, clientId, cached.refreshToken) + if (refreshed) { + logger.info('Token refreshed successfully') + await writeCachedCredentials(credentialsPath, refreshed) + return refreshed.accessToken + } + logger.warn('Token refresh failed, falling back to browser flow') + } + } else { + logger.info('No cached credentials found') + } + + // 4. No valid token — browser OAuth flow + logger.info('Starting browser OAuth flow', { authorizeUrl: config.authorizeUrl, clientId }) + const credentials = await performBrowserOAuthFlow(config, clientId) + logger.info('Browser OAuth flow completed successfully') + await writeCachedCredentials(credentialsPath, credentials) + return credentials.accessToken +} diff --git a/src/dws/ai-redact.ts b/src/dws/ai-redact.ts index e175b91..e1dd86b 100644 --- a/src/dws/ai-redact.ts +++ b/src/dws/ai-redact.ts @@ -5,7 +5,7 @@ import { CallToolResult } from '@modelcontextprotocol/sdk/types.js' import { handleApiError, handleFileResponse } from './utils.js' import { createErrorResponse } from '../responses.js' import { resolveReadFilePath, resolveWriteFilePath } from '../fs/sandbox.js' -import { callNutrientApi } from './api.js' +import { DwsApiClient } from './client.js' /** * Performs an AI redaction call to the Nutrient DWS AI Redact API. 
@@ -14,6 +14,7 @@ export async function performAiRedactCall( filePath: string, criteria: string, outputPath: string, + apiClient: DwsApiClient, stage?: boolean, apply?: boolean, ): Promise { @@ -28,9 +29,7 @@ export async function performAiRedactCall( // Guard against output overwriting input if (resolvedInputPath === resolvedOutputPath) { - return createErrorResponse( - 'Error: Output path must be different from input path to prevent data corruption.', - ) + return createErrorResponse('Error: Output path must be different from input path to prevent data corruption.') } const fileBuffer = await fs.promises.readFile(resolvedInputPath) @@ -51,7 +50,7 @@ export async function performAiRedactCall( formData.append('file1', fileBuffer, { filename: fileName }) formData.append('data', JSON.stringify(dataPayload)) - const response = await callNutrientApi('ai/redact', formData) + const response = await apiClient.post('ai/redact', formData) return handleFileResponse(response, resolvedOutputPath, 'AI redaction completed successfully. Output saved to') } catch (e: unknown) { diff --git a/src/dws/api.ts b/src/dws/api.ts index b8540d9..e4dd77c 100644 --- a/src/dws/api.ts +++ b/src/dws/api.ts @@ -1,31 +1,29 @@ -import FormData from 'form-data' -import axios from 'axios' -import { getApiKey } from './utils.js' -import { getVersion } from '../version.js' +import { DwsApiClient, createApiClientFromApiKey, createApiClientFromTokenResolver } from './client.js' /** - * Makes an API call to the Nutrient API - * @param endpoint The API endpoint to call (e.g., 'sign', 'build') - * @param data The data to send (FormData or JSON object) - * @returns The API response + * Discriminated union describing how to authenticate with the DWS API. + * + * - Provide `apiKey` for static API-key auth (stdio mode, static HTTP mode). + * - Provide `tokenResolver` for dynamic token auth (JWT/OAuth mode). 
*/ -export async function callNutrientApi(endpoint: string, data: FormData | Record) { - const apiKey = getApiKey() - const isFormData = data instanceof FormData +export type ApiClientAuthContext = + | { + apiKey: string + baseUrl?: string + } + | { + tokenResolver: () => Promise + baseUrl?: string + } - const defaultHeaders: Record = { - Authorization: `Bearer ${apiKey}`, - 'User-Agent': `NutrientDWSMCPServer/${getVersion()}`, +/** + * Factory that creates a {@link DwsApiClient} from an auth context. + * Selects the appropriate authentication strategy based on the context shape. + */ +export function createApiClient(context: ApiClientAuthContext): DwsApiClient { + if ('apiKey' in context) { + return createApiClientFromApiKey(context.apiKey, context.baseUrl) } - const headers: Record = isFormData - ? defaultHeaders - : { - ...defaultHeaders, - 'Content-Type': 'application/json', - } - return axios.post(`https://api.nutrient.io/${endpoint}`, data, { - headers, - responseType: 'stream', - }) + return createApiClientFromTokenResolver(context.tokenResolver, context.baseUrl) } diff --git a/src/dws/build.ts b/src/dws/build.ts index 66c3696..636aac5 100644 --- a/src/dws/build.ts +++ b/src/dws/build.ts @@ -7,12 +7,16 @@ import { CallToolResult } from '@modelcontextprotocol/sdk/types.js' import { FileReference } from './types.js' import { createErrorResponse } from '../responses.js' import { resolveReadFilePath, resolveWriteFilePath } from '../fs/sandbox.js' -import { callNutrientApi } from './api.js' +import { DwsApiClient } from './client.js' /** * Performs a build call to the Nutrient DWS Processor API */ -export async function performBuildCall(instructions: Instructions, outputFilePath: string): Promise { +export async function performBuildCall( + instructions: Instructions, + outputFilePath: string, + apiClient: DwsApiClient, +): Promise { const { instructions: adjustedInstructions, fileReferences } = await processInstructions(instructions) if (fileReferences.size === 
0) { @@ -22,7 +26,7 @@ export async function performBuildCall(instructions: Instructions, outputFilePat try { // We resolve the output path first to fail early const resolvedOutputPath = await resolveWriteFilePath(outputFilePath) - const response = await makeApiBuildCall(adjustedInstructions, fileReferences) + const response = await makeApiBuildCall(adjustedInstructions, fileReferences, apiClient) if (adjustedInstructions.output?.type === 'json-content') { return handleJsonContentResponse(response) @@ -131,11 +135,15 @@ async function processFileReference(reference: string): Promise { /** * Make the API call to the build endpoint */ -async function makeApiBuildCall(instructions: Instructions, fileReferences: Map) { +async function makeApiBuildCall( + instructions: Instructions, + fileReferences: Map, + apiClient: DwsApiClient, +) { const allInputsAreUrls = Array.from(fileReferences.values()).every((fileRef) => fileRef.url) if (allInputsAreUrls) { - return callNutrientApi('build', instructions) + return apiClient.post('build', instructions) } else { const formData = new FormData() formData.append('instructions', JSON.stringify(instructions)) @@ -146,6 +154,6 @@ async function makeApiBuildCall(instructions: Instructions, fileReferences: Map< } } - return callNutrientApi('build', formData) + return apiClient.post('build', formData) } } diff --git a/src/dws/client.ts b/src/dws/client.ts new file mode 100644 index 0000000..9e784f1 --- /dev/null +++ b/src/dws/client.ts @@ -0,0 +1,96 @@ +import axios, { AxiosInstance, AxiosResponse } from 'axios' +import FormData from 'form-data' +import { getVersion } from '../version.js' + +/** Async function that returns a bearer token for authenticating with the DWS API. */ +export type DwsTokenResolver = () => Promise + +export type DwsApiClientOptions = { + /** DWS API base URL. Defaults to `https://api.nutrient.io`. */ + baseUrl?: string + /** Provides the bearer token for each request. Called on every API call. 
*/ + tokenResolver: DwsTokenResolver + /** Optional custom Axios instance (useful for testing or proxy configuration). */ + httpClient?: AxiosInstance +} + +/** + * HTTP client for the Nutrient Document Web Services (DWS) API. + * + * Handles authentication, content-type negotiation, and streaming responses. + * All responses are returned as streams (`responseType: 'stream'`). + */ +export class DwsApiClient { + private readonly baseUrl: string + private readonly tokenResolver: DwsTokenResolver + private readonly httpClient: AxiosInstance + + constructor(options: DwsApiClientOptions) { + this.baseUrl = options.baseUrl ?? 'https://api.nutrient.io' + this.tokenResolver = options.tokenResolver + this.httpClient = options.httpClient ?? axios.create() + } + + private async buildHeaders(payload?: FormData | Record) { + const token = await this.tokenResolver() + + const headers: Record = { + Authorization: `Bearer ${token}`, + 'User-Agent': `NutrientDWSMCPServer/${getVersion()}`, + } + + if (payload instanceof FormData) { + return { + ...headers, + ...payload.getHeaders(), + } + } + + if (payload) { + headers['Content-Type'] = 'application/json' + } + + return headers + } + + private buildUrl(endpoint: string): string { + const normalizedEndpoint = endpoint.startsWith('/') ? endpoint.slice(1) : endpoint + return new URL(normalizedEndpoint, this.baseUrl.endsWith('/') ? this.baseUrl : `${this.baseUrl}/`).toString() + } + + /** POST to a DWS endpoint. Automatically sets Content-Type based on the payload type. */ + async post(endpoint: string, data: FormData | Record): Promise { + const headers = await this.buildHeaders(data) + + return this.httpClient.post(this.buildUrl(endpoint), data, { + headers, + responseType: 'stream', + }) + } + + /** GET a DWS endpoint. 
*/ + async get(endpoint: string): Promise { + const headers = await this.buildHeaders() + + return this.httpClient.get(this.buildUrl(endpoint), { + headers, + responseType: 'stream', + }) + } +} + +/** Creates a {@link DwsApiClient} that authenticates with a static API key. */ +export function createApiClientFromApiKey(apiKey: string, baseUrl?: string): DwsApiClient { + return new DwsApiClient({ + baseUrl, + tokenResolver: async () => apiKey, + }) +} + +/** Creates a {@link DwsApiClient} that resolves a fresh token on each request (e.g. for JWT/OAuth flows). */ +export function createApiClientFromTokenResolver(tokenResolver: DwsTokenResolver, baseUrl?: string): DwsApiClient { + return new DwsApiClient({ + baseUrl, + tokenResolver, + }) +} diff --git a/src/dws/credits.ts b/src/dws/credits.ts index 87ff6b4..176db9a 100644 --- a/src/dws/credits.ts +++ b/src/dws/credits.ts @@ -1,7 +1,6 @@ -import axios from 'axios' -import { getApiKey, pipeToString } from './utils.js' -import { getVersion } from '../version.js' +import { pipeToString } from './utils.js' import { CallToolResult } from '@modelcontextprotocol/sdk/types.js' +import { DwsApiClient } from './client.js' /** * Account info response from DWS API (GET /account/info) @@ -32,16 +31,8 @@ export function sanitizeAccountInfo(data: AccountInfoResponse): Omit { - const apiKey = getApiKey() - - const response = await axios.get('https://api.nutrient.io/account/info', { - headers: { - Authorization: `Bearer ${apiKey}`, - 'User-Agent': `NutrientDWSMCPServer/${getVersion()}`, - }, - responseType: 'stream', - }) +export async function performCheckCreditsCall(apiClient: DwsApiClient): Promise { + const response = await apiClient.get('account/info') const raw = await pipeToString(response.data) diff --git a/src/dws/sign.ts b/src/dws/sign.ts index 6c29263..d344055 100644 --- a/src/dws/sign.ts +++ b/src/dws/sign.ts @@ -2,10 +2,10 @@ import FormData from 'form-data' import { handleApiError, handleFileResponse } from './utils.js' 
import { CallToolResult } from '@modelcontextprotocol/sdk/types.js' import { SignatureOptions } from '../schemas.js' -import { callNutrientApi } from './api.js' import { resolveReadFilePath, resolveWriteFilePath } from '../fs/sandbox.js' import fs from 'fs' import path from 'path' +import { DwsApiClient } from './client.js' /** * Performs a sign call to the Nutrient DWS API @@ -13,6 +13,7 @@ import path from 'path' export async function performSignCall( filePath: string, outputFilePath: string, + apiClient: DwsApiClient, signatureOptions: SignatureOptions = { signatureType: 'cms', flatten: false }, watermarkImagePath?: string, graphicImagePath?: string, @@ -36,7 +37,7 @@ export async function performSignCall( await addFileToFormData(formData, 'graphic', graphicImagePath) } - const response = await callNutrientApi('sign', formData) + const response = await apiClient.post('sign', formData) return handleFileResponse(response, resolvedOutputPath, 'File signed successfully') } catch (e: unknown) { diff --git a/src/dws/utils.ts b/src/dws/utils.ts index cea5dac..d69c27d 100644 --- a/src/dws/utils.ts +++ b/src/dws/utils.ts @@ -35,18 +35,6 @@ export async function pipeToBuffer(responseData: Readable): Promise { }) } -/** - * Validates that the API key is set in the environment - * @returns Object with error information if API key is not set - */ -export function getApiKey(): string { - if (!process.env.NUTRIENT_DWS_API_KEY) { - throw new Error('NUTRIENT_DWS_API_KEY not set in environment') - } - - return process.env.NUTRIENT_DWS_API_KEY -} - /** * Handles API errors and converts them to a standard format * @returns Object with error information diff --git a/src/http/authMiddleware.ts b/src/http/authMiddleware.ts new file mode 100644 index 0000000..950a554 --- /dev/null +++ b/src/http/authMiddleware.ts @@ -0,0 +1,46 @@ +import type { RequestHandler } from 'express' +import { Environment } from '../utils/environment.js' +import { createJwtAuthMiddleware } from './jwtAuth.js' 
+ +function addAudienceWithTrailingSlashVariants(target: Set, value: string) { + const trimmed = value.trim().replace(/\/+$/, '') + if (!trimmed) { + return + } + + target.add(trimmed) + target.add(`${trimmed}/`) +} + +export function buildJwtAudiences(resourceUrl: string): string[] { + const audiences = new Set(['dws-mcp']) + addAudienceWithTrailingSlashVariants(audiences, resourceUrl) + + try { + const parsed = new URL(resourceUrl) + addAudienceWithTrailingSlashVariants(audiences, parsed.origin) + + const normalizedPath = parsed.pathname.replace(/\/+$/, '') + if (normalizedPath && normalizedPath !== '/') { + addAudienceWithTrailingSlashVariants(audiences, `${parsed.origin}${normalizedPath}`) + } + } catch { + // Keep best-effort audience list when resourceUrl is not a valid URL. + } + + return Array.from(audiences) +} + +export function createAuthMiddleware(environment: Environment): RequestHandler { + if (!environment.issuer) { + throw new Error('JWT auth requires ISSUER (defaults to AUTH_SERVER_URL)') + } + + return createJwtAuthMiddleware({ + jwksUrl: environment.jwksUrl, + issuer: environment.issuer, + audience: buildJwtAudiences(environment.resourceUrl), + requiredScope: 'mcp:invoke', + resourceMetadataUrl: environment.protectedResourceMetadataUrl, + }) +} diff --git a/src/http/authUtils.ts b/src/http/authUtils.ts new file mode 100644 index 0000000..5d5cd3b --- /dev/null +++ b/src/http/authUtils.ts @@ -0,0 +1,18 @@ +import { createHash } from 'node:crypto' + +export function hashPrincipal(input: string): string { + return createHash('sha256').update(input).digest('hex') +} + +export function parseBearerToken(authHeader?: string): string | undefined { + if (!authHeader) { + return undefined + } + + const [scheme, token] = authHeader.split(/\s+/, 2) + if (!scheme || !token || scheme.toLowerCase() !== 'bearer') { + return undefined + } + + return token +} diff --git a/src/http/jwtAuth.ts b/src/http/jwtAuth.ts new file mode 100644 index 0000000..08337e9 --- 
/dev/null +++ b/src/http/jwtAuth.ts @@ -0,0 +1,123 @@ +import type { RequestHandler } from 'express' +import { AuthInfo } from '@modelcontextprotocol/sdk/server/auth/types.js' +import { createRemoteJWKSet, jwtVerify, JWTPayload } from 'jose' +import { RequestWithAuth } from './types.js' +import { buildWwwAuthenticateHeader } from './protectedResource.js' +import { hashPrincipal, parseBearerToken } from './authUtils.js' + +function parseScopes(payload: JWTPayload): string[] { + if (typeof payload.scope !== 'string') { + return [] + } + + return payload.scope + .split(/\s+/) + .map((scope) => scope.trim()) + .filter(Boolean) +} + +function parseAllowedTools(payload: JWTPayload): string[] | undefined { + const rawClaim = payload.allowed_tools + + if (Array.isArray(rawClaim)) { + const tools = rawClaim.filter((tool): tool is string => typeof tool === 'string' && tool.trim().length > 0) + return tools.length > 0 ? tools : undefined + } + + if (typeof rawClaim === 'string') { + const tools = rawClaim + .split(/[\s,]+/) + .map((tool) => tool.trim()) + .filter(Boolean) + + return tools.length > 0 ? tools : undefined + } + + return undefined +} + +function toAuthInfo(token: string, payload: JWTPayload): AuthInfo { + const sub = typeof payload.sub === 'string' ? payload.sub : '' + const azp = typeof payload.azp === 'string' ? payload.azp : '' + const sid = typeof payload.sid === 'string' ? payload.sid : '' + + return { + token, + clientId: azp || sub || 'unknown-client', + scopes: parseScopes(payload), + expiresAt: typeof payload.exp === 'number' ? 
payload.exp : undefined, + extra: { + allowedTools: parseAllowedTools(payload), + principalFingerprint: hashPrincipal(`${sub}|${azp}|${sid}`), + subject: sub, + authorizedParty: azp, + sessionId: sid, + }, + } +} + +export function createJwtAuthMiddleware(options: { + jwksUrl: string + issuer: string + audience: string | string[] + requiredScope: string + resourceMetadataUrl: string +}): RequestHandler { + const jwks = createRemoteJWKSet(new URL(options.jwksUrl)) + + return async (req, res, next) => { + const token = parseBearerToken(req.headers.authorization) + + if (!token) { + res.set('WWW-Authenticate', buildWwwAuthenticateHeader({ resourceMetadataUrl: options.resourceMetadataUrl })) + res.status(401).json({ + error: 'invalid_token', + error_description: 'Missing or malformed Authorization header', + }) + return + } + + try { + const { payload } = await jwtVerify(token, jwks, { + issuer: options.issuer, + audience: options.audience, + clockTolerance: '30s', + }) + + const scopes = parseScopes(payload) + if (!scopes.includes(options.requiredScope)) { + res.set( + 'WWW-Authenticate', + buildWwwAuthenticateHeader({ + resourceMetadataUrl: options.resourceMetadataUrl, + error: 'invalid_token', + errorDescription: `Required scope "${options.requiredScope}" is missing`, + scope: options.requiredScope, + }), + ) + res.status(401).json({ + error: 'invalid_token', + error_description: `Required scope "${options.requiredScope}" is missing`, + }) + return + } + + ;(req as RequestWithAuth).auth = toAuthInfo(token, payload) + next() + } catch (error) { + const errorDescription = error instanceof Error ? 
error.message : 'Invalid token' + res.set( + 'WWW-Authenticate', + buildWwwAuthenticateHeader({ + resourceMetadataUrl: options.resourceMetadataUrl, + error: 'invalid_token', + errorDescription, + }), + ) + res.status(401).json({ + error: 'invalid_token', + error_description: errorDescription, + }) + } + } +} diff --git a/src/http/protectedResource.ts b/src/http/protectedResource.ts new file mode 100644 index 0000000..2dbed73 --- /dev/null +++ b/src/http/protectedResource.ts @@ -0,0 +1,45 @@ +import type { RequestHandler } from 'express' + +type ProtectedResourceConfig = { + resourceUrl: string + authServerUrl: string + resourceMetadataUrl: string +} + +export function createProtectedResourceHandler(config: ProtectedResourceConfig): RequestHandler { + return (_req, res) => { + res.json({ + resource: config.resourceUrl, + authorization_servers: [config.authServerUrl], + }) + } +} + +function quote(value: string): string { + return value.replace(/\\/g, '\\\\').replace(/"/g, '\\"') +} + +export function buildWwwAuthenticateHeader(options: { + resourceMetadataUrl: string + error?: string + errorDescription?: string + scope?: string +}) { + const params: string[] = [] + + if (options.error) { + params.push(`error="${quote(options.error)}"`) + } + + if (options.errorDescription) { + params.push(`error_description="${quote(options.errorDescription)}"`) + } + + if (options.scope) { + params.push(`scope="${quote(options.scope)}"`) + } + + params.push(`resource_metadata="${quote(options.resourceMetadataUrl)}"`) + + return `Bearer ${params.join(', ')}` +} diff --git a/src/http/requestLogger.ts b/src/http/requestLogger.ts new file mode 100644 index 0000000..1431e32 --- /dev/null +++ b/src/http/requestLogger.ts @@ -0,0 +1,88 @@ +import type { RequestHandler } from 'express' +import { randomUUID } from 'node:crypto' +import { logger as globalLogger, setRequestId } from '../logger.js' + +type HttpLogLevel = 'debug' | 'info' +type HttpLoggerMeta = Record +type HttpLogger = (level: 
HttpLogLevel, message: string, meta?: HttpLoggerMeta) => void + +function parseDebugFlag(value?: string): boolean { + if (!value) { + return false + } + + const normalized = value.trim().toLowerCase() + return normalized === '1' || normalized === 'true' || normalized === 'yes' || normalized === 'on' +} + +export function isMcpDebugLoggingEnabled(env: NodeJS.ProcessEnv = process.env): boolean { + return parseDebugFlag(env.MCP_DEBUG_LOGGING) +} + +function defaultLogger(level: HttpLogLevel, message: string, meta?: HttpLoggerMeta) { + const payload = meta ? `${message} ${JSON.stringify(meta)}` : message + globalLogger.log({ level, message: payload }) +} + +function inspectBody(body: unknown) { + if (typeof body === 'string') { + return body + } + + if (Buffer.isBuffer(body)) { + return body.toString('utf8') + } + + try { + return JSON.stringify(body) + } catch { + return String(body) + } +} + +function sendInterceptor( + res: Parameters[1], + send: Parameters[1]['send'], + onSend: (content: unknown) => void, +) { + return ((content?: unknown) => { + onSend(content) + res.send = send + return res.send(content as never) + }) as typeof res.send +} + +export function createRequestLoggerMiddleware(options?: { logger?: HttpLogger }): RequestHandler { + const logger = options?.logger ?? defaultLogger + + return (req, res, next) => { + const requestIdHeader = req.headers['x-request-id'] + const requestId = (typeof requestIdHeader === 'string' ? requestIdHeader : requestIdHeader?.[0]) ?? 
randomUUID() + + setRequestId(requestId) + res.setHeader('x-request-id', requestId) + + logger('info', `<<< ${req.method} ${req.url}`) + + if (req.body !== undefined) { + logger('debug', inspectBody(req.body)) + } + + let responseBody: unknown + + res.send = sendInterceptor(res, res.send.bind(res), (content) => { + responseBody = content + }) + + res.on('finish', () => { + setRequestId(requestId) + logger('info', `>>> Sent ${res.statusCode}`) + + if (responseBody !== undefined) { + logger('debug', inspectBody(responseBody)) + } + }) + + next() + } +} diff --git a/src/http/types.ts b/src/http/types.ts new file mode 100644 index 0000000..47ba39c --- /dev/null +++ b/src/http/types.ts @@ -0,0 +1,43 @@ +import { AuthInfo } from '@modelcontextprotocol/sdk/server/auth/types.js' +import type { Request } from 'express' + +export type McpAuthInfoExtra = { + allowedTools?: string[] + principalFingerprint?: string + subject?: string + authorizedParty?: string + sessionId?: string + [key: string]: unknown +} + +export type McpAuthInfo = AuthInfo & { + extra?: McpAuthInfoExtra +} + +export type RequestWithAuth = Request & { + auth?: McpAuthInfo +} + +export function getAllowedTools(authInfo?: AuthInfo): string[] | undefined { + const tools = (authInfo?.extra as McpAuthInfoExtra | undefined)?.allowedTools + if (!Array.isArray(tools) || tools.length === 0) { + return undefined + } + + const validTools = tools.filter((tool): tool is string => typeof tool === 'string' && tool.length > 0) + return validTools.length > 0 ? validTools : undefined +} + +export function getPrincipalFingerprint(authInfo?: AuthInfo): string | undefined { + const fingerprint = (authInfo?.extra as McpAuthInfoExtra | undefined)?.principalFingerprint + return typeof fingerprint === 'string' && fingerprint.length > 0 ? 
fingerprint : undefined +} + +export function isToolAllowed(toolName: string, authInfo?: AuthInfo): boolean { + const allowedTools = getAllowedTools(authInfo) + if (!allowedTools) { + return true + } + + return allowedTools.includes(toolName) +} diff --git a/src/index.ts b/src/index.ts index 4d7f537..a0bebf6 100644 --- a/src/index.ts +++ b/src/index.ts @@ -3,11 +3,18 @@ /** * Nutrient DWS API MCP Server * - * This server provides a Model Context Protocol (MCP) interface to the Nutrient DWS Processor API. + * Supports stdio and Streamable HTTP MCP transports. */ +import express, { Request, Response } from 'express' +import { randomUUID } from 'node:crypto' +import { fileURLToPath } from 'node:url' +import { resolve } from 'node:path' import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js' import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js' +import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js' +import { createMcpExpressApp } from '@modelcontextprotocol/sdk/server/express.js' +import { AuthInfo } from '@modelcontextprotocol/sdk/server/auth/types.js' import { AiRedactArgsSchema, BuildAPIArgsSchema, @@ -24,24 +31,50 @@ import { setSandboxDirectory } from './fs/sandbox.js' import { createErrorResponse } from './responses.js' import { getVersion } from './version.js' import { parseSandboxPath } from './utils/sandbox.js' +import { createApiClient } from './dws/api.js' +import { DwsApiClient } from './dws/client.js' +import { getToken, type NutrientOAuthConfig } from './auth/nutrient-oauth.js' +import { createAuthMiddleware } from './http/authMiddleware.js' +import { createProtectedResourceHandler } from './http/protectedResource.js' +import { createRequestLoggerMiddleware, isMcpDebugLoggingEnabled } from './http/requestLogger.js' +import { getAllowedTools, getPrincipalFingerprint, isToolAllowed, RequestWithAuth } from './http/types.js' +import { Environment, getEnvironment } from 
'./utils/environment.js' +import { logger } from './logger.js' -const server = new McpServer( - { - name: 'nutrient-dws-mcp-server', - version: getVersion(), - }, - { - capabilities: { - tools: {}, - logging: {}, - }, - }, -) +type ServerMode = 'stdio' | 'http' + +type HttpSessionContext = { + server: McpServer + transport: StreamableHTTPServerTransport + principalFingerprint: string +} -function addToolsToServer(server: McpServer, sandboxEnabled: boolean = false) { - server.tool( - 'document_processor', - `Processes documents using Nutrient DWS Processor API. Reads from and writes to file system or sandbox (if enabled). +type RunServerResult = { + mode: ServerMode + close: () => Promise +} + +function buildPermissionDeniedResponse(toolName: string) { + return createErrorResponse(`Permission denied: Tool "${toolName}" is not allowed for this token.`) +} + +function canInvokeTool(toolName: string, authInfo?: AuthInfo) { + return isToolAllowed(toolName, authInfo) +} + +function addToolsToServer(options: { + server: McpServer + sandboxEnabled: boolean + apiClient: DwsApiClient + allowedTools?: string[] +}) { + const { server, sandboxEnabled, apiClient, allowedTools } = options + const shouldRegisterTool = (toolName: string) => !allowedTools || allowedTools.includes(toolName) + + if (shouldRegisterTool('document_processor')) { + server.tool( + 'document_processor', + `Processes documents using Nutrient DWS Processor API. Reads from and writes to file system or sandbox (if enabled). Features: • Import XFDF annotations @@ -52,19 +85,25 @@ Features: • Redaction creation and application Output formats: PDF, PDF/A, images (PNG, JPEG, WebP), JSON extraction, Office (DOCX, XLSX, PPTX)`, - BuildAPIArgsSchema.shape, - async ({ instructions, outputPath }) => { - try { - return performBuildCall(instructions, outputPath) - } catch (error) { - return createErrorResponse(`Error: ${error instanceof Error ? 
error.message : String(error)}`) - } - }, - ) + BuildAPIArgsSchema.shape, + async ({ instructions, outputPath }, extra) => { + if (!canInvokeTool('document_processor', extra.authInfo)) { + return buildPermissionDeniedResponse('document_processor') + } - server.tool( - 'document_signer', - `Digitally signs PDF files using Nutrient DWS Sign API. Reads from and writes to file system or sandbox (if enabled). + try { + return await performBuildCall(instructions, outputPath, apiClient) + } catch (error) { + return createErrorResponse(`Error: ${error instanceof Error ? error.message : String(error)}`) + } + }, + ) + } + + if (shouldRegisterTool('document_signer')) { + server.tool( + 'document_signer', + `Digitally signs PDF files using Nutrient DWS Sign API. Reads from and writes to file system or sandbox (if enabled). Signature types: • CMS/PKCS#7 (standard digital signatures) @@ -79,19 +118,32 @@ Appearance options: Positioning: • Place on specific page coordinates • Use existing signature form fields`, - SignAPIArgsSchema.shape, - async ({ filePath, signatureOptions, watermarkImagePath, graphicImagePath, outputPath }) => { - try { - return performSignCall(filePath, outputPath, signatureOptions, watermarkImagePath, graphicImagePath) - } catch (error) { - return createErrorResponse(`Error: ${error instanceof Error ? error.message : String(error)}`) - } - }, - ) + SignAPIArgsSchema.shape, + async ({ filePath, signatureOptions, watermarkImagePath, graphicImagePath, outputPath }, extra) => { + if (!canInvokeTool('document_signer', extra.authInfo)) { + return buildPermissionDeniedResponse('document_signer') + } + + try { + return await performSignCall( + filePath, + outputPath, + apiClient, + signatureOptions, + watermarkImagePath, + graphicImagePath, + ) + } catch (error) { + return createErrorResponse(`Error: ${error instanceof Error ? 
error.message : String(error)}`) + } + }, + ) + } - server.tool( - 'ai_redactor', - `AI-powered document redaction using Nutrient DWS AI Redaction API. Reads from and writes to file system or sandbox (if enabled). + if (shouldRegisterTool('ai_redactor')) { + server.tool( + 'ai_redactor', + `AI-powered document redaction using Nutrient DWS AI Redaction API. Reads from and writes to file system or sandbox (if enabled). Automatically detects and permanently removes sensitive information from documents using AI analysis. Detected content types include: @@ -102,105 +154,503 @@ Detected content types include: • Any custom criteria you specify By default (when neither stage nor apply is set), redactions are detected and immediately applied. Set stage to true to detect and stage redactions without applying them. Set apply to true to apply previously staged redactions.`, - AiRedactArgsSchema.shape, - async ({ filePath, criteria, outputPath, stage, apply }) => { - try { - return performAiRedactCall(filePath, criteria, outputPath, stage, apply) - } catch (error) { - return createErrorResponse(`Error: ${error instanceof Error ? error.message : String(error)}`) - } - }, - ) + AiRedactArgsSchema.shape, + async ({ filePath, criteria, outputPath, stage, apply }, extra) => { + if (!canInvokeTool('ai_redactor', extra.authInfo)) { + return buildPermissionDeniedResponse('ai_redactor') + } - server.tool( - 'check_credits', - `Check your Nutrient DWS API credit balance and usage for the current billing period. + try { + return await performAiRedactCall(filePath, criteria, outputPath, apiClient, stage, apply) + } catch (error) { + return createErrorResponse(`Error: ${error instanceof Error ? error.message : String(error)}`) + } + }, + ) + } + + if (shouldRegisterTool('check_credits')) { + server.tool( + 'check_credits', + `Check your Nutrient DWS API credit balance and usage for the current billing period. 
Returns: subscription type, total credits, used credits, and remaining credits.`, - CheckCreditsArgsSchema.shape, - async () => { - try { - return performCheckCreditsCall() - } catch (error) { - return createErrorResponse(`Error: ${error instanceof Error ? error.message : String(error)}`) - } - }, - ) + CheckCreditsArgsSchema.shape, + async (_args, extra) => { + if (!canInvokeTool('check_credits', extra.authInfo)) { + return buildPermissionDeniedResponse('check_credits') + } - if (sandboxEnabled) { - server.tool( - 'sandbox_file_tree', - 'Returns the file tree of the sandbox directory. It will recurse into subdirectories and return a list of files and directories.', - {}, - async () => performDirectoryTreeCall('.'), + try { + return await performCheckCreditsCall(apiClient) + } catch (error) { + return createErrorResponse(`Error: ${error instanceof Error ? error.message : String(error)}`) + } + }, ) - } else { + } + + if (sandboxEnabled) { + if (shouldRegisterTool('sandbox_file_tree')) { + server.tool( + 'sandbox_file_tree', + 'Returns the file tree of the sandbox directory. It will recurse into subdirectories and return a list of files and directories.', + {}, + async (_args, extra) => { + if (!canInvokeTool('sandbox_file_tree', extra.authInfo)) { + return buildPermissionDeniedResponse('sandbox_file_tree') + } + + return performDirectoryTreeCall('.') + }, + ) + } + } else if (shouldRegisterTool('directory_tree')) { server.tool( 'directory_tree', 'Returns the directory tree of a given path. 
All paths are resolved relative to root directory.', DirectoryTreeArgsSchema.shape, - async ({ path }) => performDirectoryTreeCall(path), + async ({ path }, extra) => { + if (!canInvokeTool('directory_tree', extra.authInfo)) { + return buildPermissionDeniedResponse('directory_tree') + } + + return performDirectoryTreeCall(path) + }, ) } } -async function parseCommandLineArgs() { - const args = process.argv.slice(2) +function createMcpServer(options: { sandboxEnabled: boolean; apiClient: DwsApiClient; allowedTools?: string[] }) { + const server = new McpServer( + { + name: 'nutrient-dws-mcp-server', + version: getVersion(), + }, + { + capabilities: { + tools: {}, + logging: {}, + }, + }, + ) - try { - const sandboxDir = parseSandboxPath(args, process.env.SANDBOX_PATH) || null - return { sandboxDir } - } catch (error) { - await server.server.sendLoggingMessage({ - level: 'error', - data: `Error: ${error instanceof Error ? error.message : String(error)}`, - }) - process.exit(1) + addToolsToServer({ + server, + sandboxEnabled: options.sandboxEnabled, + apiClient: options.apiClient, + allowedTools: options.allowedTools, + }) + + return server +} + +function getSessionId(req: Request): string | undefined { + const headerValue = req.headers['mcp-session-id'] + + if (Array.isArray(headerValue)) { + return headerValue[0] } + + return headerValue } -export async function runServer() { - const { sandboxDir } = await parseCommandLineArgs() +function isInitializeRequest(body: unknown): boolean { + if (!body || typeof body !== 'object') { + return false + } - if (sandboxDir) { + // Handle JSON-RPC batch requests (array of messages) + if (Array.isArray(body)) { + return body.length > 0 && typeof body[0] === 'object' && body[0] !== null && body[0].method === 'initialize' + } + + const request = body as { method?: unknown } + return request.method === 'initialize' +} + +function sendJsonRpcError(res: Response, code: number, message: string, id: string | number | null = null) { + 
res.status(400).json({ + jsonrpc: '2.0', + error: { + code, + message, + }, + id, + }) +} + +function createSessionApiClient(options: { + environment: Environment + authInfo: AuthInfo +}): DwsApiClient { + const { environment, authInfo } = options + + return createApiClient({ + baseUrl: environment.dwsApiBaseUrl, + tokenResolver: async () => authInfo.token, + }) +} + +export function createHttpApp(options: { environment: Environment; sandboxEnabled: boolean }) { + const { environment, sandboxEnabled } = options + + const sessions = new Map() + + const app = createMcpExpressApp({ + host: environment.host, + allowedHosts: environment.allowedHosts.length > 0 ? environment.allowedHosts : undefined, + }) + + app.use(express.json({ limit: '25mb' })) + + // CORS: Permissive policy for local-first MCP server; tighten for hosted deployments + app.use((_req, res, next) => { + res.header('Access-Control-Allow-Origin', '*') + res.header('Access-Control-Allow-Methods', 'GET, POST, DELETE, OPTIONS') + res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization, Mcp-Session-Id') + res.header('Access-Control-Expose-Headers', 'Mcp-Session-Id') + if (_req.method === 'OPTIONS') { + res.sendStatus(204) + return + } + next() + }) + + if (isMcpDebugLoggingEnabled(process.env)) { + app.use(createRequestLoggerMiddleware()) + } + + app.get('/health', (_req, res) => { + res.json({ status: 'ok', version: getVersion() }) + }) + + app.get( + '/.well-known/oauth-protected-resource', + createProtectedResourceHandler({ + resourceUrl: environment.resourceUrl, + authServerUrl: environment.authServerUrl, + resourceMetadataUrl: environment.protectedResourceMetadataUrl, + }), + ) + + const authMiddleware = createAuthMiddleware(environment) + + const handleExistingSessionRequest = async (req: Request, res: Response, parsedBody?: unknown) => { + const sessionId = getSessionId(req) + if (!sessionId) { + res.status(400).send('Missing MCP session ID') + return + } + + const sessionContext = 
sessions.get(sessionId) + if (!sessionContext) { + console.warn(`Session miss: unknown session ID ${sessionId} (active sessions: ${sessions.size})`) + res.status(404).send('Unknown MCP session ID') + return + } + + const authInfo = (req as RequestWithAuth).auth + const principalFingerprint = getPrincipalFingerprint(authInfo) + if (!principalFingerprint) { + res.status(401).send('Missing principal fingerprint') + return + } + + if (principalFingerprint !== sessionContext.principalFingerprint) { + res.status(403).send('Session is bound to a different principal') + return + } + + await sessionContext.transport.handleRequest(req, res, parsedBody) + } + + app.post('/mcp', authMiddleware, async (req, res) => { try { - await setSandboxDirectory(sandboxDir) + const sessionId = getSessionId(req) + + if (sessionId) { + await handleExistingSessionRequest(req, res, req.body) + return + } + + if (!isInitializeRequest(req.body)) { + sendJsonRpcError(res, -32000, 'Bad Request: No valid session ID provided', null) + return + } + + const authInfo = (req as RequestWithAuth).auth + const principalFingerprint = getPrincipalFingerprint(authInfo) + + if (!authInfo || !principalFingerprint) { + res.status(401).send('Missing auth context') + return + } + + const allowedTools = getAllowedTools(authInfo) + const apiClient = createSessionApiClient({ + environment, + authInfo, + }) + + const server = createMcpServer({ + sandboxEnabled, + apiClient, + allowedTools, + }) + + const transport = new StreamableHTTPServerTransport({ + sessionIdGenerator: () => randomUUID(), + onsessioninitialized: (newSessionId) => { + sessions.set(newSessionId, { + server, + transport, + principalFingerprint, + }) + }, + onsessionclosed: async (closedSessionId) => { + const context = sessions.get(closedSessionId) + if (context) { + sessions.delete(closedSessionId) + await context.server.close().catch(() => {}) + } + }, + }) + + transport.onclose = () => { + const currentSessionId = transport.sessionId + if 
(!currentSessionId) { + return + } + + const context = sessions.get(currentSessionId) + if (!context) { + return + } + + sessions.delete(currentSessionId) + void context.server.close().catch(() => {}) + } + + await server.connect(transport) + await transport.handleRequest(req, res, req.body) } catch (error) { - console.error(`Error setting sandbox directory: ${error instanceof Error ? error.message : String(error)}`) - process.exit(1) + console.error('Error handling MCP POST request:', error) + if (!res.headersSent) { + res.status(500).json({ + jsonrpc: '2.0', + error: { + code: -32603, + message: 'Internal server error', + }, + id: null, + }) + } } - } else { - console.warn( - 'Info: No sandbox directory specified. File operations will not be restricted.\n' + - 'Sandboxed mode is recommended - To enable sandboxed mode and restrict file operations, set SANDBOX_PATH environment variable', - ) + }) + + app.get('/mcp', authMiddleware, async (req, res) => { + try { + await handleExistingSessionRequest(req, res) + } catch (error) { + console.error('Error handling MCP GET request:', error) + if (!res.headersSent) { + res.status(500).send('Internal server error') + } + } + }) + + app.delete('/mcp', authMiddleware, async (req, res) => { + try { + await handleExistingSessionRequest(req, res) + } catch (error) { + console.error('Error handling MCP DELETE request:', error) + if (!res.headersSent) { + res.status(500).send('Internal server error') + } + } + }) + + const close = async () => { + const closePromises = [...sessions.values()].map(async (context) => { + await context.transport.close().catch(() => {}) + await context.server.close().catch(() => {}) + }) + + await Promise.all(closePromises) + sessions.clear() } - addToolsToServer(server, sandboxDir !== null) + return { app, close } +} - const transport = new StdioServerTransport() - await server.connect(transport) +async function parseCommandLineArgs() { + const args = process.argv.slice(2) + const sandboxDir = 
parseSandboxPath(args, process.env.SANDBOX_PATH) || null + return { sandboxDir } +} - return server +async function prepareSandbox(sandboxDir: string | null) { + if (sandboxDir) { + await setSandboxDirectory(sandboxDir) + return + } + + console.warn( + 'Info: No sandbox directory specified. File operations will not be restricted.\n' + + 'Sandboxed mode is recommended - To enable sandboxed mode and restrict file operations, set SANDBOX_PATH environment variable', + ) } -runServer() - .then(async (server) => { - server.server.getClientCapabilities() - await server.server.sendLoggingMessage({ - level: 'info', - data: `Nutrient DWS MCP Server ${getVersion()} running.`, +function createStdioApiClient(environment: Environment): DwsApiClient { + if (environment.nutrientApiKey) { + return createApiClient({ + apiKey: environment.nutrientApiKey, + baseUrl: environment.dwsApiBaseUrl, }) + } + + const oauthConfig: NutrientOAuthConfig = { + authorizeUrl: `${environment.authServerUrl}/oauth/authorize`, + tokenUrl: `${environment.authServerUrl}/oauth/token`, + registrationUrl: `${environment.authServerUrl}/oauth/register`, + clientId: environment.clientId, + scopes: ['mcp:invoke', 'offline_access'], + resource: environment.dwsApiBaseUrl, + } + + return createApiClient({ + tokenResolver: () => getToken(oauthConfig), + baseUrl: environment.dwsApiBaseUrl, + }) +} + +async function runStdioServer(options: { + sandboxEnabled: boolean + environment: Environment +}): Promise { + const { sandboxEnabled, environment } = options + + logger.info('Starting stdio transport', { + version: getVersion(), + authMethod: environment.nutrientApiKey ? 
'api-key' : 'oauth-browser-flow', + sandboxEnabled, + dwsApiBaseUrl: environment.dwsApiBaseUrl, }) - .catch((error) => { - console.error('Fatal error running server:', error) - process.exit(1) + + const apiClient = createStdioApiClient(environment) + + const server = createMcpServer({ + sandboxEnabled, + apiClient, }) -process.stdin.on('close', async () => { + const transport = new StdioServerTransport() + await server.connect(transport) + + logger.info('stdio transport connected') + await server.server.sendLoggingMessage({ level: 'info', - data: `Nutrient DWS MCP Server ${getVersion()} closed.`, + data: `Nutrient DWS MCP Server ${getVersion()} running on stdio transport.`, + }) + + return { + mode: 'stdio', + close: async () => { + await server.close() + }, + } +} + +async function runHttpServer(options: { sandboxEnabled: boolean; environment: Environment }): Promise { + const { sandboxEnabled, environment } = options + const { app, close: closeSessions } = createHttpApp({ environment, sandboxEnabled }) + + const httpServer = app.listen(environment.port, environment.host) + + await new Promise((resolvePromise, rejectPromise) => { + httpServer.once('listening', () => resolvePromise()) + httpServer.once('error', (error) => rejectPromise(error)) + }) + + console.log( + `Nutrient DWS MCP Server ${getVersion()} running on HTTP transport at http://${environment.host}:${environment.port}/mcp`, + ) + + return { + mode: 'http', + close: async () => { + await closeSessions() + await new Promise((resolvePromise, rejectPromise) => { + httpServer.close((error) => { + if (error) { + rejectPromise(error) + return + } + + resolvePromise() + }) + }) + }, + } +} + +export async function runServer(): Promise { + const environment = getEnvironment() + const { sandboxDir } = await parseCommandLineArgs() + + await prepareSandbox(sandboxDir) + + const sandboxEnabled = sandboxDir !== null + + if (environment.transportMode === 'http') { + return runHttpServer({ sandboxEnabled, environment 
}) + } + + return runStdioServer({ sandboxEnabled, environment }) +} + +function isMainModule() { + const entryFile = process.argv[1] + if (!entryFile) { + return false + } + + return resolve(fileURLToPath(import.meta.url)) === resolve(entryFile) +} + +if (isMainModule()) { + let activeServer: RunServerResult | undefined + + runServer() + .then((result) => { + activeServer = result + }) + .catch((error) => { + console.error('Fatal error running server:', error) + process.exit(1) + }) + + process.on('SIGINT', async () => { + if (activeServer) { + await activeServer.close().catch(() => {}) + } + + process.exit(0) + }) + + process.on('SIGTERM', async () => { + if (activeServer) { + await activeServer.close().catch(() => {}) + } + + process.exit(0) + }) + + process.stdin.on('close', async () => { + if (activeServer?.mode === 'stdio') { + await activeServer.close().catch(() => {}) + } }) - await server.close() -}) +} diff --git a/src/logger.ts b/src/logger.ts new file mode 100644 index 0000000..73768a5 --- /dev/null +++ b/src/logger.ts @@ -0,0 +1,76 @@ +import { AsyncLocalStorage } from 'node:async_hooks' +import { join } from 'node:path' +import { tmpdir } from 'node:os' +import winston from 'winston' + +type RequestContext = { + requestId?: string +} + +const asyncLocalStorage = new AsyncLocalStorage() + +/** + * Sets the request ID used for logging for the current asynchronous execution context. + */ +export function setRequestId(requestId: string) { + const store = asyncLocalStorage.getStore() + + if (store) { + store.requestId = requestId + return + } + + asyncLocalStorage.enterWith({ requestId }) +} + +function getRequestId() { + const store = asyncLocalStorage.getStore() + return store?.requestId ?? null +} + +const customMessageFormat = winston.format.printf(({ level, message, timestamp }) => { + const requestId = getRequestId() + const serializedMessage = typeof message === 'string' ? 
message : JSON.stringify(message) + + if (requestId) { + return `${timestamp} [${level}]: ${serializedMessage} requestId=${requestId}` + } + + return `${timestamp} [${level}]: ${serializedMessage}` +}) + +const isStdioMode = process.env.MCP_TRANSPORT !== 'http' +const logFilePath = process.env.MCP_LOG_FILE || (isStdioMode ? join(tmpdir(), 'nutrient-dws-mcp-server.log') : undefined) + +function createTransports(): winston.transport[] { + // In stdio mode, Console transport interferes with MCP protocol — use file only + if (logFilePath) { + return [ + new winston.transports.File({ + filename: logFilePath, + format: winston.format.combine( + winston.format.timestamp({ format: 'HH:mm:ss.SSS' }), + customMessageFormat, + ), + }), + ] + } + + return [ + new winston.transports.Console({ + format: winston.format.combine( + winston.format.timestamp({ format: 'HH:mm:ss.SSS' }), + winston.format.colorize(), + winston.format.json(), + customMessageFormat, + ), + }), + ] +} + +export const logger = winston.createLogger({ + level: process.env.LOG_LEVEL || 'debug', + format: winston.format.json(), + defaultMeta: { service: 'dws-mcp-server' }, + transports: createTransports(), +}) diff --git a/src/utils/environment.ts b/src/utils/environment.ts new file mode 100644 index 0000000..f22a0d1 --- /dev/null +++ b/src/utils/environment.ts @@ -0,0 +1,106 @@ +import { z } from 'zod' + +export type TransportMode = 'stdio' | 'http' +export type TokenEndpointAuthMethod = 'client_secret_basic' | 'private_key_jwt' + +export type Environment = { + transportMode: TransportMode + port: number + host: string + allowedHosts: string[] + nutrientApiKey?: string + dwsApiBaseUrl: string + resourceUrl: string + authServerUrl: string + protectedResourceMetadataUrl: string + jwksUrl: string + issuer?: string + tokenEndpointAuthMethod: TokenEndpointAuthMethod + clientId?: string + clientSecret?: string + clientAssertionPrivateKey?: string + clientAssertionAlg?: string + clientAssertionKid?: string +} + 
+const RawEnvironmentSchema = z.object({ + MCP_TRANSPORT: z.enum(['stdio', 'http']).default('stdio'), + PORT: z.coerce.number().int().positive().default(3000), + MCP_HOST: z.string().default('127.0.0.1'), + MCP_ALLOWED_HOSTS: z.string().optional(), + NUTRIENT_DWS_API_KEY: z.string().optional(), + DWS_API_BASE_URL: z.string().url().default('https://api.nutrient.io'), + RESOURCE_URL: z.string().url().default('http://localhost:3000/mcp'), + AUTH_SERVER_URL: z.string().url().default('https://api.nutrient.io'), + JWKS_URL: z.string().url().default('https://api.nutrient.io/.well-known/jwks.json'), + ISSUER: z.string().url().optional(), + TOKEN_ENDPOINT_AUTH_METHOD: z.enum(['client_secret_basic', 'private_key_jwt']).default('client_secret_basic'), + CLIENT_ID: z.string().optional(), + CLIENT_SECRET: z.string().optional(), + CLIENT_ASSERTION_PRIVATE_KEY: z.string().optional(), + CLIENT_ASSERTION_ALG: z.string().default('RS256'), + CLIENT_ASSERTION_KID: z.string().optional(), +}) + +type RawEnvironment = z.infer + +let cachedEnvironment: Environment | undefined + +function splitList(value?: string): string[] { + if (!value) { + return [] + } + + return value + .split(/[\s,]+/) + .map((entry) => entry.trim()) + .filter(Boolean) +} + +function getProtectedResourceMetadataUrl(resourceUrl: string): string { + return new URL('/.well-known/oauth-protected-resource', resourceUrl).toString() +} + + +function parseEnvironment(rawEnv: NodeJS.ProcessEnv): Environment { + const raw = RawEnvironmentSchema.parse(rawEnv) + + const allowedHosts = splitList(raw.MCP_ALLOWED_HOSTS) + + return { + transportMode: raw.MCP_TRANSPORT, + port: raw.PORT, + host: raw.MCP_HOST, + allowedHosts, + nutrientApiKey: raw.NUTRIENT_DWS_API_KEY, + dwsApiBaseUrl: raw.DWS_API_BASE_URL, + resourceUrl: raw.RESOURCE_URL, + authServerUrl: raw.AUTH_SERVER_URL, + protectedResourceMetadataUrl: getProtectedResourceMetadataUrl(raw.RESOURCE_URL), + jwksUrl: raw.JWKS_URL, + issuer: raw.ISSUER ?? 
raw.AUTH_SERVER_URL, + tokenEndpointAuthMethod: raw.TOKEN_ENDPOINT_AUTH_METHOD, + clientId: raw.CLIENT_ID, + clientSecret: raw.CLIENT_SECRET, + clientAssertionPrivateKey: raw.CLIENT_ASSERTION_PRIVATE_KEY, + clientAssertionAlg: raw.CLIENT_ASSERTION_ALG, + clientAssertionKid: raw.CLIENT_ASSERTION_KID, + } +} + +export function getEnvironment(): Environment { + if (!cachedEnvironment) { + cachedEnvironment = parseEnvironment(process.env) + } + + return cachedEnvironment +} + +export function resetEnvironmentForTests() { + cachedEnvironment = undefined +} + +export function getAllowedToolsFromEnvironmentList(value?: string): string[] | undefined { + const tools = splitList(value) + return tools.length > 0 ? tools : undefined +} diff --git a/tests/authMiddleware.test.ts b/tests/authMiddleware.test.ts new file mode 100644 index 0000000..f57c87d --- /dev/null +++ b/tests/authMiddleware.test.ts @@ -0,0 +1,24 @@ +import { describe, expect, it } from 'vitest' +import { buildJwtAudiences } from '../src/http/authMiddleware.js' + +describe('buildJwtAudiences', () => { + it('includes root and path audience variants for resource URLs', () => { + const audiences = buildJwtAudiences('http://localhost:3000/mcp') + + expect(audiences).toEqual( + expect.arrayContaining([ + 'dws-mcp', + 'http://localhost:3000', + 'http://localhost:3000/', + 'http://localhost:3000/mcp', + 'http://localhost:3000/mcp/', + ]), + ) + }) + + it('keeps defaults for non-URL resource values', () => { + const audiences = buildJwtAudiences('dws-mcp-dev') + + expect(audiences).toEqual(expect.arrayContaining(['dws-mcp', 'dws-mcp-dev', 'dws-mcp-dev/'])) + }) +}) diff --git a/tests/build-api-examples.test.ts b/tests/build-api-examples.test.ts index a9c54c4..9119ff8 100644 --- a/tests/build-api-examples.test.ts +++ b/tests/build-api-examples.test.ts @@ -5,15 +5,19 @@ import path from 'path' import { performBuildCall } from '../src/dws/build.js' import { BuildAPIArgs } from '../src/schemas.js' import { 
setSandboxDirectory } from '../src/fs/sandbox.js' +import { createApiClient } from '../src/dws/api.js' +import { DwsApiClient } from '../src/dws/client.js' dotenvConfig() describe('performBuildCall with build-api-examples', () => { let outputDirectory: string + let apiClient: DwsApiClient beforeAll(async () => { const assetsDir = path.join(__dirname, `assets`) await setSandboxDirectory(assetsDir) + apiClient = createApiClient({ apiKey: process.env.NUTRIENT_DWS_API_KEY! }) outputDirectory = `test-output-${new Date().toISOString().replace(/[:.]/g, '-')}` }) @@ -80,7 +84,7 @@ describe('performBuildCall with build-api-examples', () => { it.each(fileOutputExamples)('should process $name', async ({ example }) => { const { instructions, outputPath } = example - const result = await performBuildCall(instructions, `${outputDirectory}/${outputPath}`) + const result = await performBuildCall(instructions, `${outputDirectory}/${outputPath}`, apiClient) expect(result).toEqual( expect.objectContaining({ @@ -98,7 +102,7 @@ describe('performBuildCall with build-api-examples', () => { it.each(jsonOutputExamples)('should process $name', async ({ example }) => { const { instructions } = example - const result = await performBuildCall(instructions, 'dummy_path.pdf') + const result = await performBuildCall(instructions, 'dummy_path.pdf', apiClient) expect(result).toEqual( expect.objectContaining({ diff --git a/tests/environment.test.ts b/tests/environment.test.ts new file mode 100644 index 0000000..f516452 --- /dev/null +++ b/tests/environment.test.ts @@ -0,0 +1,64 @@ +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { getEnvironment, resetEnvironmentForTests } from '../src/utils/environment.js' + +describe('environment', () => { + const originalEnv = process.env + + beforeEach(() => { + process.env = { ...originalEnv } + resetEnvironmentForTests() + }) + + afterEach(() => { + process.env = originalEnv + resetEnvironmentForTests() + }) + + it('parses default 
stdio configuration', () => { + process.env.NUTRIENT_DWS_API_KEY = 'dws-key' + + const environment = getEnvironment() + + expect(environment.transportMode).toBe('stdio') + expect(environment.nutrientApiKey).toBe('dws-key') + }) + + it('defaults JWKS URL to api.nutrient.io in HTTP mode', () => { + process.env.MCP_TRANSPORT = 'http' + + const environment = getEnvironment() + + expect(environment.jwksUrl).toBe('https://api.nutrient.io/.well-known/jwks.json') + }) + + it('accepts private_key_jwt mode without client secret', () => { + process.env.MCP_TRANSPORT = 'http' + process.env.JWKS_URL = 'https://auth.example.com/.well-known/jwks.json' + process.env.CLIENT_ID = 'client-id' + process.env.TOKEN_ENDPOINT_AUTH_METHOD = 'private_key_jwt' + process.env.CLIENT_ASSERTION_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\\nabc\\n-----END PRIVATE KEY-----' + + const environment = getEnvironment() + + expect(environment.tokenEndpointAuthMethod).toBe('private_key_jwt') + expect(environment.clientSecret).toBeUndefined() + expect(environment.clientAssertionPrivateKey).toContain('BEGIN PRIVATE KEY') + }) + + it('defaults issuer to AUTH_SERVER_URL', () => { + process.env.MCP_TRANSPORT = 'http' + + const environment = getEnvironment() + + expect(environment.issuer).toBe('https://api.nutrient.io') + }) + + it('allows overriding issuer', () => { + process.env.MCP_TRANSPORT = 'http' + process.env.ISSUER = 'https://custom-issuer.example.com' + + const environment = getEnvironment() + + expect(environment.issuer).toBe('https://custom-issuer.example.com') + }) +}) diff --git a/tests/httpTransport.test.ts b/tests/httpTransport.test.ts new file mode 100644 index 0000000..9d1c7e7 --- /dev/null +++ b/tests/httpTransport.test.ts @@ -0,0 +1,273 @@ +import { createServer, type Server } from 'node:http' +import request from 'supertest' +import { afterAll, afterEach, beforeAll, describe, expect, it } from 'vitest' +import { exportJWK, generateKeyPair, SignJWT } from 'jose' +import { createHttpApp } 
from '../src/index.js' +import { Environment } from '../src/utils/environment.js' + +// ── Test JWKS server ────────────────────────────────────────────────────────── + +let jwksServer: Server +let jwksUrl: string +let testKeyPair: Awaited> +let testKid: string + +beforeAll(async () => { + testKid = 'test-key-1' + testKeyPair = await generateKeyPair('RS256') + + const publicJwk = await exportJWK(testKeyPair.publicKey) + publicJwk.kid = testKid + publicJwk.use = 'sig' + publicJwk.alg = 'RS256' + + const jwksPayload = JSON.stringify({ keys: [publicJwk] }) + + jwksServer = createServer((_req, res) => { + res.writeHead(200, { 'Content-Type': 'application/json' }) + res.end(jwksPayload) + }) + + await new Promise((resolve) => { + jwksServer.listen(0, '127.0.0.1', () => resolve()) + }) + + const address = jwksServer.address() + if (!address || typeof address === 'string') { + throw new Error('JWKS server did not bind') + } + + jwksUrl = `http://127.0.0.1:${address.port}/.well-known/jwks.json` +}) + +afterAll(async () => { + await new Promise((resolve) => jwksServer.close(() => resolve())) +}) + +// ── Helpers ─────────────────────────────────────────────────────────────────── + +const TEST_ISSUER = 'https://auth.example.com' +const TEST_RESOURCE_URL = 'https://mcp.example.com/mcp' + +async function signTestJwt(overrides: Record = {}, subject = 'user-1') { + const builder = new SignJWT({ + scope: 'mcp:invoke', + azp: 'test-client', + ...overrides, + }) + .setProtectedHeader({ alg: 'RS256', kid: testKid }) + .setIssuer(TEST_ISSUER) + .setSubject(subject) + .setAudience(TEST_RESOURCE_URL) + .setIssuedAt() + .setExpirationTime('5m') + + return builder.sign(testKeyPair.privateKey) +} + +function createEnvironment(overrides: Partial = {}): Environment { + return { + transportMode: 'http', + port: 3000, + host: '127.0.0.1', + allowedHosts: [], + nutrientApiKey: 'dws-api-key', + dwsApiBaseUrl: 'https://api.nutrient.io', + resourceUrl: TEST_RESOURCE_URL, + authServerUrl: 
TEST_ISSUER, + protectedResourceMetadataUrl: 'https://mcp.example.com/.well-known/oauth-protected-resource', + jwksUrl, + issuer: TEST_ISSUER, + tokenEndpointAuthMethod: 'client_secret_basic', + clientId: undefined, + clientSecret: undefined, + clientAssertionPrivateKey: undefined, + clientAssertionAlg: undefined, + clientAssertionKid: undefined, + ...overrides, + } +} + +const initializeRequest = { + jsonrpc: '2.0', + id: 1, + method: 'initialize', + params: { + protocolVersion: '2025-03-26', + capabilities: { + tools: {}, + }, + clientInfo: { + name: 'vitest-client', + version: '1.0.0', + }, + }, +} + +// ── Tests ───────────────────────────────────────────────────────────────────── + +describe('http transport', () => { + let closeApp: (() => Promise) | undefined + + afterEach(async () => { + if (closeApp) { + await closeApp() + closeApp = undefined + } + }) + + async function initializeSession(app: Parameters[0], token: string) { + const response = await request(app) + .post('/mcp') + .set('authorization', `Bearer ${token}`) + .set('accept', 'application/json, text/event-stream') + .send(initializeRequest) + + expect(response.status).toBe(200) + + const sessionId = response.headers['mcp-session-id'] + expect(typeof sessionId).toBe('string') + + await request(app) + .post('/mcp') + .set('authorization', `Bearer ${token}`) + .set('mcp-session-id', sessionId as string) + .set('accept', 'application/json, text/event-stream') + .send({ + jsonrpc: '2.0', + method: 'notifications/initialized', + params: {}, + }) + + return sessionId as string + } + + it('serves health and protected resource metadata endpoints', async () => { + const { app, close } = createHttpApp({ environment: createEnvironment(), sandboxEnabled: false }) + closeApp = close + + const healthResponse = await request(app).get('/health') + expect(healthResponse.status).toBe(200) + expect(healthResponse.body.status).toBe('ok') + + const metadataResponse = await 
request(app).get('/.well-known/oauth-protected-resource') + expect(metadataResponse.status).toBe(200) + expect(metadataResponse.body).toEqual({ + resource: TEST_RESOURCE_URL, + authorization_servers: [TEST_ISSUER], + }) + }) + + it('returns 401 and WWW-Authenticate on unauthenticated /mcp', async () => { + const { app, close } = createHttpApp({ environment: createEnvironment(), sandboxEnabled: false }) + closeApp = close + + const response = await request(app).post('/mcp').send(initializeRequest) + + expect(response.status).toBe(401) + expect(response.headers['www-authenticate']).toContain('resource_metadata=') + }) + + it('binds MCP session to principal fingerprint', async () => { + const { app, close } = createHttpApp({ environment: createEnvironment(), sandboxEnabled: false }) + closeApp = close + + const token1 = await signTestJwt({}, 'user-1') + const token2 = await signTestJwt({}, 'user-2') + + const sessionId = await initializeSession(app, token1) + + const response = await request(app) + .post('/mcp') + .set('authorization', `Bearer ${token2}`) + .set('mcp-session-id', sessionId) + .set('accept', 'application/json') + .send({ + jsonrpc: '2.0', + id: 2, + method: 'tools/list', + params: {}, + }) + + expect(response.status).toBe(403) + expect(response.text).toContain('different principal') + }) + + it('filters tools/list according to allowed tools in JWT', async () => { + const { app, close } = createHttpApp({ environment: createEnvironment(), sandboxEnabled: false }) + closeApp = close + + const token = await signTestJwt({ allowed_tools: ['check_credits'] }) + const sessionId = await initializeSession(app, token) + + const response = await request(app) + .post('/mcp') + .set('authorization', `Bearer ${token}`) + .set('mcp-session-id', sessionId) + .set('accept', 'application/json, text/event-stream') + .send({ + jsonrpc: '2.0', + id: 2, + method: 'tools/list', + params: {}, + }) + + expect(response.status).toBe(200) + + const toolsFromJson = 
response.body?.result?.tools + let tools: Array<{ name: string }> = Array.isArray(toolsFromJson) ? toolsFromJson : [] + + if (tools.length === 0 && response.text) { + const dataLines = response.text + .split('\n') + .map((line) => line.trim()) + .filter((line) => line.startsWith('data:')) + + for (const line of dataLines) { + const payload = line.slice('data:'.length).trim() + if (!payload) { + continue + } + + const parsed = JSON.parse(payload) as { result?: { tools?: Array<{ name: string }> } } + if (Array.isArray(parsed.result?.tools)) { + tools = parsed.result.tools + break + } + } + } + + const toolNames = tools.map((tool: { name: string }) => tool.name) + + expect(toolNames).toEqual(['check_credits']) + }) + + it('cleans up session on DELETE /mcp', async () => { + const { app, close } = createHttpApp({ environment: createEnvironment(), sandboxEnabled: false }) + closeApp = close + + const token = await signTestJwt() + const sessionId = await initializeSession(app, token) + + const deleteResponse = await request(app) + .delete('/mcp') + .set('authorization', `Bearer ${token}`) + .set('mcp-session-id', sessionId) + + expect(deleteResponse.status).toBe(200) + + const postResponse = await request(app) + .post('/mcp') + .set('authorization', `Bearer ${token}`) + .set('mcp-session-id', sessionId) + .set('accept', 'application/json') + .send({ + jsonrpc: '2.0', + id: 3, + method: 'tools/list', + params: {}, + }) + + expect(postResponse.status).toBe(404) + }) +}) diff --git a/tests/jwtAuth.test.ts b/tests/jwtAuth.test.ts new file mode 100644 index 0000000..7f286e4 --- /dev/null +++ b/tests/jwtAuth.test.ts @@ -0,0 +1,192 @@ +import express from 'express' +import request from 'supertest' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import { createServer, Server } from 'node:http' +import { createJwtAuthMiddleware } from '../src/http/jwtAuth.js' +import { RequestWithAuth } from '../src/http/types.js' +import { generateKeyPair, exportJWK, JWK, 
SignJWT } from 'jose' + +describe('jwt auth middleware', () => { + let jwksServer: Server + let jwksUrl: string + let issuer: string + let privateKey: CryptoKey + let publicJwk: JWK + + beforeAll(async () => { + const keyPair = await generateKeyPair('RS256') + privateKey = keyPair.privateKey + publicJwk = await exportJWK(keyPair.publicKey) + publicJwk.kid = 'test-key' + publicJwk.alg = 'RS256' + publicJwk.use = 'sig' + + jwksServer = createServer((req, res) => { + if (req.url === '/jwks') { + res.writeHead(200, { 'content-type': 'application/json' }) + res.end(JSON.stringify({ keys: [publicJwk] })) + return + } + + res.writeHead(404) + res.end() + }) + + await new Promise((resolve) => { + jwksServer.listen(0, '127.0.0.1', () => resolve()) + }) + + const address = jwksServer.address() + if (!address || typeof address === 'string') { + throw new Error('Failed to start JWKS server') + } + + issuer = `http://127.0.0.1:${address.port}` + jwksUrl = `${issuer}/jwks` + }) + + afterAll(async () => { + await new Promise((resolve, reject) => { + jwksServer.close((error) => { + if (error) { + reject(error) + return + } + + resolve() + }) + }) + }) + + async function createToken(overrides: Record = {}) { + const now = Math.floor(Date.now() / 1000) + + return new SignJWT({ + sub: 'user-1', + azp: 'client-1', + sid: 'session-1', + iss: issuer, + aud: 'dws-mcp', + scope: 'mcp:invoke', + exp: now + 300, + ...overrides, + }) + .setProtectedHeader({ alg: 'RS256', kid: 'test-key' }) + .sign(privateKey) + } + + function createApp(audience: string | string[] = 'dws-mcp') { + const app = express() + app.use( + createJwtAuthMiddleware({ + jwksUrl, + issuer, + audience, + requiredScope: 'mcp:invoke', + resourceMetadataUrl: `${issuer}/.well-known/oauth-protected-resource`, + }), + ) + + app.get('/protected', (req, res) => { + const authInfo = (req as RequestWithAuth).auth + res.json({ + clientId: authInfo?.clientId, + scopes: authInfo?.scopes, + allowedTools: authInfo?.extra?.allowedTools, 
+ }) + }) + + return app + } + + it('accepts valid JWTs', async () => { + const token = await createToken() + const app = createApp() + + const response = await request(app).get('/protected').set('authorization', `Bearer ${token}`) + + expect(response.status).toBe(200) + expect(response.body.clientId).toBe('client-1') + expect(response.body.scopes).toContain('mcp:invoke') + }) + + it('accepts JWTs whose audience matches the resource URL when configured', async () => { + const resourceUrl = 'http://localhost:3000/mcp' + const token = await createToken({ aud: resourceUrl }) + const app = createApp(['dws-mcp', resourceUrl]) + + const response = await request(app).get('/protected').set('authorization', `Bearer ${token}`) + + expect(response.status).toBe(200) + expect(response.body.clientId).toBe('client-1') + }) + + it('rejects JWTs with wrong audience', async () => { + const token = await createToken({ aud: 'wrong-audience' }) + const app = createApp() + + const response = await request(app).get('/protected').set('authorization', `Bearer ${token}`) + + expect(response.status).toBe(401) + expect(response.body.error).toBe('invalid_token') + }) + + it('rejects JWTs without required scope', async () => { + const token = await createToken({ scope: 'other:scope' }) + const app = createApp() + + const response = await request(app).get('/protected').set('authorization', `Bearer ${token}`) + + expect(response.status).toBe(401) + expect(response.body.error).toBe('invalid_token') + }) + + it('rejects expired JWTs', async () => { + const now = Math.floor(Date.now() / 1000) + const token = await createToken({ exp: now - 120 }) + const app = createApp() + + const response = await request(app).get('/protected').set('authorization', `Bearer ${token}`) + + expect(response.status).toBe(401) + expect(response.body.error).toBe('invalid_token') + }) + + it('maps allowed_tools claim to AuthInfo.extra.allowedTools', async () => { + const token = await createToken({ allowed_tools: 
['check_credits', 'document_processor'] }) + const app = createApp() + + const response = await request(app).get('/protected').set('authorization', `Bearer ${token}`) + + expect(response.status).toBe(200) + expect(response.body.allowedTools).toEqual(['check_credits', 'document_processor']) + }) + + it('falls back to unknown-client when sub and azp are missing', async () => { + const token = await createToken({ sub: undefined, azp: undefined, sid: undefined }) + const app = express() + + app.use( + createJwtAuthMiddleware({ + jwksUrl, + issuer, + audience: 'dws-mcp', + requiredScope: 'mcp:invoke', + resourceMetadataUrl: `${issuer}/.well-known/oauth-protected-resource`, + }), + ) + + app.get('/protected', (req, res) => { + const authInfo = (req as RequestWithAuth).auth + res.json({ + clientId: authInfo?.clientId, + extra: authInfo?.extra, + }) + }) + + const response = await request(app).get('/protected').set('authorization', `Bearer ${token}`) + + expect(response.status).toBe(200) + expect(response.body.clientId).toBe('unknown-client') + }) +}) diff --git a/tests/protectedResource.test.ts b/tests/protectedResource.test.ts new file mode 100644 index 0000000..ba333f5 --- /dev/null +++ b/tests/protectedResource.test.ts @@ -0,0 +1,35 @@ +import express from 'express' +import request from 'supertest' +import { describe, expect, it } from 'vitest' +import { buildWwwAuthenticateHeader, createProtectedResourceHandler } from '../src/http/protectedResource.js' + +describe('protected resource metadata', () => { + it('serves RFC9728 metadata document', async () => { + const app = express() + + app.get( + '/.well-known/oauth-protected-resource', + createProtectedResourceHandler({ + resourceUrl: 'https://mcp.nutrient.io/mcp', + authServerUrl: 'https://api.nutrient.io', + resourceMetadataUrl: 'https://mcp.nutrient.io/.well-known/oauth-protected-resource', + }), + ) + + const response = await request(app).get('/.well-known/oauth-protected-resource') + + 
expect(response.status).toBe(200) + expect(response.body).toEqual({ + resource: 'https://mcp.nutrient.io/mcp', + authorization_servers: ['https://api.nutrient.io'], + }) + }) + + it('builds WWW-Authenticate header with resource metadata', () => { + const header = buildWwwAuthenticateHeader({ + resourceMetadataUrl: 'https://mcp.nutrient.io/.well-known/oauth-protected-resource', + }) + + expect(header).toBe('Bearer resource_metadata="https://mcp.nutrient.io/.well-known/oauth-protected-resource"') + }) +}) diff --git a/tests/requestLogger.test.ts b/tests/requestLogger.test.ts new file mode 100644 index 0000000..7687256 --- /dev/null +++ b/tests/requestLogger.test.ts @@ -0,0 +1,68 @@ +import express from 'express' +import request from 'supertest' +import { describe, expect, it } from 'vitest' +import { createRequestLoggerMiddleware, isMcpDebugLoggingEnabled } from '../src/http/requestLogger.js' + +type LogEntry = { + level: 'debug' | 'info' + message: string +} + +describe('request logger middleware', () => { + it('logs request and response in readable arrow format', async () => { + const entries: LogEntry[] = [] + const logger = (level: 'debug' | 'info', message: string) => { + entries.push({ level, message }) + } + + const app = express() + app.use(express.json()) + app.use(createRequestLoggerMiddleware({ logger })) + app.post('/mcp', (req, res) => { + res.status(200).json({ + ok: true, + echo: req.body, + }) + }) + + const response = await request(app) + .post('/mcp') + .set('authorization', 'Bearer super-secret') + .set('x-request-id', 'request-123') + .send({ jsonrpc: '2.0', method: 'initialize' }) + + expect(response.status).toBe(200) + expect(response.headers['x-request-id']).toBe('request-123') + + expect(entries).toContainEqual({ level: 'info', message: '<<< POST /mcp' }) + + expect(entries).toContainEqual({ + level: 'debug', + message: JSON.stringify({ jsonrpc: '2.0', method: 'initialize' }), + }) + + expect(entries).toContainEqual({ level: 'info', message: 
'>>> Sent 200' }) + + expect(entries).toContainEqual({ + level: 'debug', + message: JSON.stringify({ + ok: true, + echo: { jsonrpc: '2.0', method: 'initialize' }, + }), + }) + }) +}) + +describe('isMcpDebugLoggingEnabled', () => { + it('recognizes common truthy values', () => { + expect(isMcpDebugLoggingEnabled({ MCP_DEBUG_LOGGING: 'true' })).toBe(true) + expect(isMcpDebugLoggingEnabled({ MCP_DEBUG_LOGGING: '1' })).toBe(true) + expect(isMcpDebugLoggingEnabled({ MCP_DEBUG_LOGGING: 'on' })).toBe(true) + }) + + it('returns false for unset or falsey values', () => { + expect(isMcpDebugLoggingEnabled({})).toBe(false) + expect(isMcpDebugLoggingEnabled({ MCP_DEBUG_LOGGING: 'false' })).toBe(false) + expect(isMcpDebugLoggingEnabled({ MCP_DEBUG_LOGGING: '0' })).toBe(false) + }) +}) diff --git a/tests/signing-api-examples.test.ts b/tests/signing-api-examples.test.ts index 14d5cef..507bd80 100644 --- a/tests/signing-api-examples.test.ts +++ b/tests/signing-api-examples.test.ts @@ -5,15 +5,19 @@ import { performSignCall } from '../src/dws/sign.js' import { SignAPIArgs } from '../src/schemas.js' import path from 'path' import { setSandboxDirectory } from '../src/fs/sandbox.js' +import { createApiClient } from '../src/dws/api.js' +import { DwsApiClient } from '../src/dws/client.js' dotenvConfig() describe('performSignCall with signing-api-examples', () => { let outputDirectory: string + let apiClient: DwsApiClient beforeAll(async () => { const assetsDir = path.join(__dirname, `assets`) await setSandboxDirectory(assetsDir) + apiClient = createApiClient({ apiKey: process.env.NUTRIENT_DWS_API_KEY! 
}) outputDirectory = `test-output-${new Date().toISOString().replace(/[:.]/g, '-')}` }) @@ -44,6 +48,7 @@ describe('performSignCall with signing-api-examples', () => { const result = await performSignCall( filePath, `${outputDirectory}/${outputPath}`, + apiClient, signatureOptions, watermarkImagePath, graphicImagePath, diff --git a/tests/unit.test.ts b/tests/unit.test.ts index b924728..fc4c2a5 100644 --- a/tests/unit.test.ts +++ b/tests/unit.test.ts @@ -9,12 +9,12 @@ import { performSignCall } from '../src/dws/sign.js' import { performAiRedactCall } from '../src/dws/ai-redact.js' import { performDirectoryTreeCall } from '../src/fs/directoryTree.js' import * as sandbox from '../src/fs/sandbox.js' -import * as api from '../src/dws/api.js' -import axios, { InternalAxiosRequestConfig } from 'axios' +import axios from 'axios' import path from 'path' import { FileHandle } from 'fs/promises' import { parseSandboxPath } from '../src/utils/sandbox.js' import { CallToolResult, TextContent } from '@modelcontextprotocol/sdk/types.js' +import { DwsApiClient } from '../src/dws/client.js' dotenvConfig() @@ -29,7 +29,23 @@ function getTextContent(result: CallToolResult, index: number = 0): string { vi.mock('axios') vi.mock('node:fs', { spy: true }) -vi.mock('../src/dws/api.js') + +function createMockApiClient(mockResponse?: { data: Readable; status?: number }): DwsApiClient { + const defaultResponse = { + data: createMockStream('default mock response'), + status: 200, + statusText: 'OK', + headers: {}, + config: {}, + } + + const response = mockResponse ? { ...defaultResponse, data: mockResponse.data, status: mockResponse.status ?? 
200 } : defaultResponse + + return { + post: vi.fn().mockResolvedValue(response), + get: vi.fn().mockResolvedValue(response), + } as unknown as DwsApiClient +} function createMockStream(content: string | Buffer): Readable { const readable = new Readable() @@ -65,17 +81,6 @@ describe('API Functions', () => { vi.spyOn(fs.promises, 'mkdir').mockReturnValue(Promise.resolve(undefined)) vi.spyOn(fs.promises, 'unlink').mockImplementation(async () => {}) vi.spyOn(fs.promises, 'rm').mockImplementation(async () => {}) - - vi.mocked(api.callNutrientApi).mockImplementation(async () => { - const mockStream = createMockStream('default mock response') - return { - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - } - }) }) afterEach(() => { @@ -85,27 +90,28 @@ describe('API Functions', () => { describe('performBuildCall', () => { it('should throw an error if file does not exist', async () => { const resolvedPath = path.resolve('/test.pdf') + const mockClient = createMockApiClient() vi.spyOn(fs.promises, 'access').mockImplementation(async () => { throw new Error(`Path not found: ${resolvedPath}`) }) - const buildCall = performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf') + const buildCall = performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf', mockClient) await expect(buildCall).rejects.toThrowError( `Error with referenced file /test.pdf: Path not found: ${resolvedPath}`, ) }) - it('should throw an error if API key is not set', async () => { - // Mock callNutrientApi to throw an error - vi.mocked(api.callNutrientApi).mockRejectedValue( + it('should return an error when the API client rejects', async () => { + const mockClient = createMockApiClient() + vi.mocked(mockClient.post).mockRejectedValue( new Error( 'Error: NUTRIENT_DWS_API_KEY environment variable is required. 
Please visit https://www.nutrient.io/api/ to get your free API key.', ), ) - const result = await performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf') + const result = await performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf', mockClient) expect(result.isError).toBe(true) expect(getTextContent(result)).toContain('NUTRIENT_DWS_API_KEY environment variable is required') @@ -113,52 +119,31 @@ describe('API Functions', () => { }) it('should use application/json when all inputs are URLs', async () => { - const mockStream = createMockStream('processed content') - vi.mocked(api.callNutrientApi).mockResolvedValueOnce({ - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - }) + const mockClient = createMockApiClient({ data: createMockStream('processed content') }) const instructions = { parts: [{ file: 'https://example.com/test.pdf' }], } - await performBuildCall(instructions, '/test_processed.pdf') + await performBuildCall(instructions, '/test_processed.pdf', mockClient) - expect(api.callNutrientApi).toHaveBeenCalledWith('build', instructions) + expect(mockClient.post).toHaveBeenCalledWith('build', instructions) }) it('should use multipart/form-data when local files are included', async () => { - const mockStream = createMockStream('processed content') - vi.mocked(api.callNutrientApi).mockResolvedValueOnce({ - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - }) + const mockClient = createMockApiClient({ data: createMockStream('processed content') }) const instructions = { parts: [{ file: '/test.pdf' }], } - await performBuildCall(instructions, '/test_processed.pdf') + await performBuildCall(instructions, '/test_processed.pdf', mockClient) - expect(api.callNutrientApi).toHaveBeenCalledWith('build', expect.any(Object)) + expect(mockClient.post).toHaveBeenCalledWith('build', expect.any(Object)) }) 
it('should handle json-content output type', async () => { - const mockStream = createMockStream('{"result": "success"}') - vi.mocked(api.callNutrientApi).mockResolvedValueOnce({ - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - }) + const mockClient = createMockApiClient({ data: createMockStream('{"result": "success"}') }) const instructions: Instructions = { parts: [{ file: 'https://example.com/test.pdf' }], @@ -170,7 +155,7 @@ describe('API Functions', () => { }, } - const result = await performBuildCall(instructions, '/test_processed.pdf') + const result = await performBuildCall(instructions, '/test_processed.pdf', mockClient) expect(result.isError).toBe(false) expect(result.content[0].type).toBe('text') @@ -178,35 +163,30 @@ describe('API Functions', () => { }) it('should handle file output and save to disk', async () => { - const mockStream = createMockStream('processed content') - vi.mocked(api.callNutrientApi).mockResolvedValueOnce({ - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - }) + const mockClient = createMockApiClient({ data: createMockStream('processed content') }) - await performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf') + await performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf', mockClient) expect(fs.promises.writeFile).toHaveBeenCalledWith(expect.stringContaining('_processed.pdf'), expect.any(Buffer)) }) it('should handle errors from the API', async () => { + const mockClient = createMockApiClient() const mockError = { response: { data: createMockStream('Error message from API'), }, } vi.mocked(axios.isAxiosError).mockImplementation(() => true) - vi.mocked(api.callNutrientApi).mockRejectedValueOnce(mockError) - const result = await performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf') + vi.mocked(mockClient.post).mockRejectedValueOnce(mockError) 
+ const result = await performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf', mockClient) expect(result.isError).toBe(true) expect(getTextContent(result)).toContain('Error processing API response: Error message from API') }) it('should handle HostedErrorResponse format from the API', async () => { + const mockClient = createMockApiClient() const hostedErrorResponse = { details: 'The request is malformed', status: 400, @@ -224,9 +204,9 @@ describe('API Functions', () => { }, } vi.mocked(axios.isAxiosError).mockImplementation(() => true) - vi.mocked(api.callNutrientApi).mockRejectedValueOnce(mockError) + vi.mocked(mockClient.post).mockRejectedValueOnce(mockError) - const result = await performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf') + const result = await performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf', mockClient) expect(result.isError).toBe(true) @@ -243,26 +223,28 @@ describe('API Functions', () => { describe('performSignCall', () => { it('should throw an error if file does not exist', async () => { const resolvedPath = path.resolve('/test.pdf') + const mockClient = createMockApiClient() vi.spyOn(fs.promises, 'access').mockImplementation(async () => { throw new Error(`Error with referenced file /test.pdf: Path not found: ${resolvedPath}`) }) - const buildCall = performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf') + const buildCall = performBuildCall({ parts: [{ file: '/test.pdf' }] }, '/test_processed.pdf', mockClient) await expect(buildCall).rejects.toThrowError( `Error with referenced file /test.pdf: Path not found: ${resolvedPath}`, ) }) - it('should throw an error if API key is not set', async () => { - vi.mocked(api.callNutrientApi).mockRejectedValueOnce( + it('should return an error when the API client rejects', async () => { + const mockClient = createMockApiClient() + vi.mocked(mockClient.post).mockRejectedValueOnce( new Error( 'Error: NUTRIENT_DWS_API_KEY 
environment variable is required. Please visit https://www.nutrient.io/api/ to get your free API key.', ), ) - const result = await performSignCall('/test.pdf', '/test_processed.pdf') + const result = await performSignCall('/test.pdf', '/test_processed.pdf', mockClient) expect(result.isError).toBe(true) expect(getTextContent(result)).toContain('NUTRIENT_DWS_API_KEY environment variable is required') @@ -270,14 +252,7 @@ describe('API Functions', () => { }) it('should send the file and signature options to the API', async () => { - const mockStream = createMockStream('signed content') - vi.mocked(api.callNutrientApi).mockResolvedValueOnce({ - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - }) + const mockClient = createMockApiClient({ data: createMockStream('signed content') }) const signatureOptions: SignatureOptions = { signatureType: 'cms', @@ -289,24 +264,18 @@ describe('API Functions', () => { }, } - await performSignCall('/test.pdf', '/test_processed.pdf', signatureOptions) + await performSignCall('/test.pdf', '/test_processed.pdf', mockClient, signatureOptions) - expect(api.callNutrientApi).toHaveBeenCalledWith('sign', expect.any(Object)) + expect(mockClient.post).toHaveBeenCalledWith('sign', expect.any(Object)) }) it('should include watermark image if provided', async () => { - const mockStream = createMockStream('signed content') - vi.mocked(api.callNutrientApi).mockResolvedValueOnce({ - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - }) + const mockClient = createMockApiClient({ data: createMockStream('signed content') }) await performSignCall( '/test.pdf', '/test_processed.pdf', + mockClient, { signatureType: 'cms', flatten: false }, '/watermark.png', ) @@ -315,18 +284,12 @@ describe('API Functions', () => { }) it('should include graphic image if provided', async () => { - const mockStream = createMockStream('signed content') 
- vi.mocked(api.callNutrientApi).mockResolvedValueOnce({ - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - }) + const mockClient = createMockApiClient({ data: createMockStream('signed content') }) await performSignCall( '/test.pdf', '/test_processed.pdf', + mockClient, { signatureType: 'cms', flatten: false }, undefined, '/graphic.png', @@ -336,30 +299,24 @@ describe('API Functions', () => { }) it('should save the result to disk', async () => { - const mockStream = createMockStream('signed content') - vi.mocked(api.callNutrientApi).mockResolvedValueOnce({ - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - }) + const mockClient = createMockApiClient({ data: createMockStream('signed content') }) - await performSignCall('/test.pdf', '/test_signed.pdf') + await performSignCall('/test.pdf', '/test_signed.pdf', mockClient) expect(fs.promises.writeFile).toHaveBeenCalledWith(expect.stringContaining('_signed.pdf'), expect.any(Buffer)) }) it('should handle errors from the API', async () => { + const mockClient = createMockApiClient() const mockError = { response: { data: createMockStream('Error message from API'), }, } vi.mocked(axios.isAxiosError).mockImplementation(() => true) - vi.mocked(api.callNutrientApi).mockRejectedValueOnce(mockError) + vi.mocked(mockClient.post).mockRejectedValueOnce(mockError) - const result = await performSignCall('/test.pdf', '/test_processed.pdf') + const result = await performSignCall('/test.pdf', '/test_processed.pdf', mockClient) expect(result.isError).toBe(true) expect(getTextContent(result)).toContain('Error processing API response: Error message from API') @@ -372,29 +329,39 @@ describe('API Functions', () => { }) it('should return an error if file does not exist', async () => { + const mockClient = createMockApiClient() vi.spyOn(sandbox, 'resolveReadFilePath').mockRejectedValueOnce(new Error('Path not found: 
/missing.pdf')) - const result = await performAiRedactCall('/missing.pdf', 'All personally identifiable information', '/out.pdf') + const result = await performAiRedactCall('/missing.pdf', 'All personally identifiable information', '/out.pdf', mockClient) expect(result.isError).toBe(true) expect(getTextContent(result)).toContain('Error: Path not found: /missing.pdf') }) it('should return an error when stage and apply are both true', async () => { + const mockClient = createMockApiClient() vi.spyOn(sandbox, 'resolveReadFilePath').mockResolvedValueOnce('/input.pdf') vi.spyOn(sandbox, 'resolveWriteFilePath').mockResolvedValueOnce('/output.pdf') - const result = await performAiRedactCall('/input.pdf', 'All personally identifiable information', '/output.pdf', true, true) + const result = await performAiRedactCall( + '/input.pdf', + 'All personally identifiable information', + '/output.pdf', + mockClient, + true, + true, + ) expect(result.isError).toBe(true) expect(getTextContent(result)).toBe('Error: stage and apply cannot both be true. 
Choose one mode.') }) it('should return an error when output path equals input path', async () => { + const mockClient = createMockApiClient() vi.spyOn(sandbox, 'resolveReadFilePath').mockResolvedValueOnce('/same.pdf') vi.spyOn(sandbox, 'resolveWriteFilePath').mockResolvedValueOnce('/same.pdf') - const result = await performAiRedactCall('/same.pdf', 'All personally identifiable information', '/same.pdf') + const result = await performAiRedactCall('/same.pdf', 'All personally identifiable information', '/same.pdf', mockClient) expect(result.isError).toBe(true) expect(getTextContent(result)).toContain( @@ -403,31 +370,20 @@ describe('API Functions', () => { }) it('should call the API and save the result to disk', async () => { + const mockClient = createMockApiClient({ data: createMockStream('redacted content') }) vi.spyOn(sandbox, 'resolveReadFilePath').mockResolvedValueOnce('/input.pdf') vi.spyOn(sandbox, 'resolveWriteFilePath').mockResolvedValueOnce('/redacted.pdf') - const mockStream = createMockStream('redacted content') - vi.mocked(api.callNutrientApi).mockResolvedValueOnce({ - data: mockStream, - status: 200, - statusText: 'OK', - headers: {}, - config: {} as InternalAxiosRequestConfig, - }) - - const result = await performAiRedactCall( - '/input.pdf', - 'All personally identifiable information', - '/redacted.pdf', - ) + const result = await performAiRedactCall('/input.pdf', 'All personally identifiable information', '/redacted.pdf', mockClient) expect(result.isError).toBe(false) expect(getTextContent(result)).toContain('AI redaction completed successfully') expect(fs.promises.writeFile).toHaveBeenCalledWith('/redacted.pdf', expect.any(Buffer)) - expect(api.callNutrientApi).toHaveBeenCalledWith('ai/redact', expect.any(Object)) + expect(mockClient.post).toHaveBeenCalledWith('ai/redact', expect.any(Object)) }) it('should handle errors from the API', async () => { + const mockClient = createMockApiClient() vi.spyOn(sandbox, 
'resolveReadFilePath').mockResolvedValueOnce('/input.pdf') vi.spyOn(sandbox, 'resolveWriteFilePath').mockResolvedValueOnce('/redacted.pdf') @@ -437,9 +393,9 @@ describe('API Functions', () => { }, } vi.mocked(axios.isAxiosError).mockImplementation(() => true) - vi.mocked(api.callNutrientApi).mockRejectedValueOnce(mockError) + vi.mocked(mockClient.post).mockRejectedValueOnce(mockError) - const result = await performAiRedactCall('/input.pdf', 'All personally identifiable information', '/redacted.pdf') + const result = await performAiRedactCall('/input.pdf', 'All personally identifiable information', '/redacted.pdf', mockClient) expect(result.isError).toBe(true) expect(getTextContent(result)).toContain('Error processing API response: Error message from API') @@ -888,12 +844,9 @@ describe('API Functions', () => { usage: { totalCredits: 100, usedCredits: 42 }, } - vi.stubEnv('NUTRIENT_DWS_API_KEY', 'test-key') - vi.spyOn(axios, 'get').mockResolvedValue({ - data: Readable.from([JSON.stringify(apiResponse)]), - }) + const mockClient = createMockApiClient({ data: Readable.from([JSON.stringify(apiResponse)]) }) - const result = await performCheckCreditsCall() + const result = await performCheckCreditsCall(mockClient) expect(result.isError).toBe(false) const text = (result.content[0] as TextContent).text @@ -904,31 +857,15 @@ describe('API Functions', () => { expect(parsed.remainingCredits).toBe(58) // Must not contain the API key expect(text).not.toContain('sk_live_secret') - - vi.restoreAllMocks() }) it('should handle non-JSON API response', async () => { - vi.stubEnv('NUTRIENT_DWS_API_KEY', 'test-key') - vi.spyOn(axios, 'get').mockResolvedValue({ - data: Readable.from(['not json']), - }) + const mockClient = createMockApiClient({ data: Readable.from(['not json']) }) - const result = await performCheckCreditsCall() + const result = await performCheckCreditsCall(mockClient) expect(result.isError).toBe(true) expect((result.content[0] as 
TextContent).text).toContain('Unexpected non-JSON response') - - vi.restoreAllMocks() - }) - - it('should error when API key is not set', async () => { - vi.stubEnv('NUTRIENT_DWS_API_KEY', '') - delete process.env.NUTRIENT_DWS_API_KEY - - await expect(performCheckCreditsCall()).rejects.toThrow('NUTRIENT_DWS_API_KEY not set') - - vi.restoreAllMocks() }) }) })