Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@
"build:dev": "bun run ./scripts/build.ts --dev",
"build:dev:full": "bun run ./scripts/build.ts --dev --feature-set=dev-full",
"compile": "bun run ./scripts/build.ts --compile",
"dev": "bun run ./src/entrypoints/cli.tsx"
"dev": "bun run ./src/entrypoints/cli.tsx",
"test": "bun test"
},
"dependencies": {
"@alcalzone/ansi-tokenize": "^0.3.0",
Expand Down
27 changes: 19 additions & 8 deletions src/services/api/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ import {
getClaudeAIOAuthTokens,
getCodexOAuthTokens,
isClaudeAISubscriber,
isCodexSubscriber,
refreshAndGetAwsCredentials,
refreshGcpCredentialsIfNeeded,
} from 'src/utils/auth.js'
Expand All @@ -35,7 +34,13 @@ import {
getVertexRegionForModel,
isEnvTruthy,
} from '../../utils/envUtils.js'
import { createCodexFetch } from './codex-fetch-adapter.js'
import {
resolveCodexProviderBridge,
} from './codex-provider-bridge.js'
import {
createCodexFetch,
createResponsesFetch,
} from './codex-fetch-adapter.js'

/**
* Environment variables for different client types:
Expand Down Expand Up @@ -305,15 +310,21 @@ export async function getAnthropicClient({
return new AnthropicVertex(vertexArgs) as unknown as Anthropic
}

// ── Codex (OpenAI) provider via fetch adapter ─────────────────────
if (isCodexSubscriber()) {
if (getAPIProvider() === 'openai') {
const codexTokens = getCodexOAuthTokens()
if (codexTokens?.accessToken) {
const codexFetch = createCodexFetch(codexTokens.accessToken)
const bridge = resolveCodexProviderBridge({
codexOAuthAccessToken: codexTokens?.accessToken ?? null,
})

if (bridge) {
const bridgeFetch =
bridge.kind === 'chatgpt'
? createCodexFetch(bridge.accessToken)
: createResponsesFetch(bridge)
const clientConfig: ConstructorParameters<typeof Anthropic>[0] = {
apiKey: 'codex-placeholder', // SDK requires a key but the fetch adapter handles auth
apiKey: 'codex-placeholder',
...ARGS,
fetch: codexFetch as unknown as typeof globalThis.fetch,
fetch: bridgeFetch as unknown as typeof globalThis.fetch,
...(isDebugToStdErr() && { logger: createStderrLogger() }),
}
return new Anthropic(clientConfig)
Expand Down
64 changes: 51 additions & 13 deletions src/services/api/codex-fetch-adapter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
*/

import { getCodexOAuthTokens } from '../../utils/auth.js'
import type { CodexResponsesBridgeConfig } from './codex-provider-bridge.js'

// ── Available Codex models ──────────────────────────────────────────
export const CODEX_MODELS = [
Expand Down Expand Up @@ -738,15 +739,28 @@ async function translateCodexStreamToAnthropic(

// ChatGPT-backed Codex Responses endpoint; used by createCodexFetch.
const CODEX_BASE_URL = 'https://chatgpt.com/backend-api/codex/responses'

// Configuration for the shared Responses-bridge fetch adapter.
type ResponsesBridgeFetchOptions = {
  // Bearer token used for Authorization when getAccessToken is not supplied.
  accessToken: string
  // Responses API URL that translated requests are POSTed to.
  endpoint: string
  // Extra headers merged into every outgoing request (after the defaults).
  extraHeaders?: Record<string, string>
  // Optional per-request token resolver, e.g. to pick up refreshed OAuth tokens.
  getAccessToken?: () => string
}

/**
 * Creates a fetch function that intercepts Anthropic API calls and routes them
 * to an OpenAI Responses-style endpoint.
 * @param accessToken - Fallback bearer token used when getAccessToken is absent
 * @param endpoint - Responses API URL to POST translated requests to
 * @param extraHeaders - Additional headers merged into each request
 * @param getAccessToken - Optional per-request token resolver (e.g. refreshed OAuth)
 * @returns A fetch function that translates Anthropic requests to Responses format
 */
export function createCodexFetch(
accessToken: string,
): (input: RequestInfo | URL, init?: RequestInit) => Promise<Response> {
const accountId = extractAccountId(accessToken)
function createResponsesBridgeFetch({
accessToken,
endpoint,
extraHeaders,
getAccessToken,
}: ResponsesBridgeFetchOptions): (
input: RequestInfo | URL,
init?: RequestInit,
) => Promise<Response> {
const resolveAccessToken = getAccessToken ?? (() => accessToken)

return async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
const url = input instanceof Request ? input.url : String(input)
Expand All @@ -770,23 +784,17 @@ export function createCodexFetch(
anthropicBody = {}
}

// Get current token (may have been refreshed)
const tokens = getCodexOAuthTokens()
const currentToken = tokens?.accessToken || accessToken

// Translate to Codex format
const { codexBody, codexModel } = translateToCodexBody(anthropicBody)

// Call Codex API
const codexResponse = await globalThis.fetch(CODEX_BASE_URL, {
const codexResponse = await globalThis.fetch(endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Accept: 'text/event-stream',
Authorization: `Bearer ${currentToken}`,
'chatgpt-account-id': accountId,
originator: 'pi',
'OpenAI-Beta': 'responses=experimental',
Authorization: `Bearer ${resolveAccessToken()}`,
...(extraHeaders ?? {}),
},
body: JSON.stringify(codexBody),
})
Expand All @@ -810,3 +818,33 @@ export function createCodexFetch(
return translateCodexStreamToAnthropic(codexResponse, codexModel)
}
}

/**
 * Builds the ChatGPT-backed Codex fetch adapter. Requests are authorized with
 * the freshest stored Codex OAuth token (falling back to the token given here)
 * and routed to the ChatGPT Codex Responses endpoint with the ChatGPT-specific
 * headers the backend expects.
 * @param accessToken - Initial Codex OAuth access token
 * @returns A fetch function that translates Anthropic requests to Codex format
 */
export function createCodexFetch(
  accessToken: string,
): (input: RequestInfo | URL, init?: RequestInit) => Promise<Response> {
  // The account id is derived once from the token we were handed at creation.
  const chatgptHeaders: Record<string, string> = {
    'chatgpt-account-id': extractAccountId(accessToken),
    originator: 'pi',
    'OpenAI-Beta': 'responses=experimental',
  }

  // Prefer a (possibly refreshed) stored token on every request.
  const resolveToken = (): string =>
    getCodexOAuthTokens()?.accessToken || accessToken

  return createResponsesBridgeFetch({
    accessToken,
    endpoint: CODEX_BASE_URL,
    extraHeaders: chatgptHeaders,
    getAccessToken: resolveToken,
  })
}

/**
 * Builds a fetch adapter for a custom OpenAI-compatible Responses provider
 * resolved from Codex configuration (API-key auth plus static headers).
 * @param config - Resolved provider bridge: endpoint, API key, and headers
 * @returns A fetch function that translates Anthropic requests to Responses format
 */
export function createResponsesFetch(
  config: CodexResponsesBridgeConfig,
): (input: RequestInfo | URL, init?: RequestInit) => Promise<Response> {
  const { apiKey, endpoint, headers } = config
  return createResponsesBridgeFetch({
    accessToken: apiKey,
    endpoint,
    extraHeaders: headers,
  })
}
138 changes: 138 additions & 0 deletions src/services/api/codex-provider-bridge.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
import { afterEach, describe, expect, it } from 'bun:test'
import { mkdtempSync, rmSync, writeFileSync } from 'fs'
import { tmpdir } from 'os'
import { join } from 'path'
import { resolveCodexProviderBridge } from './codex-provider-bridge.js'

// Temp CODEX_HOME directories created during the run; removed in afterEach.
const tempDirs: string[] = []

/** Creates a throwaway CODEX_HOME directory and registers it for cleanup. */
function createCodexHome(): string {
  const codexHome = mkdtempSync(join(tmpdir(), 'free-code-codex-'))
  tempDirs.push(codexHome)
  return codexHome
}

// Drop every temp CODEX_HOME created by the test that just ran.
afterEach(() => {
  for (const dir of tempDirs.splice(0)) {
    rmSync(dir, { force: true, recursive: true })
  }
})

// Each test materializes a fake CODEX_HOME on disk (config.toml / auth.json)
// and asserts which provider bridge resolveCodexProviderBridge returns.
describe('resolveCodexProviderBridge', () => {
  it('resolves a custom Responses provider from config.toml', () => {
    const codexHomeDir = createCodexHome()
    // Custom provider: API-key auth via env_key, a static header, a header
    // resolved from the env, and extra query params on the endpoint.
    // NOTE: the fixture string is intentionally unindented — it is TOML content.
    writeFileSync(
      join(codexHomeDir, 'config.toml'),
      `
model_provider = "openai-custom"

[model_providers.openai-custom]
name = "OpenAI Custom"
base_url = "https://example.com/v1"
env_key = "OPENAI_CUSTOM_API_KEY"
wire_api = "responses"

[model_providers.openai-custom.http_headers]
x-static = "static-value"

[model_providers.openai-custom.env_http_headers]
x-workspace = "OPENAI_WORKSPACE_ID"

[model_providers.openai-custom.query_params]
api-version = "2025-04-01"
`,
    )

    const bridge = resolveCodexProviderBridge({
      codexHomeDir,
      env: {
        OPENAI_CUSTOM_API_KEY: 'sk-custom',
        OPENAI_WORKSPACE_ID: 'ws_123',
      },
    })

    // endpoint = base_url + '/responses' + query_params; env_http_headers are
    // looked up in the provided env (x-workspace <- OPENAI_WORKSPACE_ID).
    expect(bridge).toEqual({
      kind: 'responses',
      providerId: 'openai-custom',
      providerName: 'OpenAI Custom',
      endpoint: 'https://example.com/v1/responses?api-version=2025-04-01',
      apiKey: 'sk-custom',
      headers: {
        'x-static': 'static-value',
        'x-workspace': 'ws_123',
      },
    })
  })

  it('uses auth.json OPENAI_API_KEY for the default openai provider', () => {
    const codexHomeDir = createCodexHome()
    // No config.toml: only an auth.json carrying a plain API key.
    writeFileSync(
      join(codexHomeDir, 'auth.json'),
      JSON.stringify({
        OPENAI_API_KEY: 'sk-from-auth-file',
      }),
    )

    const bridge = resolveCodexProviderBridge({
      codexHomeDir,
      env: {},
    })

    // Falls back to the built-in "openai" provider pointed at api.openai.com.
    expect(bridge).toEqual({
      kind: 'responses',
      providerId: 'openai',
      providerName: 'OpenAI',
      endpoint: 'https://api.openai.com/v1/responses',
      apiKey: 'sk-from-auth-file',
      headers: {},
    })
  })

  it('falls back to ChatGPT auth.json tokens when no API key is available', () => {
    const codexHomeDir = createCodexHome()
    // auth.json with OAuth tokens only (no OPENAI_API_KEY).
    writeFileSync(
      join(codexHomeDir, 'auth.json'),
      JSON.stringify({
        tokens: {
          access_token: 'chatgpt-token',
        },
      }),
    )

    const bridge = resolveCodexProviderBridge({
      codexHomeDir,
      env: {},
    })

    // Resolves the ChatGPT-backed bridge instead of a Responses provider.
    expect(bridge).toEqual({
      kind: 'chatgpt',
      providerId: 'openai',
      accessToken: 'chatgpt-token',
    })
  })

  it('rejects providers that do not speak the Responses API', () => {
    const codexHomeDir = createCodexHome()
    // wire_api = "chat" is unsupported by the bridge and must be rejected.
    writeFileSync(
      join(codexHomeDir, 'config.toml'),
      `
model_provider = "legacy"

[model_providers.legacy]
name = "Legacy"
base_url = "https://example.com/v1"
env_key = "OPENAI_API_KEY"
wire_api = "chat"
`,
    )

    // Error message is expected to mention the required wire_api value.
    expect(() =>
      resolveCodexProviderBridge({
        codexHomeDir,
        env: {
          OPENAI_API_KEY: 'sk-test',
        },
      }),
    ).toThrow('wire_api = "responses"')
  })
})
Loading