Skip to content

Commit 2399d1f

Browse files
fix: fallback token accounting when api tokens missing
1 parent 8896aaa commit 2399d1f

File tree

1 file changed

+24
-8
lines changed

1 file changed

+24
-8
lines changed

lib/tools/compress.ts

Lines changed: 24 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ import { getCurrentParams, countAllMessageTokens, countTokens } from "../strateg
88
import type { AssistantMessage } from "@opencode-ai/sdk/v2"
99
import { findStringInMessages, collectToolIdsInRange, collectMessageIdsInRange } from "./utils"
1010
import { sendCompressNotification } from "../ui/notification"
11-
import { cacheSystemPromptTokens } from "../ui/utils"
11+
import { buildCompressionGraphData, cacheSystemPromptTokens } from "../ui/utils"
1212
import { prune as applyPruneTransforms } from "../messages/prune"
1313
import { clog, C } from "../compress-logger"
1414

@@ -350,21 +350,37 @@ export function createCompressTool(ctx: ToolContext): ReturnType<typeof tool> {
350350

351351
// Use API-reported tokens from last assistant message (matches OpenCode UI)
352352
let totalSessionTokens = 0
353+
let hasApiTokenMetadata = false
353354
for (let i = messages.length - 1; i >= 0; i--) {
354355
if (messages[i].info.role === "assistant") {
355356
const info = messages[i].info as AssistantMessage
356-
if (info.tokens?.output > 0) {
357-
totalSessionTokens =
358-
(info.tokens?.input || 0) +
359-
(info.tokens?.output || 0) +
360-
(info.tokens?.reasoning || 0) +
361-
(info.tokens?.cache?.read || 0) +
362-
(info.tokens?.cache?.write || 0)
357+
const input = info.tokens?.input || 0
358+
const output = info.tokens?.output || 0
359+
const reasoning = info.tokens?.reasoning || 0
360+
const cacheRead = info.tokens?.cache?.read || 0
361+
const cacheWrite = info.tokens?.cache?.write || 0
362+
const total = input + output + reasoning + cacheRead + cacheWrite
363+
if (total > 0) {
364+
totalSessionTokens = total
365+
hasApiTokenMetadata = true
363366
break
364367
}
365368
}
366369
}
367370

371+
if (!hasApiTokenMetadata) {
372+
const estimated = buildCompressionGraphData(
373+
state,
374+
messages,
375+
new Set<string>(),
376+
new Set<string>(),
377+
)
378+
totalSessionTokens = estimated.totalSessionTokens
379+
clog.info(C.COMPRESS, `Token Accounting Fallback`, {
380+
totalSessionTokens,
381+
})
382+
}
383+
368384
// Cap estimate — countAllMessageTokens can inflate beyond API count
369385
if (totalSessionTokens > 0 && estimatedCompressedTokens > totalSessionTokens) {
370386
estimatedCompressedTokens = Math.round(totalSessionTokens * 0.95)

0 commit comments

Comments (0)