Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .changeset/status-line-token-breakdown.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
"@perstack/core": patch
"@perstack/runtime": patch
"@perstack/tui-components": patch
---

feat: add 2-line status layout with per-type token breakdown and provider name
2 changes: 2 additions & 0 deletions packages/core/src/adapters/event-creators.ts
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ export function createRuntimeInitEvent(
expertName: string,
version: string,
query?: string,
providerName = "unknown",
): RuntimeEvent {
return {
type: "initializeRuntime",
Expand All @@ -82,6 +83,7 @@ export function createRuntimeInitEvent(
expertName,
experts: [],
model: "local:default",
providerName,
maxRetries: 0,
timeout: 0,
query,
Expand Down
1 change: 1 addition & 0 deletions packages/core/src/schemas/runtime.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@ describe("@perstack/core: createRuntimeEvent", () => {
expertName: "test-expert",
experts: ["expert-1", "expert-2"],
model: "claude-sonnet-4-20250514",
providerName: "anthropic",
maxRetries: 3,
timeout: 30000,
})
Expand Down
1 change: 1 addition & 0 deletions packages/core/src/schemas/runtime.ts
Original file line number Diff line number Diff line change
Expand Up @@ -461,6 +461,7 @@ type RuntimeEventPayloads = {
expertName: string
experts: string[]
model: string
providerName: string
maxRetries: number
timeout: number
query?: string
Expand Down
1 change: 1 addition & 0 deletions packages/core/src/utils/event-filter.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ describe("@perstack/core: createFilteredEventListener", () => {
timestamp: Date.now(),
jobId: "job-1",
runId: "run-1",
providerName: "anthropic",
} as PerstackEvent
const event2 = {
type: "skillStarting",
Expand Down
1 change: 1 addition & 0 deletions packages/react/src/hooks/use-run.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,7 @@ describe("useRun processing logic", () => {
jobId: "job-1",
type: "initializeRuntime",
timestamp: Date.now(),
providerName: "anthropic",
} as PerstackEvent
processRunEventToActivity(state, runtimeEvent, addActivity)

Expand Down
1 change: 1 addition & 0 deletions packages/react/src/utils/event-to-activity.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -248,6 +248,7 @@ describe("processRunEventToActivity", () => {
jobId: "job-1",
type: "initializeRuntime",
timestamp: Date.now(),
providerName: "anthropic",
} as PerstackEvent
processRunEventToActivity(state, runtimeEvent, (a) => activities.push(a))
expect(activities).toHaveLength(0)
Expand Down
1 change: 1 addition & 0 deletions packages/runtime/src/orchestration/coordinator-executor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,7 @@ export class CoordinatorExecutor {
expertName: expertToRun.name,
experts: Object.keys(experts),
model: resolvedModel,
providerName: setting.providerConfig.providerName,
maxRetries: setting.maxRetries,
timeout: setting.timeout,
query: setting.input.text,
Expand Down
6 changes: 6 additions & 0 deletions packages/tui-components/src/execution/app.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,12 @@ export const ExecutionApp = (props: ExecutionAppProps) => {
runningCount={state.runningCount}
waitingCount={state.waitingCount}
formattedTotalTokens={state.formattedTotalTokens}
formattedReasoningTokens={state.formattedReasoningTokens}
formattedInputTokens={state.formattedInputTokens}
formattedCachedInputTokens={state.formattedCachedInputTokens}
formattedOutputTokens={state.formattedOutputTokens}
providerName={state.providerName}
cacheHitRate={state.cacheHitRate}
elapsedTime={state.elapsedTime}
/>
</Box>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,12 @@ function TreeNodeLine({
{node.actionFileArg ? <Text color={colors.muted}> {node.actionFileArg}</Text> : null}
{showUsage ? (
<>
{node.model ? (
<>
<Text dimColor> · </Text>
<Text dimColor>{node.model}</Text>
</>
) : null}
<Text dimColor> · </Text>
<Text>
{usageIcon} {usagePercent}%
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,12 @@ type InterfacePanelProps = {
runningCount: number
waitingCount: number
formattedTotalTokens: string
formattedReasoningTokens: string
formattedInputTokens: string
formattedCachedInputTokens: string
formattedOutputTokens: string
providerName: string | undefined
cacheHitRate: string
elapsedTime: string
}

Expand All @@ -24,7 +30,12 @@ export const InterfacePanel = ({
delegationTreeState,
runningCount,
waitingCount,
formattedTotalTokens,
formattedReasoningTokens,
formattedInputTokens,
formattedCachedInputTokens,
formattedOutputTokens,
providerName,
cacheHitRate,
elapsedTime,
}: InterfacePanelProps): React.ReactNode => {
const { input, handleInput } = useTextInput({
Expand Down Expand Up @@ -59,8 +70,18 @@ export const InterfacePanel = ({
{waitingCount > 0 ? <Text> · {waitingCount} waiting</Text> : null}
<Text> · </Text>
<Text>{elapsedTime}</Text>
<Text> · </Text>
<Text>{formattedTotalTokens} tokens</Text>
{providerName ? <Text> · {providerName}</Text> : null}
</Text>
<Text>
<Text dimColor>R </Text>
<Text>{formattedReasoningTokens}</Text>
<Text dimColor> · I </Text>
<Text>{formattedInputTokens}</Text>
<Text dimColor> · C </Text>
<Text>{formattedCachedInputTokens}</Text>
<Text dimColor> · O </Text>
<Text>{formattedOutputTokens}</Text>
<Text dimColor> (cache: {cacheHitRate}%)</Text>
</Text>
</>
)}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -176,13 +176,15 @@ describe("processDelegationTreeEvent", () => {
expertName: "test",
experts: [],
model: "gpt-4",
providerName: "anthropic",
maxRetries: 3,
timeout: 30000,
query: "Hello world",
})
const changed = processDelegationTreeEvent(state, event)
expect(changed).toBe(true)
expect(state.jobStartedAt).toBe(1000000)
expect(state.providerName).toBe("anthropic")
})

it("does not overwrite jobStartedAt on subsequent initializeRuntime events", () => {
Expand All @@ -194,12 +196,14 @@ describe("processDelegationTreeEvent", () => {
expertName: "root",
experts: [],
model: "gpt-4",
providerName: "anthropic",
maxRetries: 3,
timeout: 30000,
query: "Hello",
}),
)
expect(state.jobStartedAt).toBe(1000000)
expect(state.providerName).toBe("anthropic")
// Second initializeRuntime from a delegate should not overwrite
processDelegationTreeEvent(state, {
id: "evt-99",
Expand All @@ -211,11 +215,13 @@ describe("processDelegationTreeEvent", () => {
expertName: "delegate",
experts: [],
model: "gpt-4",
providerName: "openai",
maxRetries: 3,
timeout: 30000,
query: "Delegated query",
} as PerstackEvent)
expect(state.jobStartedAt).toBe(1000000)
expect(state.providerName).toBe("anthropic")
})

it("handles startRun for root expert", () => {
Expand Down Expand Up @@ -290,6 +296,51 @@ describe("processDelegationTreeEvent", () => {
expect(node.totalTokens).toBe(500)
expect(node.contextWindowUsage).toBe(0.5)
})

// Verifies that the per-type token counters on the delegation-tree state
// ACCUMULATE across successive usage-bearing events (callTools, completeRun)
// on the same run, rather than being overwritten by the latest event.
it("accumulates per-type token breakdown across callTools and completeRun", () => {
const state = createInitialDelegationTreeState()
// Register run-1 first so the later usage events find an existing node.
processDelegationTreeEvent(
state,
makeRunEvent("startRun", "run-1", "test@1.0.0", {
initialCheckpoint: makeCheckpoint({ runId: "run-1" }),
inputMessages: [],
}),
)
// First usage sample (callTools): 100 input, 50 output, 20 reasoning,
// 80 cached-input — 250 total.
processDelegationTreeEvent(
state,
makeRunEvent("callTools", "run-1", "test@1.0.0", {
newMessage: {},
toolCalls: [makeToolCall("readTextFile", { path: "a.txt" })],
usage: {
inputTokens: 100,
outputTokens: 50,
reasoningTokens: 20,
cachedInputTokens: 80,
totalTokens: 250,
},
}),
)
// Second usage sample (completeRun): 200 input, 30 output, 10 reasoning,
// 150 cached-input — 390 total. `baseUsage` here only fills the step's
// usage field; the per-type accumulation under test reads the top-level
// `usage` object — NOTE(review): presumably, confirm against the handler.
processDelegationTreeEvent(
state,
makeRunEvent("completeRun", "run-1", "test@1.0.0", {
checkpoint: makeCheckpoint({ runId: "run-1" }),
step: { stepNumber: 1, newMessages: [], usage: baseUsage },
text: "Done",
usage: {
inputTokens: 200,
outputTokens: 30,
reasoningTokens: 10,
cachedInputTokens: 150,
totalTokens: 390,
},
}),
)
// Each counter is the sum of the two events:
// reasoning 20+10, input 100+200, cached 80+150, output 50+30, total 250+390.
expect(state.jobReasoningTokens).toBe(30)
expect(state.jobInputTokens).toBe(300)
expect(state.jobCachedInputTokens).toBe(230)
expect(state.jobOutputTokens).toBe(80)
expect(state.jobTotalTokens).toBe(640)
})
})

describe("delegation lifecycle", () => {
Expand Down
40 changes: 40 additions & 0 deletions packages/tui-components/src/execution/hooks/use-delegation-tree.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ export type DelegationTreeNode = {
actionLabel: string
actionFileArg: string | undefined
contextWindowUsage: number
model: string | undefined
parentRunId: string | undefined
childRunIds: string[]
totalTokens: number
Expand All @@ -25,6 +26,11 @@ export type DelegationTreeState = {
nodes: Map<string, DelegationTreeNode>
rootRunId: string | undefined
jobTotalTokens: number
jobReasoningTokens: number
jobInputTokens: number
jobCachedInputTokens: number
jobOutputTokens: number
providerName: string | undefined
jobStartedAt: number | undefined
runIdAliases: Map<string, string>
}
Expand All @@ -43,6 +49,11 @@ export function createInitialDelegationTreeState(): DelegationTreeState {
nodes: new Map(),
rootRunId: undefined,
jobTotalTokens: 0,
jobReasoningTokens: 0,
jobInputTokens: 0,
jobCachedInputTokens: 0,
jobOutputTokens: 0,
providerName: undefined,
jobStartedAt: undefined,
runIdAliases: new Map(),
}
Expand Down Expand Up @@ -194,6 +205,9 @@ export function processDelegationTreeEvent(
if (state.jobStartedAt === undefined) {
state.jobStartedAt = event.timestamp
}
if (state.providerName === undefined) {
state.providerName = event.providerName
}
return true
}

Expand All @@ -218,6 +232,7 @@ export function processDelegationTreeEvent(
actionLabel: "Starting...",
actionFileArg: undefined,
contextWindowUsage: initCheckpoint.contextWindowUsage ?? 0,
model: event.model,
parentRunId,
childRunIds: [],
totalTokens: 0,
Expand Down Expand Up @@ -268,6 +283,7 @@ export function processDelegationTreeEvent(
const node = state.nodes.get(nodeId)
if (node) {
node.status = "running"
node.model = event.model
if (checkpoint.contextWindowUsage !== undefined) {
node.contextWindowUsage = checkpoint.contextWindowUsage
}
Expand Down Expand Up @@ -304,6 +320,10 @@ export function processDelegationTreeEvent(
node.actionFileArg = fileArg
node.totalTokens += event.usage.totalTokens
state.jobTotalTokens += event.usage.totalTokens
state.jobReasoningTokens += event.usage.reasoningTokens
state.jobInputTokens += event.usage.inputTokens
state.jobCachedInputTokens += event.usage.cachedInputTokens
state.jobOutputTokens += event.usage.outputTokens
}
return true
}
Expand Down Expand Up @@ -331,6 +351,10 @@ export function processDelegationTreeEvent(
node.actionFileArg = undefined
node.totalTokens += event.usage.totalTokens
state.jobTotalTokens += event.usage.totalTokens
state.jobReasoningTokens += event.usage.reasoningTokens
state.jobInputTokens += event.usage.inputTokens
state.jobCachedInputTokens += event.usage.cachedInputTokens
state.jobOutputTokens += event.usage.outputTokens
if (event.checkpoint.contextWindowUsage !== undefined) {
node.contextWindowUsage = event.checkpoint.contextWindowUsage
}
Expand Down Expand Up @@ -371,6 +395,10 @@ export function processDelegationTreeEvent(
if (node) {
node.totalTokens += event.usage.totalTokens
state.jobTotalTokens += event.usage.totalTokens
state.jobReasoningTokens += event.usage.reasoningTokens
state.jobInputTokens += event.usage.inputTokens
state.jobCachedInputTokens += event.usage.cachedInputTokens
state.jobOutputTokens += event.usage.outputTokens
}
return true
}
Expand Down Expand Up @@ -400,5 +428,17 @@ export function useDelegationTree() {
processEvent,
statusCounts: getStatusCounts(state),
formattedTotalTokens: formatTokenCount(state.jobTotalTokens),
formattedReasoningTokens: formatTokenCount(state.jobReasoningTokens),
formattedInputTokens: formatTokenCount(state.jobInputTokens),
formattedCachedInputTokens: formatTokenCount(state.jobCachedInputTokens),
formattedOutputTokens: formatTokenCount(state.jobOutputTokens),
providerName: state.providerName,
cacheHitRate:
state.jobInputTokens + state.jobCachedInputTokens > 0
? (
(state.jobCachedInputTokens / (state.jobInputTokens + state.jobCachedInputTokens)) *
100
).toFixed(2)
: "0.00",
}
}
12 changes: 12 additions & 0 deletions packages/tui-components/src/execution/hooks/use-execution-state.ts
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,12 @@ export type ExecutionState = {
runningCount: number
waitingCount: number
formattedTotalTokens: string
formattedReasoningTokens: string
formattedInputTokens: string
formattedCachedInputTokens: string
formattedOutputTokens: string
providerName: string | undefined
cacheHitRate: string
elapsedTime: string
}

Expand Down Expand Up @@ -173,6 +179,12 @@ export const useExecutionState = (options: UseExecutionStateOptions): ExecutionS
runningCount: delegationTree.statusCounts.running,
waitingCount: delegationTree.statusCounts.waiting,
formattedTotalTokens: delegationTree.formattedTotalTokens,
formattedReasoningTokens: delegationTree.formattedReasoningTokens,
formattedInputTokens: delegationTree.formattedInputTokens,
formattedCachedInputTokens: delegationTree.formattedCachedInputTokens,
formattedOutputTokens: delegationTree.formattedOutputTokens,
providerName: delegationTree.providerName,
cacheHitRate: delegationTree.cacheHitRate,
elapsedTime,
}
}