Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions src/services/api/grok/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import type {
ChatCompletionCreateParamsStreaming,
} from 'openai/resources/chat/completions/completions.mjs'
import { getGrokClient } from './client.js'
import { updateOpenAIUsage } from '../openai/openaiShared.js'
import {
anthropicMessagesToOpenAI,
anthropicToolsToOpenAI,
Expand Down Expand Up @@ -136,7 +137,7 @@ export async function* queryModelGrok(
partialMessage = (event as any).message
ttftMs = Date.now() - start
if ((event as any).message?.usage) {
usage = { ...usage, ...(event as any).message.usage }
usage = updateOpenAIUsage(usage, (event as any).message.usage)
}
break
}
Expand Down Expand Up @@ -192,7 +193,7 @@ export async function* queryModelGrok(
case 'message_delta': {
const deltaUsage = (event as any).usage
if (deltaUsage) {
usage = { ...usage, ...deltaUsage }
usage = updateOpenAIUsage(usage, deltaUsage)
}
break
}
Expand Down
3 changes: 2 additions & 1 deletion src/services/api/openai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import type {
import type { AgentId } from '../../../types/ids.js'
import type { Tools } from '../../../Tool.js'
import { getOpenAIClient } from './client.js'
import { updateOpenAIUsage } from './openaiShared.js'
import {
anthropicMessagesToOpenAI,
resolveOpenAIModel,
Expand Down Expand Up @@ -449,7 +450,7 @@ export async function* queryModelOpenAI(
case 'message_delta': {
const deltaUsage = (event as any).usage
if (deltaUsage) {
usage = { ...usage, ...deltaUsage }
usage = updateOpenAIUsage(usage, deltaUsage)
}
if ((event as any).delta?.stop_reason != null) {
stopReason = (event as any).delta.stop_reason
Expand Down
46 changes: 46 additions & 0 deletions src/services/api/openai/openaiShared.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
/**
* Shared utilities for OpenAI-compatible API paths.
*
* Both the OpenAI path (queryModelOpenAI) and Grok path (queryModelGrok) use
* the same adapters (openaiStreamAdapter, openaiConvertMessages), so the event
* processing logic should be shared rather than duplicated.
*/

/**
 * Fold a streaming usage delta into the running usage total.
 *
 * Token counts (`input_tokens`, `output_tokens`) are replaced whenever the
 * delta supplies them; cache counters are replaced only by a strictly
 * positive value, so an event that reports an explicit zero (or omits the
 * field entirely) never wipes a previously accumulated cache count.
 *
 * Mirrors updateUsage() in claude.ts: a future adapter change that drops
 * cache fields from some streaming events must not silently zero the
 * accumulated counters.
 *
 * @param current - usage totals accumulated so far (not mutated)
 * @param delta   - partial usage payload from a streaming event
 * @returns a new usage object with the delta merged in
 */
export function updateOpenAIUsage(
  current: {
    input_tokens: number
    output_tokens: number
    cache_creation_input_tokens: number
    cache_read_input_tokens: number
  },
  delta: {
    input_tokens?: number
    output_tokens?: number
    cache_creation_input_tokens?: number
    cache_read_input_tokens?: number
  },
): typeof current {
  // Cache counters only move forward on a positive report; zero/undefined
  // deltas keep the previous value.
  const keepPositive = (next: number | undefined, prev: number): number =>
    next !== undefined && next > 0 ? next : prev

  return {
    input_tokens: delta.input_tokens ?? current.input_tokens,
    output_tokens: delta.output_tokens ?? current.output_tokens,
    cache_creation_input_tokens: keepPositive(
      delta.cache_creation_input_tokens,
      current.cache_creation_input_tokens,
    ),
    cache_read_input_tokens: keepPositive(
      delta.cache_read_input_tokens,
      current.cache_read_input_tokens,
    ),
  }
}
4 changes: 3 additions & 1 deletion src/services/api/openai/requestBody.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@ export function isOpenAIThinkingEnabled(model: string): boolean {
if (isEnvDefinedFalsy(process.env.OPENAI_ENABLE_THINKING)) return false
// Explicit enable
if (isEnvTruthy(process.env.OPENAI_ENABLE_THINKING)) return true
// Auto-detect from model name (DeepSeek and MiMo models support thinking mode)
// Auto-detect from model name (DeepSeek and MiMo models support thinking mode).
// Grok is intentionally excluded — Grok reasoning models reason automatically
// and do NOT require thinking/enable_thinking request body parameters.
const modelLower = model.toLowerCase()
return modelLower.includes('deepseek') || modelLower.includes('mimo')
}
Expand Down