Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 26 additions & 0 deletions src/buddy/__tests__/companion.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import { describe, expect, test } from 'bun:test'
import { inferLegacyCompanionBones } from '../companion.js'

// Covers inferLegacyCompanionBones: a legacy (seedless) companion should have
// species/rarity recovered from its generated soul text, while a seeded
// companion is left untouched (its bones are regenerated from the seed).
describe('inferLegacyCompanionBones', () => {
  test('infers species and rarity from legacy seedless companion text', () => {
    // No `seed` field — the species/rarity words embedded in the personality
    // text are the only record of the companion's rolled identity.
    expect(
      inferLegacyCompanionBones({
        name: 'Biscuit',
        personality: 'A common mushroom of few words.',
      }),
    ).toEqual({
      species: 'mushroom',
      rarity: 'common',
    })
  })

  test('does not override seeded companions', () => {
    // Presence of `seed` short-circuits inference even though the text
    // contains species/rarity words.
    expect(
      inferLegacyCompanionBones({
        name: 'Spore',
        personality: 'A common mushroom of few words.',
        seed: 'rehatch-1',
      }),
    ).toEqual({})
  })
})
29 changes: 27 additions & 2 deletions src/buddy/companion.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { getGlobalConfig } from '../utils/config.js'
import {
type Companion,
type CompanionBones,
type CompanionSoul,
EYES,
HATS,
RARITIES,
Expand Down Expand Up @@ -125,12 +126,36 @@ export function companionUserId(): string {
return config.oauthAccount?.accountUuid ?? config.userID ?? 'anon'
}

// A run of characters that separates words in lowercased soul text.
const WORD_BOUNDARY = '[^a-z0-9]+'

// True when `word` occurs in `text` flanked on both sides by a string edge or
// a non-alphanumeric run — e.g. "cat" matches "a cat!" but not "catalog".
// Expects `text` already lowercased and `word` to be plain alphanumeric.
function hasWord(text: string, word: string): boolean {
  const pattern = `(^|${WORD_BOUNDARY})${word}($|${WORD_BOUNDARY})`
  return new RegExp(pattern).test(text)
}

export function inferLegacyCompanionBones(
stored: CompanionSoul,
): Partial<Pick<CompanionBones, 'species' | 'rarity'>> {
if (stored.seed) return {}
const text = `${stored.name} ${stored.personality}`.toLowerCase()
const inferred: Partial<Pick<CompanionBones, 'species' | 'rarity'>> = {}
const species = SPECIES.find(species => hasWord(text, species))
const rarity = RARITIES.find(rarity => hasWord(text, rarity))
if (species) inferred.species = species
if (rarity) inferred.rarity = rarity
return inferred
}

// Regenerate bones from seed or userId, merge with stored soul.
export function getCompanion(): Companion | undefined {
const stored = getGlobalConfig().companion
if (!stored) return undefined
const seed = stored.seed ?? companionUserId()
const { bones } = rollWithSeed(seed)
// bones last so stale bones fields in old-format configs get overridden
return { ...stored, ...bones }
const legacyBones = inferLegacyCompanionBones(stored)
// Seeded companions use regenerated bones. Legacy seedless companions may
// have species/rarity embedded in their generated soul text; keep that
// visible identity coherent when the userId-derived roll drifts.
return { ...stored, ...bones, ...legacyBones }
}
2 changes: 1 addition & 1 deletion src/commands/effort/effort.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ export async function call(onDone: LocalJSXCommandOnDone, _context: unknown, arg

if (COMMON_HELP_ARGS.includes(args)) {
onDone(
'Usage: /effort [low|medium|high|max|auto]\n\nEffort levels:\n- low: Quick, straightforward implementation\n- medium: Balanced approach with standard testing\n- high: Comprehensive implementation with extensive testing\n- max: Maximum capability with deepest reasoning (Opus 4.6/4.7, DeepSeek V4 Pro)\n- auto: Use the default effort level for your model',
'Usage: /effort [low|medium|high|xhigh|max|auto]\n\nEffort levels:\n- low: Quick, straightforward implementation\n- medium: Balanced approach with standard testing\n- high: Comprehensive implementation with extensive testing\n- xhigh: Extra high reasoning for supported models, including ChatGPT Codex models\n- max: Maximum capability with deepest reasoning where supported (Opus 4.6/4.7, DeepSeek V4 Pro); maps to xhigh for ChatGPT Codex models\n- auto: Use the default effort level for your model',
);
return;
}
Expand Down
2 changes: 1 addition & 1 deletion src/commands/logout/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { isEnvTruthy } from '../../utils/envUtils.js'
export default {
type: 'local-jsx',
name: 'logout',
description: 'Sign out from your Anthropic account',
description: 'Sign out from your configured account',
isEnabled: () => !isEnvTruthy(process.env.DISABLE_LOGOUT_COMMAND),
load: () => import('./logout.js'),
} satisfies Command
22 changes: 21 additions & 1 deletion src/commands/logout/logout.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,13 @@ import { getGroveNoticeConfig, getGroveSettings } from '../../services/api/grove
import { clearPolicyLimitsCache } from '../../services/policyLimits/index.js';
// flushTelemetry is loaded lazily to avoid pulling in ~1.1MB of OpenTelemetry at startup
import { clearRemoteManagedSettingsCache } from '../../services/remoteManagedSettings/index.js';
import { removeChatGPTAuth } from '../../services/api/openai/chatgptAuth.js';
import { getClaudeAIOAuthTokens, removeApiKey } from '../../utils/auth.js';
import { clearBetasCaches } from '../../utils/betas.js';
import { saveGlobalConfig } from '../../utils/config.js';
import { gracefulShutdownSync } from '../../utils/gracefulShutdown.js';
import { getSecureStorage } from '../../utils/secureStorage/index.js';
import { getSettingsForSource, updateSettingsForSource } from '../../utils/settings/settings.js';
import { clearToolSchemaCache } from '../../utils/toolSchemaCache.js';
import { resetUserCache } from '../../utils/user.js';

Expand All @@ -20,6 +22,8 @@ export async function performLogout({ clearOnboarding = false }): Promise<void>
await flushTelemetry();

await removeApiKey();
await removeChatGPTAuth();
clearChatGPTSettingsAuthMode();

// Wipe all secure storage data on logout
const secureStorage = getSecureStorage();
Expand All @@ -44,6 +48,22 @@ export async function performLogout({ clearOnboarding = false }): Promise<void>
});
}

// Remove the ChatGPT OAuth marker (OPENAI_AUTH_MODE) from the live process
// environment and from persisted user settings as part of logout. Also drops
// the 'openai' modelType unless a standalone OpenAI-compatible configuration
// (API key + base URL) remains, so the next launch doesn't select a provider
// whose auth was just revoked.
function clearChatGPTSettingsAuthMode(): void {
  delete process.env.OPENAI_AUTH_MODE;
  const userSettings = getSettingsForSource('userSettings') ?? {};
  const env = userSettings.env ?? {};
  // A key+URL pair (from settings or the live environment) means the user can
  // still reach an OpenAI-compatible endpoint without ChatGPT auth, so keep
  // modelType: 'openai' in that case.
  const hasOpenAICompatibleConfig =
    Boolean(env.OPENAI_API_KEY ?? process.env.OPENAI_API_KEY) &&
    Boolean(env.OPENAI_BASE_URL ?? process.env.OPENAI_BASE_URL);
  const settingsUpdate: Parameters<typeof updateSettingsForSource>[1] = {
    ...(userSettings.modelType === 'openai' && !hasOpenAICompatibleConfig ? { modelType: undefined } : {}),
    // NOTE(review): presumably `undefined` clears the persisted env key (the
    // cast bypasses the string-only env type) — matches the pattern used for
    // OPENAI_AUTH_MODE in ConsoleOAuthFlow's doOpenAISave; confirm against
    // updateSettingsForSource's merge semantics.
    env: {
      OPENAI_AUTH_MODE: undefined,
    } as unknown as Record<string, string>,
  };
  updateSettingsForSource('userSettings', settingsUpdate);
}

// clearing anything memoized that must be invalidated when user/session/auth changes
export async function clearAuthRelatedCaches(): Promise<void> {
// Clear the OAuth token cache
Expand All @@ -70,7 +90,7 @@ export async function clearAuthRelatedCaches(): Promise<void> {
export async function call(): Promise<React.ReactNode> {
await performLogout({ clearOnboarding: true });

const message = <Text>Successfully logged out from your Anthropic account.</Text>;
const message = <Text>Successfully logged out.</Text>;

setTimeout(() => {
gracefulShutdownSync(0, 'logout');
Expand Down
3 changes: 2 additions & 1 deletion src/commands/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -81,9 +81,10 @@ const call: LocalCommandCall = async (args, _context) => {
// Check env vars when switching to openai (including settings.env)
if (arg === 'openai') {
const mergedEnv = getMergedEnv()
const hasChatGPTAuth = mergedEnv.OPENAI_AUTH_MODE === 'chatgpt'
const hasKey = !!mergedEnv.OPENAI_API_KEY
const hasUrl = !!mergedEnv.OPENAI_BASE_URL
if (!hasKey || !hasUrl) {
if (!hasChatGPTAuth && (!hasKey || !hasUrl)) {
updateSettingsForSource('userSettings', { modelType: 'openai' })
const missing = []
if (!hasKey) missing.push('OPENAI_API_KEY')
Expand Down
135 changes: 128 additions & 7 deletions src/components/ConsoleOAuthFlow.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,14 @@ import { setClipboard, useTerminalNotification, Box, Link, Text, KeyboardShortcu
import { useKeybinding } from '../keybindings/useKeybinding.js';
import { getSSLErrorHint } from '@ant/model-provider';
import { sendNotification } from '../services/notifier.js';
import {
completeChatGPTDeviceLogin,
requestChatGPTDeviceCode,
type ChatGPTDeviceCode,
} from '../services/api/openai/chatgptAuth.js';
import { OAuthService } from '../services/oauth/index.js';
import { getOauthAccountInfo, validateForceLoginOrg } from '../utils/auth.js';

import { openBrowser } from '../utils/browser.js';
import { logError } from '../utils/log.js';
import { getSettings_DEPRECATED, updateSettingsForSource } from '../utils/settings/settings.js';
import { Select } from './CustomSelect/select.js';
Expand Down Expand Up @@ -46,6 +51,11 @@ type OAuthStatus =
opusModel: string;
activeField: 'base_url' | 'api_key' | 'haiku_model' | 'sonnet_model' | 'opus_model';
} // OpenAI Chat Completions API platform
| {
state: 'chatgpt_subscription';
phase: 'requesting' | 'waiting';
deviceCode?: ChatGPTDeviceCode;
} // ChatGPT account subscription via Codex OAuth device flow
| {
state: 'gemini_api';
baseUrl: string;
Expand Down Expand Up @@ -445,6 +455,15 @@ function OAuthStatusMessage({
),
value: 'openai_chat_api',
},
{
label: (
<Text>
ChatGPT account with subscription · <Text dimColor>Plus, Pro, Business, Edu, or Enterprise</Text>
{'\n'}
</Text>
),
value: 'chatgpt_subscription',
},
{
label: (
<Text>
Expand Down Expand Up @@ -515,6 +534,12 @@ function OAuthStatusMessage({
opusModel: process.env.OPENAI_DEFAULT_OPUS_MODEL ?? '',
activeField: 'base_url',
});
} else if (value === 'chatgpt_subscription') {
logEvent('tengu_chatgpt_subscription_selected', {});
setOAuthStatus({
state: 'chatgpt_subscription',
phase: 'requesting',
});
} else if (value === 'gemini_api') {
logEvent('tengu_gemini_api_selected', {});
setOAuthStatus({
Expand Down Expand Up @@ -807,7 +832,9 @@ function OAuthStatusMessage({

const doOpenAISave = useCallback(() => {
const finalVals = { ...openaiDisplayValues, [activeField]: openaiInputValue };
const env: Record<string, string> = {};
const env: Record<string, string | undefined> = {
OPENAI_AUTH_MODE: undefined,
};

// Validate base_url if provided
if (finalVals.base_url) {
Expand Down Expand Up @@ -836,10 +863,11 @@ function OAuthStatusMessage({
if (finalVals.haiku_model) env.OPENAI_DEFAULT_HAIKU_MODEL = finalVals.haiku_model;
if (finalVals.sonnet_model) env.OPENAI_DEFAULT_SONNET_MODEL = finalVals.sonnet_model;
if (finalVals.opus_model) env.OPENAI_DEFAULT_OPUS_MODEL = finalVals.opus_model;
const { error } = updateSettingsForSource('userSettings', {
modelType: 'openai' as any,
env,
} as any);
const settingsUpdate: Parameters<typeof updateSettingsForSource>[1] = {
modelType: 'openai',
env: env as unknown as Record<string, string>,
};
const { error } = updateSettingsForSource('userSettings', settingsUpdate);
if (error) {
setOAuthStatus({
state: 'error',
Expand All @@ -855,7 +883,13 @@ function OAuthStatusMessage({
},
});
} else {
for (const [k, v] of Object.entries(env)) process.env[k] = v;
for (const [k, v] of Object.entries(env)) {
if (v === undefined) {
delete process.env[k];
} else {
process.env[k] = v;
}
}
setOAuthStatus({ state: 'success' });
void onDone();
}
Expand Down Expand Up @@ -953,6 +987,93 @@ function OAuthStatusMessage({
);
}

case 'chatgpt_subscription': {
const status = oauthStatus as {
state: 'chatgpt_subscription';
phase: 'requesting' | 'waiting';
deviceCode?: ChatGPTDeviceCode;
};
const startedRef = useRef(false);

useEffect(() => {
if (startedRef.current) return;
startedRef.current = true;
let cancelled = false;
const controller = new AbortController();
async function runLogin() {
try {
const deviceCode = await requestChatGPTDeviceCode();
if (cancelled) return;
setOAuthStatus({
state: 'chatgpt_subscription',
phase: 'waiting',
deviceCode,
});
void openBrowser(deviceCode.verificationUrl);
await completeChatGPTDeviceLogin(deviceCode, controller.signal);
Comment on lines +998 to +1013
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor | ⚡ Quick win

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
# Verify whether requestChatGPTDeviceCode accepts an AbortSignal parameter.
fd -t f 'chatgptAuth.ts' --exec rg -nP -C2 'export\s+(async\s+)?function\s+requestChatGPTDeviceCode' {} \;

Repository: claude-code-best/claude-code

Length of output: 229


🏁 Script executed:

#!/bin/bash
# Get the full implementation of requestChatGPTDeviceCode
fd -t f 'chatgptAuth.ts' --exec cat -n {} \; | head -250 | tail -100

Repository: claude-code-best/claude-code

Length of output: 3730


requestChatGPTDeviceCode() doesn't support abort signal — initial device-code request can't be cancelled.

The function doesn't accept an AbortSignal parameter, so if the request hangs (slow network, captive portal, server outage), unmount/cancel won't abort the in-flight fetch. Unlike pollForAuthorizationCode, which already accepts a signal, the device code request lacks cancellation support.

To fix, add signal parameter to both requestChatGPTDeviceCode and the postJSON helper, then pass controller.signal from line 1006:

-            const deviceCode = await requestChatGPTDeviceCode();
+            const deviceCode = await requestChatGPTDeviceCode(controller.signal);

Also update the function signatures in chatgptAuth.ts to accept and forward the signal.

🤖 Prompt for AI Agents
Verify each finding against current code. Fix only still-valid issues, skip the
rest with a brief reason, keep changes minimal, and validate.

In `@src/components/ConsoleOAuthFlow.tsx` around lines 999 - 1014, The
requestChatGPTDeviceCode call in ConsoleOAuthFlow cannot be aborted because
requestChatGPTDeviceCode and the shared postJSON helper don't accept an
AbortSignal; update requestChatGPTDeviceCode (in chatgptAuth.ts) to take an
optional signal parameter and forward it into postJSON, update postJSON to
accept and pass the signal to fetch, then pass controller.signal from
ConsoleOAuthFlow where requestChatGPTDeviceCode is invoked (mirroring how
pollForAuthorizationCode already accepts a signal).

if (cancelled) return;
const env: Record<string, string> = {
OPENAI_AUTH_MODE: 'chatgpt',
};
const settingsUpdate: Parameters<typeof updateSettingsForSource>[1] = {
modelType: 'openai',
env,
};
const { error } = updateSettingsForSource('userSettings', settingsUpdate);
if (error) {
throw new Error('Failed to save settings. Please try again.');
}
for (const [k, v] of Object.entries(env)) process.env[k] = v;
setOAuthStatus({ state: 'success' });
void onDone();
} catch (err) {
if (cancelled) return;
setOAuthStatus({
state: 'error',
message: (err as Error).message,
toRetry: {
state: 'chatgpt_subscription',
phase: 'requesting',
},
});
}
}
void runLogin();
return () => {
cancelled = true;
controller.abort();
};
}, [setOAuthStatus, onDone]);

return (
<Box flexDirection="column" gap={1}>
<Text bold>ChatGPT Account Setup</Text>
{status.phase === 'requesting' && (
<Box>
<Spinner />
<Text>Requesting sign-in code…</Text>
</Box>
)}
{status.phase === 'waiting' && status.deviceCode && (
<Box flexDirection="column" gap={1}>
<Text>Open this link and sign in with your ChatGPT account:</Text>
<Link url={status.deviceCode.verificationUrl}>
<Text dimColor>{status.deviceCode.verificationUrl}</Text>
</Link>
<Text>
Enter code: <Text bold>{status.deviceCode.userCode}</Text>
</Text>
<Box>
<Spinner />
<Text>Waiting for ChatGPT authorization…</Text>
</Box>
</Box>
)}
<Text dimColor>Esc to go back. Device codes expire after 15 minutes.</Text>
</Box>
);
}
Comment on lines +1048 to +1075
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major | ⚡ Quick win

Missing confirm:no (Esc) keybinding makes the displayed "Esc to go back" instruction non-functional.

The other login sub-cases (custom_platform, openai_chat_api, gemini_api) each register a useKeybinding('confirm:no', () => setOAuthStatus({ state: 'idle' }), { context: 'Confirmation' }) so the user can return to the menu while waiting. This case shows the same instruction at line 1073 but never registers the keybinding, so Esc does nothing during the requesting/waiting phases. Users who started this flow accidentally have no way to back out other than killing the process.

🛠️ Proposed fix
       useEffect(() => {
         ...
       }, [setOAuthStatus, onDone]);

+      useKeybinding(
+        'confirm:no',
+        () => {
+          setOAuthStatus({ state: 'idle' });
+        },
+        { context: 'Confirmation' },
+      );
+
       return (
         <Box flexDirection="column" gap={1}>
           <Text bold>ChatGPT Account Setup</Text>
🤖 Prompt for AI Agents
Verify each finding against current code. Fix only still-valid issues, skip the
rest with a brief reason, keep changes minimal, and validate.

In `@src/components/ConsoleOAuthFlow.tsx` around lines 1049 - 1076, In
ConsoleOAuthFlow.tsx add the missing Esc keybinding for the ChatGPT device-code
flow: when status.phase is 'requesting' or 'waiting' (the same branch that
renders the "Esc to go back" text) register useKeybinding('confirm:no', () =>
setOAuthStatus({ state: 'idle' }), { context: 'Confirmation' }) so pressing Esc
returns to idle; place this alongside the other flows' keybinding registrations
inside the component that renders the device-code UI (refer to status.phase,
status.deviceCode, and setOAuthStatus to locate the correct spot).


case 'gemini_api': {
type GeminiField = 'base_url' | 'api_key' | 'haiku_model' | 'sonnet_model' | 'opus_model';
const GEMINI_FIELDS: GeminiField[] = ['base_url', 'api_key', 'haiku_model', 'sonnet_model', 'opus_model'];
Expand Down
Loading