diff --git a/docs/configuration.md b/docs/configuration.md index 771fb5d..55337ab 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -116,6 +116,14 @@ Mode-derived runtime defaults when omitted: - Enables codex-rs compact prompt + `summary_prefix` handoff behavior for OpenAI sessions. - Mode defaults: `true` in `codex`, `false` in `native`. - Explicit boolean value overrides mode default. +- `runtime.shareableDebug: boolean` + - Writes a bounded privacy-first summary log to `/logs/codex-plugin/shareable-debug.jsonl`. + - Persists a rolling crash-tolerant event buffer under `/logs/codex-plugin/shareable-debug-state/segments/`. + - On trigger conditions (`401`, `403`, `429`, auth failures, account-switch retry after failure, and synthetic plugin fatal errors), writes a dedicated incident file under `/logs/codex-plugin/shareable-debug-state/incidents/`. + - Uses `/logs/codex-plugin/shareable-debug-state/incident-state.json` to resume or seal interrupted incident capture after restart. + - Sensitive identifiers are pseudonymized per process/log bundle instead of logged raw. + - Request bodies, tokens, cookies, OAuth secrets, raw emails/account IDs/session IDs, and raw `prompt_cache_key` values are never persisted by this mode. + - When enabled, request snapshot logging is suppressed even if `runtime.headerSnapshots` or `runtime.headerTransformDebug` are also set. - `runtime.headerSnapshots: boolean` - Writes before/after request header snapshots to debug logs. - Custom snapshot metadata is stored under a nested `meta` object to prevent collisions with reserved top-level fields. @@ -316,6 +324,7 @@ Advanced path: ### Debug/OAuth controls - `OPENCODE_OPENAI_MULTI_DEBUG=1`: plugin debug logs. +- `OPENCODE_OPENAI_MULTI_SHAREABLE_DEBUG=1`: privacy-first shareable structured debug log. - `CODEX_IN_VIVO=1`: enables live quota probe tests. - `DEBUG_CODEX_PLUGIN=1`: alternate debug flag. 
- `CODEX_AUTH_DEBUG=1`: verbose OAuth lifecycle logging (`oauth-lifecycle.log`). diff --git a/docs/privacy.md b/docs/privacy.md index 4e3a16a..b819bce 100644 --- a/docs/privacy.md +++ b/docs/privacy.md @@ -38,6 +38,14 @@ - pinned prompt cache metadata (`lastChecked`, URLs, ETags) - `/logs/codex-plugin/` (optional) - request/response snapshot logs when enabled +- `/logs/codex-plugin/shareable-debug.jsonl` (optional) + - bounded privacy-first structured debug summary log with per-process pseudonyms for account/session correlation +- `/logs/codex-plugin/shareable-debug-state/segments/` (optional) + - rolling crash-tolerant pseudonymized event buffer used for pre-incident context +- `/logs/codex-plugin/shareable-debug-state/incidents/` (optional) + - dedicated pseudonymized before/after incident captures around error triggers +- `/logs/codex-plugin/shareable-debug-state/incident-state.json` (optional) + - recovery manifest for interrupted incident capture - `/logs/codex-plugin/oauth-lifecycle.log` (optional) - OAuth lifecycle debug log when `CODEX_AUTH_DEBUG` is enabled @@ -74,6 +82,9 @@ Recommended additional local ignore patterns (not auto-managed by plugin): - Snapshot writer redacts sensitive auth headers/tokens before persistence. - Snapshot writer also redacts sensitive account/session metadata keys and sensitive URL query values. - Live-headers snapshots redact `prompt_cache_key` values. +- Shareable debug mode never writes raw tokens, cookies, OAuth secrets, raw emails/account IDs/identity keys/session IDs, raw `prompt_cache_key` values, or raw request bodies. +- Shareable debug pseudonyms are stable only within a single process/log bundle. +- Shareable debug incident capture keeps a rolling on-disk pseudonymized buffer so before-error context can survive process exit or crash. - If request body capture is enabled, prompt/tool payload content may still be written; use short-lived debugging windows only. 
- OAuth debug lifecycle logs rotate at a configurable size cap. diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index e592779..127d618 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -143,6 +143,14 @@ Optional OAuth debug log rotation: - `CODEX_AUTH_DEBUG_MAX_BYTES` +Safer issue-reporting path: + +- `OPENCODE_OPENAI_MULTI_SHAREABLE_DEBUG=1` + - Writes a bounded `shareable-debug.jsonl` summary log under the plugin log directory. + - Keeps a rolling pseudonymized buffer under `shareable-debug-state/segments/`. + - On auth/fatal trigger conditions, writes dedicated incident captures under `shareable-debug-state/incidents/`. + - Use this when you need logs that are intended to be safe to paste into a public issue. + Sensitive auth headers/tokens are redacted in snapshot logs. Sensitive account/session metadata keys and URL query values are redacted as well. If request body capture is enabled, prompt/tool payload content may still be present. diff --git a/index.ts b/index.ts index 793f797..245052b 100644 --- a/index.ts +++ b/index.ts @@ -20,6 +20,7 @@ import { getHeaderSnapshotBodiesEnabled, getHeaderTransformDebugEnabled, getHeaderSnapshotsEnabled, + getShareableDebugEnabled, getOrchestratorSubagentsEnabled, getMode, getRemapDeveloperMessagesToUserEnabled, @@ -153,6 +154,7 @@ export const OpenAIMultiAuthPlugin: Plugin = async (input) => { compatInputSanitizer: getCompatInputSanitizerEnabled(cfg), remapDeveloperMessagesToUser: getRemapDeveloperMessagesToUserEnabled(cfg), codexCompactionOverride: getCodexCompactionOverrideEnabled(cfg), + shareableDebug: getShareableDebugEnabled(cfg), headerSnapshots: getHeaderSnapshotsEnabled(cfg), headerSnapshotBodies: getHeaderSnapshotBodiesEnabled(cfg), headerTransformDebug: getHeaderTransformDebugEnabled(cfg), diff --git a/lib/codex-native.ts b/lib/codex-native.ts index 49bb3ef..8528ece 100644 --- a/lib/codex-native.ts +++ b/lib/codex-native.ts @@ -59,6 +59,7 @@ import { import { 
createSessionAffinityRuntimeState } from "./codex-native/session-affinity-state.js" import { initializeCatalogSync, selectCatalogAuthCandidate } from "./codex-native/catalog-sync.js" import { createOpenAIFetchHandler } from "./codex-native/openai-loader-fetch.js" +import { createShareableDebugLogger } from "./shareable-debug.js" export { browserOpenInvocationFor } from "./codex-native/browser.js" export { upsertAccount } from "./codex-native/accounts.js" export { extractAccountId, extractAccountIdFromClaims, refreshAccessToken } from "./codex-native/oauth-utils.js" @@ -163,6 +164,7 @@ export type CodexAuthPluginOptions = { compatInputSanitizer?: boolean remapDeveloperMessagesToUser?: boolean codexCompactionOverride?: boolean + shareableDebug?: boolean headerSnapshots?: boolean headerSnapshotBodies?: boolean headerTransformDebug?: boolean @@ -355,11 +357,22 @@ export async function CodexAuthPlugin(input: PluginInput, opts: CodexAuthPluginO ...(spoofMode === "native" ? { openaiBeta: "responses=experimental" } : {}) } } + const shareableDebugEnabled = opts.shareableDebug === true + if (shareableDebugEnabled && (opts.headerSnapshots === true || opts.headerTransformDebug === true)) { + opts.log?.warn("shareable debug disables request snapshot logging", { + headerSnapshots: opts.headerSnapshots === true, + headerTransformDebug: opts.headerTransformDebug === true + }) + } const requestSnapshots = createRequestSnapshots({ - enabled: opts.headerSnapshots === true || opts.headerTransformDebug === true, + enabled: !shareableDebugEnabled && (opts.headerSnapshots === true || opts.headerTransformDebug === true), captureBodies: opts.headerSnapshotBodies === true, log: opts.log }) + const shareableDebug = createShareableDebugLogger({ + enabled: shareableDebugEnabled, + log: opts.log + }) const catalogModelsByScope = new Map() const catalogRequestMetadataBySession = new Map< string, @@ -523,6 +536,7 @@ export async function CodexAuthPlugin(input: PluginInput, opts: CodexAuthPluginO 
configuredRotationStrategy: opts.rotationStrategy, headerTransformDebug: opts.headerTransformDebug === true, compatInputSanitizerEnabled: opts.compatInputSanitizer === true, + shareableDebug, internalCatalogScopeHeader: INTERNAL_CATALOG_SCOPE_HEADER, internalSelectedModelHeader: INTERNAL_SELECTED_MODEL_HEADER, internalCollaborationModeHeader: INTERNAL_COLLABORATION_MODE_HEADER, diff --git a/lib/codex-native/acquire-auth.ts b/lib/codex-native/acquire-auth.ts index 65c00f1..6358a04 100644 --- a/lib/codex-native/acquire-auth.ts +++ b/lib/codex-native/acquire-auth.ts @@ -8,6 +8,7 @@ import type { AccountRecord, OpenAIAuthMode, RotationStrategy } from "../types.j import { parseJwtClaims } from "../claims.js" import { formatAccountLabel } from "./accounts.js" import { extractAccountId, refreshAccessToken, type OAuthTokenRefreshError } from "./oauth-utils.js" +import type { ShareableDebugLogger } from "../shareable-debug.js" const AUTH_REFRESH_FAILURE_COOLDOWN_MS = 30_000 const AUTH_REFRESH_LEASE_MS = 30_000 @@ -73,6 +74,7 @@ export type AcquireOpenAIAuthInput = { pidOffsetEnabled: boolean configuredRotationStrategy?: RotationStrategy log?: Logger + shareableDebug?: ShareableDebugLogger } export function createAcquireOpenAIAuthInputDefaults(): { @@ -114,6 +116,17 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< let totalAccounts = 0 let rotationLogged = false let lastSelectionTrace: AccountSelectionTrace | undefined + const emitAuthFailure = async (details: { outcome: string; status: number; waitMs?: number }): Promise => { + await input.shareableDebug?.emitAuthFailure({ + authMode: input.authMode, + outcome: details.outcome, + status: details.status, + waitMs: details.waitMs, + sessionKey: input.context?.sessionKey, + selectedIdentityKey: lastSelectionTrace?.selectedIdentityKey, + activeIdentityKey: lastSelectionTrace?.activeIdentityKey + }) + } try { while (true) { @@ -163,6 +176,14 @@ export async function acquireOpenAIAuth(input: 
AcquireOpenAIAuthInput): Promise< mode: input.authMode, sessionKey: input.context?.sessionKey ?? null }) + await input.shareableDebug?.emitRotationBegin({ + authMode: input.authMode, + rotationStrategy, + activeIdentityKey: domain.activeIdentityKey, + totalAccounts: domain.accounts.length, + enabledAccounts: enabled.length, + sessionKey: input.context?.sessionKey ?? null + }) rotationLogged = true } @@ -211,6 +232,20 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< ...(event.sessionKey ? { sessionKey: event.sessionKey } : null) } input.log?.debug("rotation decision", event) + void input.shareableDebug?.emitRotationDecision({ + authMode: input.authMode, + rotationStrategy: event.strategy, + decision: event.decision, + totalCount: event.totalCount, + disabledCount: event.disabledCount, + cooldownCount: event.cooldownCount, + refreshLeaseCount: event.refreshLeaseCount, + eligibleCount: event.eligibleCount, + attemptedCount: attempted.size + (event.selectedIdentityKey ? 1 : 0), + selectedIdentityKey: event.selectedIdentityKey, + activeIdentityKey: event.activeIdentityKey, + sessionKey: event.sessionKey + }) } }) @@ -247,6 +282,15 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< selectedCooldownUntil: selected.cooldownUntil ?? null, selectedExpires: selected.expires ?? null }) + void input.shareableDebug?.emitRotationCandidateSelected({ + authMode: input.authMode, + attemptKey, + selectedIdentityKey: selected.identityKey, + selectedIndex, + selectedEnabled: selected.enabled !== false, + selectedCooldownUntil: selected.cooldownUntil ?? null, + selectedExpires: selected.expires ?? 
null + }) if (lastSelectionTrace) { lastSelectionTrace = { ...lastSelectionTrace, @@ -474,6 +518,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< const authSnapshot = await loadAuthStorage(undefined, { lockReads: false }) const openai = authSnapshot.openai if (!openai || openai.type !== "oauth") { + await emitAuthFailure({ outcome: "oauth_not_configured", status: 401 }) throw new PluginFatalError({ message: "Not authenticated with OpenAI. Run `opencode auth login`.", status: 401, @@ -485,6 +530,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< const domain = ensureOpenAIOAuthDomain(authSnapshot, input.authMode) const enabledAfterAttempts = domain.accounts.filter((account) => account.enabled !== false) if (enabledAfterAttempts.length === 0 && sawInvalidGrant) { + await emitAuthFailure({ outcome: "refresh_invalid_grant", status: 401 }) throw new PluginFatalError({ message: "All enabled OpenAI refresh tokens were rejected (invalid_grant). Run `opencode auth login` to reauthenticate.", @@ -506,6 +552,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< if (nextAvailableAt !== undefined) { const waitMs = Math.max(0, nextAvailableAt - now) + await emitAuthFailure({ outcome: "all_accounts_cooling_down", status: 429, waitMs }) throw new PluginFatalError({ message: `All enabled OpenAI accounts are cooling down. Try again in ${formatWaitTime(waitMs)} or run \`opencode auth login\`.`, status: 429, @@ -515,6 +562,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< } if (sawInvalidGrant) { + await emitAuthFailure({ outcome: "refresh_invalid_grant", status: 401 }) throw new PluginFatalError({ message: "OpenAI refresh token was rejected (invalid_grant). 
Run `opencode auth login` to reauthenticate this account.", @@ -525,6 +573,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< } if (sawMissingRefresh) { + await emitAuthFailure({ outcome: "missing_refresh_token", status: 401 }) throw new PluginFatalError({ message: "Selected OpenAI account is missing a refresh token. Run `opencode auth login` to reauthenticate.", status: 401, @@ -534,6 +583,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< } if (sawMissingIdentity) { + await emitAuthFailure({ outcome: "missing_account_identity", status: 401 }) throw new PluginFatalError({ message: "Selected OpenAI account is missing identity metadata. Run `opencode auth login` to reauthenticate.", status: 401, @@ -543,6 +593,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< } if (sawRefreshFailure) { + await emitAuthFailure({ outcome: "refresh_failed", status: 401 }) throw new PluginFatalError({ message: "Failed to refresh OpenAI access token. Run `opencode auth login` and try again.", status: 401, @@ -551,6 +602,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< }) } + await emitAuthFailure({ outcome: "no_enabled_accounts", status: 403 }) throw new PluginFatalError({ message: `No enabled OpenAI ${input.authMode} accounts available. Enable an account or run \`opencode auth login\`.`, status: 403, @@ -560,6 +612,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< } } catch (error) { if (isPluginFatalError(error)) throw error + await emitAuthFailure({ outcome: "auth_storage_error", status: 500 }) throw new PluginFatalError({ message: "Unable to access OpenAI auth storage. 
Check plugin configuration and run `opencode auth login` if needed.", @@ -570,6 +623,7 @@ export async function acquireOpenAIAuth(input: AcquireOpenAIAuthInput): Promise< } if (!access) { + await emitAuthFailure({ outcome: "no_valid_access_token", status: 401 }) throw new PluginFatalError({ message: "No valid OpenAI access token available. Run `opencode auth login`.", status: 401, diff --git a/lib/codex-native/openai-loader-fetch.ts b/lib/codex-native/openai-loader-fetch.ts index b20ab8c..1e8ebde 100644 --- a/lib/codex-native/openai-loader-fetch.ts +++ b/lib/codex-native/openai-loader-fetch.ts @@ -25,6 +25,7 @@ import { import { toReasoningSummaryPluginFatalError } from "./reasoning-summary.js" import type { SessionAffinityRuntimeState } from "./session-affinity-state.js" import { scheduleQuotaRefresh } from "./openai-loader-fetch-quota.js" +import type { ShareableDebugLogger } from "../shareable-debug.js" import { CATALOG_REFRESH_FAILURE_RETRY_MS, CATALOG_REFRESH_TTL_MS, @@ -53,6 +54,7 @@ export type CreateOpenAIFetchHandlerInput = { configuredRotationStrategy?: RotationStrategy headerTransformDebug: boolean compatInputSanitizerEnabled: boolean + shareableDebug?: ShareableDebugLogger internalCatalogScopeHeader?: string internalSelectedModelHeader?: string internalCollaborationModeHeader: string @@ -97,8 +99,20 @@ export function createOpenAIFetchHandler(input: CreateOpenAIFetchHandlerInput) { assertAllowedOutboundUrl(new URL(initialRequestUrl)) } catch (error) { if (isPluginFatalError(error)) { + await input.shareableDebug?.emitSyntheticFatalError({ + authMode: input.authMode, + outcome: error.type, + status: error.status, + endpoint: initialRequestUrl + }) return toSyntheticErrorResponse(error) } + await input.shareableDebug?.emitSyntheticFatalError({ + authMode: input.authMode, + outcome: "disallowed_outbound_request", + status: 400, + endpoint: initialRequestUrl + }) return toSyntheticErrorResponse( new PluginFatalError({ message: "Outbound request validation 
failed before preparing OpenAI request.", @@ -115,6 +129,12 @@ export function createOpenAIFetchHandler(input: CreateOpenAIFetchHandlerInput) { try { baseRequest = new Request(requestInput, init) } catch { + await input.shareableDebug?.emitSyntheticFatalError({ + authMode: input.authMode, + outcome: "disallowed_outbound_request", + status: 400, + endpoint: initialRequestUrl + }) return toSyntheticErrorResponse( new PluginFatalError({ message: "Outbound request could not be prepared for OpenAI backend.", @@ -203,7 +223,8 @@ export function createOpenAIFetchHandler(input: CreateOpenAIFetchHandlerInput) { persistSessionAffinityState, pidOffsetEnabled: input.pidOffsetEnabled, configuredRotationStrategy: input.configuredRotationStrategy, - log: input.log + log: input.log, + shareableDebug: input.shareableDebug }) const now = Date.now() @@ -281,6 +302,30 @@ export function createOpenAIFetchHandler(input: CreateOpenAIFetchHandlerInput) { maxRedirects: 3, showToast: input.showToast, onAttemptRequest: async ({ attempt, maxAttempts, attemptReasonCode, request, auth, sessionKey }) => { + await input.shareableDebug?.emitFetchAttemptRequest({ + authMode: input.authMode, + rotationStrategy: auth.selectionTrace?.strategy ?? input.configuredRotationStrategy, + attempt: attempt + 1, + maxAttempts, + attemptReasonCode, + request, + selectedIdentityKey: auth.identityKey ?? auth.selectionTrace?.selectedIdentityKey, + activeIdentityKey: auth.selectionTrace?.activeIdentityKey, + sessionKey + }) + if (attemptReasonCode !== "initial_attempt") { + await input.shareableDebug?.emitRetryAfter429({ + authMode: input.authMode, + rotationStrategy: auth.selectionTrace?.strategy ?? input.configuredRotationStrategy, + attempt: attempt + 1, + maxAttempts, + attemptReasonCode, + selectedIdentityKey: auth.identityKey ?? 
auth.selectionTrace?.selectedIdentityKey, + activeIdentityKey: auth.selectionTrace?.activeIdentityKey, + sessionKey + }) + } + const selectedModelSlug = request.headers.get(internalSelectedModelHeader)?.trim() || undefined const requestCatalogScopeKey = request.headers.get(internalCatalogScopeHeader)?.trim() || selectedPreviousCatalogScopeKey @@ -396,6 +441,18 @@ export function createOpenAIFetchHandler(input: CreateOpenAIFetchHandlerInput) { return payloadTransform.request }, onAttemptResponse: async ({ attempt, maxAttempts, attemptReasonCode, response, auth, sessionKey }) => { + await input.shareableDebug?.emitFetchAttemptResponse({ + authMode: input.authMode, + rotationStrategy: auth.selectionTrace?.strategy ?? input.configuredRotationStrategy, + attempt: attempt + 1, + maxAttempts, + attemptReasonCode, + endpoint: response.url || outbound.url, + status: response.status, + selectedIdentityKey: auth.identityKey ?? auth.selectionTrace?.selectedIdentityKey, + activeIdentityKey: auth.selectionTrace?.activeIdentityKey, + sessionKey + }) await input.requestSnapshots.captureResponse("outbound-response", response, { attempt: attempt + 1, maxAttempts, @@ -411,8 +468,24 @@ export function createOpenAIFetchHandler(input: CreateOpenAIFetchHandlerInput) { assertAllowedOutboundUrl(new URL(outbound.url)) } catch (error) { if (isPluginFatalError(error)) { + await input.shareableDebug?.emitSyntheticFatalError({ + authMode: input.authMode, + outcome: error.type, + status: error.status, + endpoint: outbound.url, + selectedIdentityKey, + activeIdentityKey: selectedAuthForQuota?.identityKey + }) return toSyntheticErrorResponse(error) } + await input.shareableDebug?.emitSyntheticFatalError({ + authMode: input.authMode, + outcome: "disallowed_outbound_request", + status: 400, + endpoint: outbound.url, + selectedIdentityKey, + activeIdentityKey: selectedAuthForQuota?.identityKey + }) return toSyntheticErrorResponse( new PluginFatalError({ message: "Outbound request validation failed 
before sending to OpenAI backend.", @@ -434,12 +507,28 @@ export function createOpenAIFetchHandler(input: CreateOpenAIFetchHandlerInput) { type: error.type, status: error.status }) + await input.shareableDebug?.emitSyntheticFatalError({ + authMode: input.authMode, + outcome: error.type, + status: error.status, + endpoint: outbound.url, + selectedIdentityKey, + activeIdentityKey: selectedAuthForQuota?.identityKey + }) return toSyntheticErrorResponse(error) } input.log?.debug("unexpected fetch failure", { error: error instanceof Error ? error.message : String(error) }) + await input.shareableDebug?.emitSyntheticFatalError({ + authMode: input.authMode, + outcome: "plugin_fetch_failed", + status: 502, + endpoint: outbound.url, + selectedIdentityKey, + activeIdentityKey: selectedAuthForQuota?.identityKey + }) return toSyntheticErrorResponse( new PluginFatalError({ message: "OpenAI request failed unexpectedly. Retry once, and if it persists run `opencode auth login`.", diff --git a/lib/config.ts b/lib/config.ts index 9636214..7323315 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -52,6 +52,7 @@ export { getDebugEnabled, getHeaderSnapshotBodiesEnabled, getHeaderSnapshotsEnabled, + getShareableDebugEnabled, getHeaderTransformDebugEnabled, getMode, getOrchestratorSubagentsEnabled, diff --git a/lib/config/file.ts b/lib/config/file.ts index 763c352..379469a 100644 --- a/lib/config/file.ts +++ b/lib/config/file.ts @@ -690,6 +690,7 @@ export function validateConfigFileObject(raw: unknown): ConfigValidationResult { "sanitizeInputs", "developerMessagesToUser", "codexCompactionOverride", + "shareableDebug", "headerSnapshots", "headerSnapshotBodies", "headerTransformDebug", @@ -848,6 +849,7 @@ function parseConfigFileObjectWithMetadata(raw: unknown): ParsedConfigFile { typeof runtime?.developerMessagesToUser === "boolean" ? runtime.developerMessagesToUser : undefined const codexCompactionOverride = typeof runtime?.codexCompactionOverride === "boolean" ? 
runtime.codexCompactionOverride : undefined + const shareableDebug = typeof runtime?.shareableDebug === "boolean" ? runtime.shareableDebug : undefined const headerSnapshots = typeof runtime?.headerSnapshots === "boolean" ? runtime.headerSnapshots : undefined const headerSnapshotBodies = typeof runtime?.headerSnapshotBodies === "boolean" ? runtime.headerSnapshotBodies : undefined @@ -875,6 +877,7 @@ function parseConfigFileObjectWithMetadata(raw: unknown): ParsedConfigFile { compatInputSanitizer, remapDeveloperMessagesToUser, codexCompactionOverride, + shareableDebug, headerSnapshots, headerSnapshotBodies, headerTransformDebug, diff --git a/lib/config/resolve.ts b/lib/config/resolve.ts index 06e4069..1589e9b 100644 --- a/lib/config/resolve.ts +++ b/lib/config/resolve.ts @@ -215,6 +215,7 @@ export function resolveConfig(input: { parseEnvBoolean(env.OPENCODE_OPENAI_MULTI_REMAP_DEVELOPER_MESSAGES_TO_USER) ?? file.remapDeveloperMessagesToUser const codexCompactionOverride = parseEnvBoolean(env.OPENCODE_OPENAI_MULTI_CODEX_COMPACTION_OVERRIDE) ?? file.codexCompactionOverride + const shareableDebug = parseEnvBoolean(env.OPENCODE_OPENAI_MULTI_SHAREABLE_DEBUG) ?? file.shareableDebug const headerSnapshots = parseEnvBoolean(env.OPENCODE_OPENAI_MULTI_HEADER_SNAPSHOTS) ?? file.headerSnapshots const headerSnapshotBodies = parseEnvBoolean(env.OPENCODE_OPENAI_MULTI_HEADER_SNAPSHOT_BODIES) ?? 
file.headerSnapshotBodies @@ -244,6 +245,7 @@ export function resolveConfig(input: { compatInputSanitizer, remapDeveloperMessagesToUser, codexCompactionOverride, + shareableDebug, headerSnapshots, headerSnapshotBodies, headerTransformDebug, @@ -315,6 +317,10 @@ export function getHeaderSnapshotsEnabled(cfg: PluginConfig): boolean { return cfg.headerSnapshots === true } +export function getShareableDebugEnabled(cfg: PluginConfig): boolean { + return cfg.shareableDebug === true +} + export function getHeaderTransformDebugEnabled(cfg: PluginConfig): boolean { return cfg.headerTransformDebug === true } diff --git a/lib/config/types.ts b/lib/config/types.ts index d3ddfb9..de495b0 100644 --- a/lib/config/types.ts +++ b/lib/config/types.ts @@ -52,6 +52,7 @@ export type PluginConfig = { compatInputSanitizer?: boolean remapDeveloperMessagesToUser?: boolean codexCompactionOverride?: boolean + shareableDebug?: boolean headerSnapshots?: boolean headerSnapshotBodies?: boolean headerTransformDebug?: boolean @@ -81,6 +82,7 @@ export const DEFAULT_CODEX_CONFIG = { sanitizeInputs: false, developerMessagesToUser: true, promptCacheKeyStrategy: "default", + shareableDebug: false, headerSnapshots: false, headerSnapshotBodies: false, headerTransformDebug: false, @@ -154,6 +156,12 @@ export const DEFAULT_CODEX_CONFIG_TEMPLATE = `{ // mode default: false in "native", true in "codex" // "codexCompactionOverride": true, + // Privacy-first structured debug logging safe for public sharing. + // Writes a bounded summary log plus crash-tolerant incident captures. + // options: true | false + // default: false + "shareableDebug": false, + // Write request header snapshots to plugin logs. 
// options: true | false // default: false diff --git a/lib/paths.ts b/lib/paths.ts index 388a0bd..15e1e0e 100644 --- a/lib/paths.ts +++ b/lib/paths.ts @@ -5,6 +5,7 @@ export const CODEX_ACCOUNTS_FILE = "codex-accounts.json" export const LEGACY_OPENAI_CODEX_ACCOUNTS_FILE = "openai-codex-accounts.json" export const CODEX_SESSION_AFFINITY_FILE = "codex-session-affinity.json" export const CODEX_SNAPSHOTS_FILE = "codex-snapshots.json" +export const CODEX_SHAREABLE_DEBUG_FILE = "shareable-debug.jsonl" const OPENCODE_AUTH_FILE = "auth.json" const OPENCODE_SESSION_STORAGE_DIR = path.join("opencode", "storage", "session") @@ -89,3 +90,7 @@ export function defaultSnapshotsPath(): string { export function defaultSessionAffinityPath(env: Record = process.env): string { return path.join(defaultOpencodeCachePath(env), CODEX_SESSION_AFFINITY_FILE) } + +export function defaultShareableDebugLogPath(env: Record = process.env): string { + return path.join(defaultCodexPluginLogsPath(env), CODEX_SHAREABLE_DEBUG_FILE) +} diff --git a/lib/shareable-debug.ts b/lib/shareable-debug.ts new file mode 100644 index 0000000..ae96f96 --- /dev/null +++ b/lib/shareable-debug.ts @@ -0,0 +1,1364 @@ +import { createHmac, randomBytes, randomUUID } from "node:crypto" +import fs from "node:fs/promises" +import fsSync from "node:fs" +import path from "node:path" + +import { + enforceOwnerOnlyPermissions, + isFsErrorCode, + readJsonFileBestEffort, + writeJsonFileAtomic, + writeJsonFileAtomicBestEffort +} from "./cache-io.js" +import type { Logger } from "./logger.js" +import { defaultShareableDebugLogPath } from "./paths.js" +import type { OpenAIAuthMode, RotationStrategy } from "./types.js" + +const PROCESS_SECRET = randomBytes(32) + +const DEFAULT_SUMMARY_MAX_BYTES = 256 * 1024 +const DEFAULT_SUMMARY_MAX_FILES = 2 +const DEFAULT_SEGMENT_MAX_BYTES = 16 * 1024 +const DEFAULT_ROLLING_BUFFER_MAX_BYTES = 256 * 1024 +const DEFAULT_PRE_TRIGGER_EVENT_COUNT = 40 +const DEFAULT_POST_TRIGGER_EVENT_COUNT = 20 
+const DEFAULT_MAX_INCIDENT_FILES = 8 +const DEFAULT_MAX_INCIDENT_BYTES = 512 * 1024 + +type ShareableDebugBaseEvent = { + authMode: OpenAIAuthMode +} + +type ShareableDebugEventRecord = { + seq: number + timestamp: string + event: string + [key: string]: unknown +} + +type JsonlReadResult = { + records: ShareableDebugEventRecord[] + hadTruncatedTail: boolean +} + +type TriggerReason = "http_status" | "auth_failure" | "retry_after_429" | "synthetic_fatal_error" | "process_failure" + +type IncidentLifecycleReason = "trigger" | "recovered" | "missing_output" | "interrupted" + +type IncidentManifest = { + version: 1 + incidentId: string + outputFilePath: string + triggerSeq: number + triggerEvent: string + triggerReason: TriggerReason + preTriggerStartSeq: number + preTriggerEndSeq: number + postWindowCount: number + postRemaining: number + status: "open" + createdAt: string + updatedAt: string +} + +type ManifestlessRecoveryMarker = { + version: 1 + triggerSeq: number +} + +type ShareableDebugIncidentConfig = { + summaryMaxBytes?: number + summaryMaxFiles?: number + segmentMaxBytes?: number + rollingBufferMaxBytes?: number + preTriggerEventCount?: number + postTriggerEventCount?: number + maxIncidentFiles?: number + maxIncidentBytes?: number +} + +export type ShareableDebugLogger = { + enabled: boolean + emitRotationBegin: ( + input: ShareableDebugBaseEvent & { + rotationStrategy: RotationStrategy + activeIdentityKey?: string + sessionKey?: string | null + totalAccounts: number + enabledAccounts: number + } + ) => Promise + emitRotationDecision: ( + input: ShareableDebugBaseEvent & { + rotationStrategy: RotationStrategy + decision: string + totalCount: number + disabledCount: number + cooldownCount: number + refreshLeaseCount: number + eligibleCount: number + attemptedCount?: number + selectedIdentityKey?: string + activeIdentityKey?: string + sessionKey?: string + attemptKey?: string + selectedIndex?: number + } + ) => Promise + emitRotationCandidateSelected: ( + 
input: ShareableDebugBaseEvent & { + attemptKey?: string + selectedIdentityKey?: string + selectedIndex?: number + selectedEnabled?: boolean + selectedCooldownUntil?: number | null + selectedExpires?: number | null + } + ) => Promise + emitFetchAttemptRequest: ( + input: ShareableDebugBaseEvent & { + attempt: number + maxAttempts: number + attemptReasonCode: string + request: Request + selectedIdentityKey?: string + activeIdentityKey?: string + sessionKey?: string | null + rotationStrategy?: string + } + ) => Promise + emitFetchAttemptResponse: ( + input: ShareableDebugBaseEvent & { + attempt: number + maxAttempts: number + attemptReasonCode: string + endpoint?: string + status: number + selectedIdentityKey?: string + activeIdentityKey?: string + sessionKey?: string | null + rotationStrategy?: string + } + ) => Promise + emitRetryAfter429: ( + input: ShareableDebugBaseEvent & { + attempt: number + maxAttempts: number + attemptReasonCode: string + selectedIdentityKey?: string + activeIdentityKey?: string + sessionKey?: string | null + rotationStrategy?: string + } + ) => Promise + emitAuthFailure: ( + input: ShareableDebugBaseEvent & { + outcome: string + status: number + sessionKey?: string | null + selectedIdentityKey?: string + activeIdentityKey?: string + waitMs?: number + } + ) => Promise + emitSyntheticFatalError: ( + input: ShareableDebugBaseEvent & { + outcome: string + status: number + sessionKey?: string | null + selectedIdentityKey?: string + activeIdentityKey?: string + endpoint?: string + } + ) => Promise +} + +function pseudonym(prefix: string, raw: string | undefined | null): string | undefined { + const normalized = raw?.trim() + if (!normalized) return undefined + const digest = createHmac("sha256", PROCESS_SECRET).update(normalized).digest("hex").slice(0, 8) + return `${prefix}_${digest}` +} + +function normalizeEndpoint(input: string | undefined): string | undefined { + if (!input) return undefined + try { + return new URL(input).pathname || 
undefined + } catch { + return undefined + } +} + +async function extractPromptCacheKey(request: Request): Promise { + try { + const raw = await request.clone().text() + if (!raw) return undefined + + try { + const parsed = JSON.parse(raw) as unknown + if (parsed && typeof parsed === "object" && !Array.isArray(parsed)) { + const candidate = (parsed as Record).prompt_cache_key + return typeof candidate === "string" && candidate.trim().length > 0 ? candidate : undefined + } + return undefined + } catch { + const params = new URLSearchParams(raw) + const candidate = params.get("prompt_cache_key") + return candidate && candidate.trim().length > 0 ? candidate : undefined + } + } catch { + return undefined + } +} + +function createNoopShareableDebugLogger(): ShareableDebugLogger { + const noop = async () => {} + return { + enabled: false, + emitRotationBegin: noop, + emitRotationDecision: noop, + emitRotationCandidateSelected: noop, + emitFetchAttemptRequest: noop, + emitFetchAttemptResponse: noop, + emitRetryAfter429: noop, + emitAuthFailure: noop, + emitSyntheticFatalError: noop + } +} + +function defaultStateDirForLogPath(filePath: string): string { + const parsed = path.parse(filePath) + return path.join(parsed.dir, `${parsed.name}-state`) +} + +function parsePositiveInteger(value: number | undefined, fallback: number): number { + if (typeof value !== "number" || !Number.isFinite(value)) return fallback + return Math.max(1, Math.floor(value)) +} + +function segmentFileName(startSeq: number): string { + return `segment-${startSeq.toString().padStart(16, "0")}.jsonl` +} + +function incidentFileName(incidentId: string, timestamp: string): string { + const stamp = timestamp.replaceAll(/[:.]/g, "-") + return `incident-${stamp}-${incidentId}.jsonl` +} + +function sortLexicallyAscending(values: string[]): string[] { + return [...values].sort((left, right) => left.localeCompare(right)) +} + +function incidentFileReference(filePath: string): string { + return 
path.basename(filePath) +} + +function contiguousWindow( + rows: ShareableDebugEventRecord[], + startSeq: number, + maxCount: number +): { + rows: ShareableDebugEventRecord[] + hasGapAfterPrefix: boolean +} { + const deduped = Array.from(new Map(rows.map((row) => [row.seq, row] as const)).values()).sort( + (left, right) => left.seq - right.seq + ) + const window: ShareableDebugEventRecord[] = [] + let expectedSeq = startSeq + + for (const row of deduped) { + if (window.length >= maxCount) break + if (row.seq < expectedSeq) continue + if (row.seq > expectedSeq) { + return { + rows: window, + hasGapAfterPrefix: true + } + } + window.push(row) + expectedSeq += 1 + } + + return { + rows: window, + hasGapAfterPrefix: false + } +} + +function parseSegmentStartSeq(filePath: string): number | undefined { + const match = /^segment-(\d+)\.jsonl$/u.exec(path.basename(filePath)) + if (!match) return undefined + const value = Number.parseInt(match[1] ?? "", 10) + return Number.isFinite(value) && value >= 1 ? 
value : undefined +} + +async function readJsonlFile(filePath: string): Promise { + try { + const raw = await fs.readFile(filePath, "utf8") + const lines = raw + .split("\n") + .map((line) => line.trim()) + .filter(Boolean) + const records: ShareableDebugEventRecord[] = [] + let hadTruncatedTail = false + + for (const [index, line] of lines.entries()) { + try { + records.push(JSON.parse(line) as ShareableDebugEventRecord) + } catch (error) { + if (index === lines.length - 1) { + hadTruncatedTail = true + break + } + throw error + } + } + + return { + records, + hadTruncatedTail + } + } catch (error) { + if (isFsErrorCode(error, "ENOENT")) { + return { + records: [], + hadTruncatedTail: false + } + } + throw error + } +} + +function readJsonlFileSync(filePath: string): JsonlReadResult { + try { + const raw = fsSync.readFileSync(filePath, "utf8") + const lines = raw + .split("\n") + .map((line) => line.trim()) + .filter(Boolean) + const records: ShareableDebugEventRecord[] = [] + let hadTruncatedTail = false + + for (const [index, line] of lines.entries()) { + try { + records.push(JSON.parse(line) as ShareableDebugEventRecord) + } catch (error) { + if (index === lines.length - 1) { + hadTruncatedTail = true + break + } + throw error + } + } + + return { + records, + hadTruncatedTail + } + } catch (error) { + if (isFsErrorCode(error, "ENOENT")) { + return { + records: [], + hadTruncatedTail: false + } + } + throw error + } +} + +async function readJsonlRecords(filePath: string): Promise { + return (await readJsonlFile(filePath)).records +} + +function readJsonlRecordsSync(filePath: string): ShareableDebugEventRecord[] { + return readJsonlFileSync(filePath).records +} + +async function lastSeqFromSegment(filePath: string): Promise { + const rows = await readJsonlRecords(filePath) + const last = rows.at(-1) + return typeof last?.seq === "number" ? 
last.seq : 0 +} + +async function appendJsonlLine(filePath: string, line: string): Promise { + await fs.mkdir(path.dirname(filePath), { recursive: true }) + await fs.appendFile(filePath, line, { mode: 0o600 }) + await enforceOwnerOnlyPermissions(filePath) +} + +async function writeJsonlLines(filePath: string, lines: string[]): Promise { + await fs.mkdir(path.dirname(filePath), { recursive: true }) + await fs.writeFile(filePath, lines.join(""), { mode: 0o600 }) + await enforceOwnerOnlyPermissions(filePath) +} + +function appendJsonlLineSync(filePath: string, line: string): void { + fsSync.mkdirSync(path.dirname(filePath), { recursive: true }) + fsSync.appendFileSync(filePath, line, { mode: 0o600 }) + try { + fsSync.chmodSync(filePath, 0o600) + } catch { + // best-effort only in crash path + } +} + +function writeJsonlLinesSync(filePath: string, lines: string[]): void { + fsSync.mkdirSync(path.dirname(filePath), { recursive: true }) + fsSync.writeFileSync(filePath, lines.join(""), { mode: 0o600 }) + try { + fsSync.chmodSync(filePath, 0o600) + } catch { + // best-effort only in crash path + } +} + +async function rotateLogFileIfNeeded(filePath: string, nextLineBytes: number, maxBytes: number, maxFiles: number) { + const keepFiles = Math.max(1, maxFiles) + let currentSize = 0 + try { + currentSize = (await fs.stat(filePath)).size + } catch (error) { + if (!isFsErrorCode(error, "ENOENT")) throw error + } + if (currentSize === 0 || currentSize + nextLineBytes <= maxBytes) return + + for (let index = keepFiles - 1; index >= 1; index -= 1) { + const source = index === 1 ? 
filePath : `${filePath}.${index - 1}` + const dest = `${filePath}.${index}` + try { + await deleteFileIfExists(dest) + await fs.rename(source, dest) + } catch (error) { + if (!isFsErrorCode(error, "ENOENT")) throw error + } + } +} + +async function deleteFileIfExists(filePath: string): Promise { + try { + await fs.unlink(filePath) + } catch (error) { + if (!isFsErrorCode(error, "ENOENT")) throw error + } +} + +function isIncidentManifest(value: unknown): value is IncidentManifest { + return ( + typeof value === "object" && + value !== null && + "version" in value && + value.version === 1 && + "incidentId" in value && + typeof value.incidentId === "string" && + "outputFilePath" in value && + typeof value.outputFilePath === "string" && + "triggerSeq" in value && + typeof value.triggerSeq === "number" && + "triggerEvent" in value && + typeof value.triggerEvent === "string" && + "triggerReason" in value && + typeof value.triggerReason === "string" && + "preTriggerStartSeq" in value && + typeof value.preTriggerStartSeq === "number" && + "preTriggerEndSeq" in value && + typeof value.preTriggerEndSeq === "number" && + "postWindowCount" in value && + typeof value.postWindowCount === "number" && + "postRemaining" in value && + typeof value.postRemaining === "number" && + "status" in value && + value.status === "open" && + "createdAt" in value && + typeof value.createdAt === "string" && + "updatedAt" in value && + typeof value.updatedAt === "string" + ) +} + +function isManifestlessRecoveryMarker(value: unknown): value is ManifestlessRecoveryMarker { + return ( + typeof value === "object" && + value !== null && + "version" in value && + value.version === 1 && + "triggerSeq" in value && + typeof value.triggerSeq === "number" + ) +} + +export function createShareableDebugLogger(input: { + enabled: boolean + env?: Record + filePath?: string + stateDir?: string + registerProcessHandlers?: boolean + incidentConfig?: ShareableDebugIncidentConfig + log?: Logger +}): 
ShareableDebugLogger { + if (!input.enabled) return createNoopShareableDebugLogger() + + const filePath = input.filePath ?? defaultShareableDebugLogPath(input.env) + const stateDir = input.stateDir ?? defaultStateDirForLogPath(filePath) + const segmentsDir = path.join(stateDir, "segments") + const incidentsDir = path.join(stateDir, "incidents") + const manifestPath = path.join(stateDir, "incident-state.json") + const manifestlessRecoveryPath = path.join(stateDir, "manifestless-recovery.json") + + const summaryMaxBytes = parsePositiveInteger(input.incidentConfig?.summaryMaxBytes, DEFAULT_SUMMARY_MAX_BYTES) + const summaryMaxFiles = parsePositiveInteger(input.incidentConfig?.summaryMaxFiles, DEFAULT_SUMMARY_MAX_FILES) + const segmentMaxBytes = parsePositiveInteger(input.incidentConfig?.segmentMaxBytes, DEFAULT_SEGMENT_MAX_BYTES) + const rollingBufferMaxBytes = parsePositiveInteger( + input.incidentConfig?.rollingBufferMaxBytes, + DEFAULT_ROLLING_BUFFER_MAX_BYTES + ) + const preTriggerEventCount = parsePositiveInteger( + input.incidentConfig?.preTriggerEventCount, + DEFAULT_PRE_TRIGGER_EVENT_COUNT + ) + const postTriggerEventCount = parsePositiveInteger( + input.incidentConfig?.postTriggerEventCount, + DEFAULT_POST_TRIGGER_EVENT_COUNT + ) + const maxIncidentFiles = parsePositiveInteger(input.incidentConfig?.maxIncidentFiles, DEFAULT_MAX_INCIDENT_FILES) + const maxIncidentBytes = parsePositiveInteger(input.incidentConfig?.maxIncidentBytes, DEFAULT_MAX_INCIDENT_BYTES) + + let nextSeq = 1 + let currentSegmentPath: string | undefined + let currentSegmentBytes = 0 + let openIncident: IncidentManifest | undefined + let pendingWrite = Promise.resolve() + let initPromise = Promise.resolve() + let handlersRegistered = false + let signalInFlight = false + + const sanitizeJsonlTail = async (targetPath: string): Promise => { + const result = await readJsonlFile(targetPath) + if (result.hadTruncatedTail) { + await writeJsonlLines( + targetPath, + result.records.map((record) => 
`${JSON.stringify(record)}\n`) + ) + } + return result.records + } + + const appendSummaryLifecycleSync = (event: string, payload: Record): void => { + appendSummaryLineSync(`${JSON.stringify({ timestamp: new Date().toISOString(), event, ...payload })}\n`) + } + + const reconcileIncidentFile = async (incident: IncidentManifest): Promise => { + let incidentRecords: ShareableDebugEventRecord[] + try { + incidentRecords = await sanitizeJsonlTail(incident.outputFilePath) + } catch (error) { + if (isFsErrorCode(error, "ENOENT")) { + incidentRecords = [] + } else { + await sealIncidentIncomplete({ + incident, + reason: "interrupted" + }) + return undefined + } + } + + const baseRowsFromSegments = await readEventsInRange(incident.preTriggerStartSeq, incident.preTriggerEndSeq) + const baseRowsFromIncident = incidentRecords.filter( + (row) => typeof row.seq === "number" && row.seq >= incident.preTriggerStartSeq && row.seq <= incident.triggerSeq + ) + const expectedBaseCount = incident.triggerSeq - incident.preTriggerStartSeq + 1 + const baseWindow = contiguousWindow( + [...baseRowsFromIncident, ...baseRowsFromSegments], + incident.preTriggerStartSeq, + expectedBaseCount + ) + + if ( + baseWindow.hasGapAfterPrefix || + baseWindow.rows.length !== expectedBaseCount || + !baseWindow.rows.some((row) => row.seq === incident.triggerSeq && row.event === incident.triggerEvent) + ) { + await sealIncidentIncomplete({ + incident, + reason: "interrupted" + }) + return undefined + } + + const closedRow = incidentRecords.find( + (row) => row.event === "incident_closed" && row.incidentId === incident.incidentId + ) + if (closedRow) { + await deleteFileIfExists(manifestPath) + return undefined + } + + const postRowsFromSegments = await readEventsInRange( + incident.triggerSeq + 1, + incident.triggerSeq + incident.postWindowCount + ) + const recoveredPostWindow = contiguousWindow( + [ + ...incidentRecords.filter( + (row) => typeof row.seq === "number" && row.seq > incident.triggerSeq && 
row.event !== "incident_closed" + ), + ...postRowsFromSegments + ], + incident.triggerSeq + 1, + incident.postWindowCount + ) + if (recoveredPostWindow.hasGapAfterPrefix) { + await sealIncidentIncomplete({ + incident, + reason: "interrupted" + }) + return undefined + } + const recoveredPostRows = recoveredPostWindow.rows + + await writeJsonlLines( + incident.outputFilePath, + [...baseWindow.rows, ...recoveredPostRows].map((row) => `${JSON.stringify(row)}\n`) + ) + + const postRemaining = Math.max(0, incident.postWindowCount - recoveredPostRows.length) + const recoveredIncident: IncidentManifest = { + ...incident, + postRemaining, + updatedAt: new Date().toISOString() + } + + if (recoveredIncident.postRemaining <= 0) { + await closeIncident(recoveredIncident, "recovered") + return undefined + } + + await writeManifest(recoveredIncident) + await appendSummaryLifecycle("incident_recovered", { + incidentId: recoveredIncident.incidentId, + incidentFile: incidentFileReference(recoveredIncident.outputFilePath), + triggerSeq: recoveredIncident.triggerSeq, + triggerEvent: recoveredIncident.triggerEvent, + postRemaining: recoveredIncident.postRemaining + }) + return recoveredIncident + } + + const initializeState = async (): Promise => { + await fs.mkdir(segmentsDir, { recursive: true }) + await fs.mkdir(incidentsDir, { recursive: true }) + + const segmentFiles = sortLexicallyAscending(await fs.readdir(segmentsDir).catch(() => [])) + const latestSegment = segmentFiles.at(-1) + let latestSegmentRecord: ShareableDebugEventRecord | undefined + if (latestSegment) { + currentSegmentPath = path.join(segmentsDir, latestSegment) + const latestSegmentRows = await sanitizeJsonlTail(currentSegmentPath) + currentSegmentBytes = (await fs.stat(currentSegmentPath)).size + const segmentStartSeq = parseSegmentStartSeq(currentSegmentPath) ?? 
1 + nextSeq = Math.max(segmentStartSeq, (await lastSeqFromSegment(currentSegmentPath)) + 1) + latestSegmentRecord = latestSegmentRows.at(-1) + } + + const manifest = await readJsonFileBestEffort(manifestPath) + const manifestlessRecovery = await readJsonFileBestEffort(manifestlessRecoveryPath) + if ( + isManifestlessRecoveryMarker(manifestlessRecovery) && + manifestlessRecovery.triggerSeq !== latestSegmentRecord?.seq + ) { + await deleteFileIfExists(manifestlessRecoveryPath) + } + if (!isIncidentManifest(manifest)) { + await deleteFileIfExists(manifestPath) + const triggerReason = latestSegmentRecord ? triggerReasonFor(latestSegmentRecord) : undefined + if ( + latestSegmentRecord && + triggerReason && + (!isManifestlessRecoveryMarker(manifestlessRecovery) || + manifestlessRecovery.triggerSeq !== latestSegmentRecord.seq) + ) { + const incident = createIncidentManifest(latestSegmentRecord, triggerReason) + const preTriggerEvents = await readEventsInRange(incident.preTriggerStartSeq, incident.preTriggerEndSeq) + const expectedBaseCount = incident.triggerSeq - incident.preTriggerStartSeq + 1 + const baseWindow = contiguousWindow(preTriggerEvents, incident.preTriggerStartSeq, expectedBaseCount) + + await writeJsonlLines( + incident.outputFilePath, + baseWindow.rows.map((entry) => `${JSON.stringify(entry)}\n`) + ) + if (baseWindow.hasGapAfterPrefix || baseWindow.rows.length !== expectedBaseCount) { + await writeJsonFileAtomic(manifestlessRecoveryPath, { + version: 1, + triggerSeq: latestSegmentRecord.seq + }) + await sealIncidentIncomplete({ + incident, + reason: "interrupted" + }) + return + } + + await writeManifest(incident) + await deleteFileIfExists(manifestlessRecoveryPath) + await appendSummaryLifecycle("incident_recovered", { + incidentId: incident.incidentId, + incidentFile: incidentFileReference(incident.outputFilePath), + triggerSeq: incident.triggerSeq, + triggerEvent: incident.triggerEvent, + postRemaining: incident.postRemaining + }) + openIncident = 
incident + await pruneIncidents() + } + return + } + + openIncident = await reconcileIncidentFile(manifest) + } + + const runQueued = (work: () => Promise): Promise => { + pendingWrite = pendingWrite + .then(() => initPromise) + .then(work) + .catch((error) => { + input.log?.warn("shareable debug write failed", { + error: error instanceof Error ? error.message : String(error) + }) + }) + return pendingWrite + } + + const writeManifest = async (manifest: IncidentManifest): Promise => { + await writeJsonFileAtomic(manifestPath, manifest) + } + + const writeManifestBestEffort = async (manifest: IncidentManifest): Promise => { + await writeJsonFileAtomicBestEffort(manifestPath, manifest) + } + + const appendSummaryLine = async (line: string): Promise => { + await rotateLogFileIfNeeded(filePath, Buffer.byteLength(line), summaryMaxBytes, summaryMaxFiles) + await appendJsonlLine(filePath, line) + } + + const appendSummaryLifecycle = async (event: string, payload: Record): Promise => { + const line = `${JSON.stringify({ timestamp: new Date().toISOString(), event, ...payload })}\n` + await appendSummaryLine(line) + } + + const appendSummaryLineSync = (line: string): void => { + try { + const existingSize = fsSync.existsSync(filePath) ? fsSync.statSync(filePath).size : 0 + if (existingSize > 0 && existingSize + Buffer.byteLength(line) > summaryMaxBytes) { + for (let index = summaryMaxFiles - 1; index >= 1; index -= 1) { + const source = index === 1 ? 
filePath : `${filePath}.${index - 1}` + const dest = `${filePath}.${index}` + if (fsSync.existsSync(source)) { + if (fsSync.existsSync(dest)) { + fsSync.unlinkSync(dest) + } + fsSync.renameSync(source, dest) + } + } + } + appendJsonlLineSync(filePath, line) + } catch { + // best-effort only in crash path + } + } + + const ensureSegmentForLine = async (lineBytes: number, seq: number): Promise => { + if (!currentSegmentPath || currentSegmentBytes + lineBytes > segmentMaxBytes) { + currentSegmentPath = path.join(segmentsDir, segmentFileName(seq)) + currentSegmentBytes = 0 + } + } + + const pruneSegments = async (): Promise => { + const files = sortLexicallyAscending(await fs.readdir(segmentsDir)) + let totalBytes = 0 + const entries: Array<{ name: string; filePath: string; size: number }> = [] + for (const name of files) { + const filePath = path.join(segmentsDir, name) + const stat = await fs.stat(filePath) + entries.push({ name, filePath, size: stat.size }) + totalBytes += stat.size + } + + for (const entry of entries) { + if (totalBytes <= rollingBufferMaxBytes) break + if (entry.filePath === currentSegmentPath) continue + await deleteFileIfExists(entry.filePath) + totalBytes -= entry.size + } + } + + const pruneIncidents = async (): Promise => { + const incidentFiles = sortLexicallyAscending(await fs.readdir(incidentsDir).catch(() => [])) + const entries: Array<{ filePath: string; name: string; size: number }> = [] + let totalBytes = 0 + for (const name of incidentFiles) { + const filePath = path.join(incidentsDir, name) + const stat = await fs.stat(filePath) + entries.push({ filePath, name, size: stat.size }) + totalBytes += stat.size + } + + for (const entry of entries) { + if (entries.length <= maxIncidentFiles && totalBytes <= maxIncidentBytes) break + if (entry.filePath === openIncident?.outputFilePath) continue + await deleteFileIfExists(entry.filePath) + totalBytes -= entry.size + const index = entries.findIndex((candidate) => candidate.filePath === 
entry.filePath) + if (index >= 0) { + entries.splice(index, 1) + } + } + } + + const appendToSegment = async (record: ShareableDebugEventRecord): Promise => { + const line = `${JSON.stringify(record)}\n` + const lineBytes = Buffer.byteLength(line) + await ensureSegmentForLine(lineBytes, record.seq) + if (!currentSegmentPath) { + throw new Error("shareable_debug_missing_segment_path") + } + await appendJsonlLine(currentSegmentPath, line) + currentSegmentBytes += lineBytes + await pruneSegments() + } + + const appendToSegmentSync = (record: ShareableDebugEventRecord): void => { + const line = `${JSON.stringify(record)}\n` + const lineBytes = Buffer.byteLength(line) + if (!currentSegmentPath || currentSegmentBytes + lineBytes > segmentMaxBytes) { + currentSegmentPath = path.join(segmentsDir, segmentFileName(record.seq)) + currentSegmentBytes = 0 + } + appendJsonlLineSync(currentSegmentPath, line) + currentSegmentBytes += lineBytes + } + + const readEventsInRange = async (startSeq: number, endSeq: number): Promise => { + const files = sortLexicallyAscending(await fs.readdir(segmentsDir)) + const events: ShareableDebugEventRecord[] = [] + for (const name of files) { + const rows = await readJsonlRecords(path.join(segmentsDir, name)) + for (const row of rows) { + if (typeof row.seq !== "number") continue + if (row.seq < startSeq || row.seq > endSeq) continue + events.push(row) + } + } + return events.sort((left, right) => left.seq - right.seq) + } + + const readEventsInRangeSync = (startSeq: number, endSeq: number): ShareableDebugEventRecord[] => { + const files = sortLexicallyAscending(fsSync.readdirSync(segmentsDir, "utf8")) + const events: ShareableDebugEventRecord[] = [] + for (const name of files) { + const rows = readJsonlRecordsSync(path.join(segmentsDir, name)) + for (const row of rows) { + if (typeof row.seq !== "number") continue + if (row.seq < startSeq || row.seq > endSeq) continue + events.push(row) + } + } + return events.sort((left, right) => left.seq - 
right.seq) + } + + const createIncidentManifest = ( + record: ShareableDebugEventRecord, + triggerReason: TriggerReason + ): IncidentManifest => { + const incidentId = randomUUID() + const timestamp = new Date().toISOString() + return { + version: 1, + incidentId, + outputFilePath: path.join(incidentsDir, incidentFileName(incidentId, timestamp)), + triggerSeq: record.seq, + triggerEvent: record.event, + triggerReason, + preTriggerStartSeq: Math.max(1, record.seq - preTriggerEventCount), + preTriggerEndSeq: record.seq, + postWindowCount: postTriggerEventCount, + postRemaining: postTriggerEventCount, + status: "open", + createdAt: timestamp, + updatedAt: timestamp + } + } + + const appendIncidentLine = async (incident: IncidentManifest, record: ShareableDebugEventRecord): Promise => { + await appendJsonlLine(incident.outputFilePath, `${JSON.stringify(record)}\n`) + } + + const appendIncidentLifecycle = async ( + incident: IncidentManifest, + event: string, + payload: Record + ): Promise => { + await appendJsonlLine( + incident.outputFilePath, + `${JSON.stringify({ + seq: nextSeq++, + timestamp: new Date().toISOString(), + event, + incidentId: incident.incidentId, + ...payload + })}\n` + ) + } + + const appendIncidentLifecycleSync = ( + incident: IncidentManifest, + event: string, + payload: Record + ): void => { + appendJsonlLineSync( + incident.outputFilePath, + `${JSON.stringify({ + seq: nextSeq++, + timestamp: new Date().toISOString(), + event, + incidentId: incident.incidentId, + ...payload + })}\n` + ) + } + + const closeIncident = async (incident: IncidentManifest, reason: IncidentLifecycleReason): Promise => { + await appendIncidentLifecycle(incident, "incident_closed", { + reason, + triggerSeq: incident.triggerSeq, + triggerEvent: incident.triggerEvent, + incomplete: false + }) + await appendSummaryLifecycle("incident_closed", { + incidentId: incident.incidentId, + incidentFile: incidentFileReference(incident.outputFilePath), + triggerSeq: 
incident.triggerSeq, + triggerEvent: incident.triggerEvent, + reason + }) + await deleteFileIfExists(manifestPath) + openIncident = undefined + await pruneIncidents() + } + + const sealIncidentIncomplete = async (inputState: { + incident: IncidentManifest + reason: IncidentLifecycleReason + }): Promise => { + try { + await sanitizeJsonlTail(inputState.incident.outputFilePath) + } catch (error) { + if (isFsErrorCode(error, "ENOENT")) { + await writeJsonlLines(inputState.incident.outputFilePath, []) + } + } + await writeJsonFileAtomic(manifestlessRecoveryPath, { + version: 1, + triggerSeq: inputState.incident.triggerSeq + }) + await appendIncidentLifecycle(inputState.incident, "incident_closed", { + reason: inputState.reason, + triggerSeq: inputState.incident.triggerSeq, + triggerEvent: inputState.incident.triggerEvent, + incomplete: true + }) + await appendSummaryLifecycle("incident_closed", { + incidentId: inputState.incident.incidentId, + incidentFile: incidentFileReference(inputState.incident.outputFilePath), + triggerSeq: inputState.incident.triggerSeq, + triggerEvent: inputState.incident.triggerEvent, + reason: inputState.reason, + incomplete: true + }) + await deleteFileIfExists(manifestPath) + } + + const writeManifestSync = (incident: IncidentManifest): void => { + fsSync.mkdirSync(path.dirname(manifestPath), { recursive: true }) + fsSync.writeFileSync(manifestPath, `${JSON.stringify(incident, null, 2)}\n`, { mode: 0o600 }) + try { + fsSync.chmodSync(manifestPath, 0o600) + } catch { + // best-effort only in crash path + } + } + + const sealIncidentIncompleteSync = (incident: IncidentManifest, reason: IncidentLifecycleReason): void => { + try { + fsSync.mkdirSync(path.dirname(manifestlessRecoveryPath), { recursive: true }) + fsSync.writeFileSync( + manifestlessRecoveryPath, + `${JSON.stringify({ version: 1, triggerSeq: incident.triggerSeq }, null, 2)}\n`, + { mode: 0o600 } + ) + fsSync.chmodSync(manifestlessRecoveryPath, 0o600) + } catch { + // best-effort 
only in crash path + } + appendIncidentLifecycleSync(incident, "incident_closed", { + reason, + triggerSeq: incident.triggerSeq, + triggerEvent: incident.triggerEvent, + incomplete: true + }) + appendSummaryLifecycleSync("incident_closed", { + incidentId: incident.incidentId, + incidentFile: incidentFileReference(incident.outputFilePath), + triggerSeq: incident.triggerSeq, + triggerEvent: incident.triggerEvent, + reason, + incomplete: true + }) + try { + fsSync.unlinkSync(manifestPath) + } catch (error) { + if (!isFsErrorCode(error, "ENOENT")) throw error + } + } + + const openIncidentCaptureSync = ( + record: ShareableDebugEventRecord, + triggerReason: TriggerReason + ): IncidentManifest => { + const incident = createIncidentManifest(record, triggerReason) + writeManifestSync(incident) + const preTriggerEvents = readEventsInRangeSync(incident.preTriggerStartSeq, record.seq) + const expectedBaseCount = incident.triggerSeq - incident.preTriggerStartSeq + 1 + const baseWindow = contiguousWindow(preTriggerEvents, incident.preTriggerStartSeq, expectedBaseCount) + writeJsonlLinesSync( + incident.outputFilePath, + baseWindow.rows.map((entry) => `${JSON.stringify(entry)}\n`) + ) + if (baseWindow.hasGapAfterPrefix || baseWindow.rows.length !== expectedBaseCount) { + sealIncidentIncompleteSync(incident, "interrupted") + return incident + } + try { + fsSync.unlinkSync(manifestlessRecoveryPath) + } catch (error) { + if (!isFsErrorCode(error, "ENOENT")) throw error + } + appendSummaryLifecycleSync("incident_opened", { + incidentId: incident.incidentId, + incidentFile: incidentFileReference(incident.outputFilePath), + triggerSeq: record.seq, + triggerEvent: record.event, + triggerReason + }) + return incident + } + + initPromise = initializeState().catch((error) => { + input.log?.warn("shareable debug initialization failed", { + error: error instanceof Error ? 
error.message : String(error) + }) + }) + + const openIncidentCapture = async ( + record: ShareableDebugEventRecord, + triggerReason: TriggerReason + ): Promise => { + if (openIncident) return + const incident = createIncidentManifest(record, triggerReason) + await writeManifest(incident) + const preTriggerEvents = await readEventsInRange(incident.preTriggerStartSeq, record.seq) + const expectedBaseCount = incident.triggerSeq - incident.preTriggerStartSeq + 1 + const baseWindow = contiguousWindow(preTriggerEvents, incident.preTriggerStartSeq, expectedBaseCount) + await writeJsonlLines( + incident.outputFilePath, + baseWindow.rows.map((entry) => `${JSON.stringify(entry)}\n`) + ) + if (baseWindow.hasGapAfterPrefix || baseWindow.rows.length !== expectedBaseCount) { + await sealIncidentIncomplete({ + incident, + reason: "interrupted" + }) + return + } + await deleteFileIfExists(manifestlessRecoveryPath) + await appendSummaryLifecycle("incident_opened", { + incidentId: incident.incidentId, + incidentFile: incidentFileReference(incident.outputFilePath), + triggerSeq: record.seq, + triggerEvent: record.event, + triggerReason + }) + + if (incident.postRemaining <= 0) { + await closeIncident(incident, "trigger") + return + } + openIncident = incident + await pruneIncidents() + } + + const appendToOpenIncident = async (record: ShareableDebugEventRecord): Promise => { + if (!openIncident || record.seq <= openIncident.triggerSeq) return + await appendIncidentLine(openIncident, record) + openIncident = { + ...openIncident, + postRemaining: Math.max(0, openIncident.postRemaining - 1), + updatedAt: new Date().toISOString() + } + if (openIncident.postRemaining <= 0) { + await closeIncident(openIncident, "trigger") + return + } + await writeManifestBestEffort(openIncident) + } + + const triggerReasonFor = (record: ShareableDebugEventRecord): TriggerReason | undefined => { + if (record.event === "auth_failure") return "auth_failure" + if (record.event === "retry_after_429") return 
"retry_after_429" + if (record.event === "synthetic_fatal_error") return "synthetic_fatal_error" + if (record.event === "process_failure") return "process_failure" + if (record.event === "fetch_attempt_response") { + const status = typeof record.status === "number" ? record.status : undefined + if (status === 401 || status === 403 || status === 429) { + return "http_status" + } + } + return undefined + } + + const appendRecord = async (record: ShareableDebugEventRecord): Promise => { + await appendToSegment(record) + const triggerReason = triggerReasonFor(record) + if (triggerReason && !openIncident) { + await openIncidentCapture(record, triggerReason) + } else { + await appendToOpenIncident(record) + } + await appendSummaryLine(`${JSON.stringify(record)}\n`) + } + + const buildRecord = (event: string, payload: Record): ShareableDebugEventRecord => ({ + seq: nextSeq++, + timestamp: new Date().toISOString(), + event, + ...payload + }) + + const emitEvent = async (event: string, payload: Record): Promise => { + await runQueued(async () => { + const record = buildRecord(event, payload) + await appendRecord(record) + }) + } + + const captureProcessFailureSync = (event: string, payload: Record): void => { + try { + const record = buildRecord(event, payload) + appendToSegmentSync(record) + const triggerReason = triggerReasonFor(record) + if (triggerReason && !openIncident) { + openIncidentCaptureSync(record, triggerReason) + } else if (openIncident && record.seq > openIncident.triggerSeq) { + appendJsonlLineSync(openIncident.outputFilePath, `${JSON.stringify(record)}\n`) + openIncident = { + ...openIncident, + postRemaining: Math.max(0, openIncident.postRemaining - 1), + updatedAt: new Date().toISOString() + } + if (openIncident.postRemaining <= 0) { + appendIncidentLifecycleSync(openIncident, "incident_closed", { + reason: "trigger", + triggerSeq: openIncident.triggerSeq, + triggerEvent: openIncident.triggerEvent, + incomplete: false + }) + try { + 
fsSync.unlinkSync(manifestPath) + } catch (error) { + if (!isFsErrorCode(error, "ENOENT")) throw error + } + appendSummaryLifecycleSync("incident_closed", { + incidentId: openIncident.incidentId, + incidentFile: incidentFileReference(openIncident.outputFilePath), + triggerSeq: openIncident.triggerSeq, + triggerEvent: openIncident.triggerEvent, + reason: "trigger" + }) + openIncident = undefined + } else { + writeManifestSync(openIncident) + } + } + appendSummaryLineSync(`${JSON.stringify(record)}\n`) + } catch { + // best-effort only in crash path + } + } + + const flushPending = async (): Promise => { + await pendingWrite + } + + const installProcessHandlers = (): void => { + if (input.registerProcessHandlers === false || handlersRegistered) return + handlersRegistered = true + + const handleSignal = (signal: NodeJS.Signals) => { + if (signalInFlight) return + signalInFlight = true + captureProcessFailureSync("process_failure", { + authMode: "codex", + outcome: signal.toLowerCase(), + status: 499 + }) + const signalHandler = signalHandlers[signal] + if (signalHandler) { + process.removeListener(signal, signalHandler) + } + try { + process.kill(process.pid, signal) + } catch { + // best-effort only + } + } + + const beforeExitHandler = () => { + void flushPending() + } + const uncaughtExceptionMonitorHandler = (error: Error) => { + captureProcessFailureSync("process_failure", { + authMode: "codex", + outcome: "uncaught_exception", + status: 500, + errorName: error.name + }) + } + + const signalHandlers: Partial void>> = { + SIGINT: () => handleSignal("SIGINT"), + SIGTERM: () => handleSignal("SIGTERM"), + SIGHUP: () => handleSignal("SIGHUP"), + SIGBREAK: () => handleSignal("SIGBREAK") + } + + process.on("beforeExit", beforeExitHandler) + process.on("uncaughtExceptionMonitor", uncaughtExceptionMonitorHandler) + process.on("SIGINT", signalHandlers.SIGINT ?? (() => {})) + process.on("SIGTERM", signalHandlers.SIGTERM ?? 
(() => {})) + if (process.platform === "win32") { + process.on("SIGBREAK", signalHandlers.SIGBREAK ?? (() => {})) + } else { + process.on("SIGHUP", signalHandlers.SIGHUP ?? (() => {})) + } + } + + installProcessHandlers() + + return { + enabled: true, + async emitRotationBegin(event) { + await emitEvent("rotation_begin", { + authMode: event.authMode, + rotationStrategy: event.rotationStrategy, + totalAccounts: event.totalAccounts, + enabledAccounts: event.enabledAccounts, + activeIdentity: pseudonym("ident", event.activeIdentityKey), + session: pseudonym("sess", event.sessionKey) + }) + }, + async emitRotationDecision(event) { + await emitEvent("rotation_decision", { + authMode: event.authMode, + rotationStrategy: event.rotationStrategy, + decision: event.decision, + totalCount: event.totalCount, + disabledCount: event.disabledCount, + cooldownCount: event.cooldownCount, + refreshLeaseCount: event.refreshLeaseCount, + eligibleCount: event.eligibleCount, + attemptedCount: event.attemptedCount, + selectedIndex: event.selectedIndex, + selectedIdentity: pseudonym("ident", event.selectedIdentityKey), + activeIdentity: pseudonym("ident", event.activeIdentityKey), + session: pseudonym("sess", event.sessionKey), + attempt: pseudonym("attempt", event.attemptKey) + }) + }, + async emitRotationCandidateSelected(event) { + await emitEvent("rotation_candidate_selected", { + authMode: event.authMode, + selectedIndex: event.selectedIndex, + selectedEnabled: event.selectedEnabled, + selectedCooldownUntil: event.selectedCooldownUntil, + selectedExpires: event.selectedExpires, + selectedIdentity: pseudonym("ident", event.selectedIdentityKey), + attempt: pseudonym("attempt", event.attemptKey) + }) + }, + async emitFetchAttemptRequest(event) { + await emitEvent("fetch_attempt_request", { + authMode: event.authMode, + rotationStrategy: event.rotationStrategy, + attempt: event.attempt, + maxAttempts: event.maxAttempts, + attemptReasonCode: event.attemptReasonCode, + method: 
event.request.method.toUpperCase(), + endpoint: normalizeEndpoint(event.request.url), + selectedIdentity: pseudonym("ident", event.selectedIdentityKey), + activeIdentity: pseudonym("ident", event.activeIdentityKey), + session: pseudonym("sess", event.sessionKey), + promptCacheKey: pseudonym("pck", await extractPromptCacheKey(event.request)) + }) + }, + async emitFetchAttemptResponse(event) { + await emitEvent("fetch_attempt_response", { + authMode: event.authMode, + rotationStrategy: event.rotationStrategy, + attempt: event.attempt, + maxAttempts: event.maxAttempts, + attemptReasonCode: event.attemptReasonCode, + endpoint: normalizeEndpoint(event.endpoint), + status: event.status, + selectedIdentity: pseudonym("ident", event.selectedIdentityKey), + activeIdentity: pseudonym("ident", event.activeIdentityKey), + session: pseudonym("sess", event.sessionKey) + }) + }, + async emitRetryAfter429(event) { + await emitEvent("retry_after_429", { + authMode: event.authMode, + rotationStrategy: event.rotationStrategy, + attempt: event.attempt, + maxAttempts: event.maxAttempts, + attemptReasonCode: event.attemptReasonCode, + selectedIdentity: pseudonym("ident", event.selectedIdentityKey), + activeIdentity: pseudonym("ident", event.activeIdentityKey), + session: pseudonym("sess", event.sessionKey) + }) + }, + async emitAuthFailure(event) { + await emitEvent("auth_failure", { + authMode: event.authMode, + outcome: event.outcome, + status: event.status, + waitMs: event.waitMs, + selectedIdentity: pseudonym("ident", event.selectedIdentityKey), + activeIdentity: pseudonym("ident", event.activeIdentityKey), + session: pseudonym("sess", event.sessionKey) + }) + }, + async emitSyntheticFatalError(event) { + await emitEvent("synthetic_fatal_error", { + authMode: event.authMode, + outcome: event.outcome, + status: event.status, + endpoint: normalizeEndpoint(event.endpoint), + selectedIdentity: pseudonym("ident", event.selectedIdentityKey), + activeIdentity: pseudonym("ident", 
event.activeIdentityKey), + session: pseudonym("sess", event.sessionKey) + }) + } + } +} diff --git a/schemas/codex-config.schema.json b/schemas/codex-config.schema.json index fa9bda1..a766cbb 100644 --- a/schemas/codex-config.schema.json +++ b/schemas/codex-config.schema.json @@ -53,6 +53,9 @@ "codexCompactionOverride": { "type": "boolean" }, + "shareableDebug": { + "type": "boolean" + }, "headerSnapshots": { "type": "boolean" }, diff --git a/scripts/test-mocking-allowlist.json b/scripts/test-mocking-allowlist.json index 0a2f6d6..5110339 100644 --- a/scripts/test-mocking-allowlist.json +++ b/scripts/test-mocking-allowlist.json @@ -6,6 +6,11 @@ "mock": 0, "stubGlobal": 0 }, + "test/acquire-auth-shareable-debug.test.ts": { + "doMock": 1, + "mock": 0, + "stubGlobal": 0 + }, "test/acquire-auth-locking.test.ts": { "doMock": 10, "mock": 0, @@ -67,7 +72,7 @@ "stubGlobal": 0 }, "test/codex-native-snapshots.test.ts": { - "doMock": 12, + "doMock": 13, "mock": 0, "stubGlobal": 0 }, @@ -92,7 +97,12 @@ "stubGlobal": 0 }, "test/openai-loader-fetch.prompt-cache-key.catalog-refresh.test.ts": { - "doMock": 4, + "doMock": 6, + "mock": 0, + "stubGlobal": 0 + }, + "test/openai-loader-fetch.shareable-debug.test.ts": { + "doMock": 6, "mock": 0, "stubGlobal": 0 }, diff --git a/test/acquire-auth-shareable-debug.test.ts b/test/acquire-auth-shareable-debug.test.ts new file mode 100644 index 0000000..b14afb8 --- /dev/null +++ b/test/acquire-auth-shareable-debug.test.ts @@ -0,0 +1,84 @@ +import { describe, expect, it, vi } from "vitest" + +describe("acquire auth shareable debug wiring", () => { + it("emits rotation events through the shareable debug logger", async () => { + vi.resetModules() + + const authState: Record = { + openai: { + type: "oauth", + native: { + strategy: "sticky", + accounts: [ + { + identityKey: "acc_1|user@example.com|plus", + accountId: "acc_1", + email: "user@example.com", + plan: "plus", + enabled: true, + access: "at_1", + refresh: "rt_1", + expires: Date.now() + 
60_000 + } + ], + activeIdentityKey: "acc_1|user@example.com|plus" + } + } + } + + vi.doMock("../lib/storage", () => ({ + loadAuthStorage: vi.fn(async () => structuredClone(authState)), + saveAuthStorage: vi.fn(async (_path: string | undefined, update: (auth: Record) => unknown) => { + await update(structuredClone(authState)) + return authState + }), + ensureOpenAIOAuthDomain: vi.fn((auth: Record, mode: "native" | "codex") => { + const openai = auth.openai as { native?: unknown; codex?: unknown } + return mode === "native" ? openai.native : openai.codex + }) + })) + + const { acquireOpenAIAuth, createAcquireOpenAIAuthInputDefaults } = await import("../lib/codex-native/acquire-auth") + const defaults = createAcquireOpenAIAuthInputDefaults() + const shareableDebug = { + enabled: true, + emitRotationBegin: vi.fn(async () => {}), + emitRotationDecision: vi.fn(async () => {}), + emitRotationCandidateSelected: vi.fn(async () => {}), + emitFetchAttemptRequest: vi.fn(async () => {}), + emitFetchAttemptResponse: vi.fn(async () => {}), + emitRetryAfter429: vi.fn(async () => {}), + emitAuthFailure: vi.fn(async () => {}), + emitSyntheticFatalError: vi.fn(async () => {}) + } + + const auth = await acquireOpenAIAuth({ + authMode: "native", + context: { sessionKey: "ses_trace_1" }, + isSubagentRequest: false, + stickySessionState: defaults.stickySessionState, + hybridSessionState: defaults.hybridSessionState, + seenSessionKeys: new Map(), + persistSessionAffinityState: () => {}, + pidOffsetEnabled: false, + shareableDebug + }) + + expect(auth.access).toBe("at_1") + expect(shareableDebug.emitRotationBegin).toHaveBeenCalledWith( + expect.objectContaining({ + authMode: "native", + rotationStrategy: "sticky", + activeIdentityKey: "acc_1|user@example.com|plus", + sessionKey: "ses_trace_1" + }) + ) + expect(shareableDebug.emitRotationDecision).toHaveBeenCalled() + expect(shareableDebug.emitRotationCandidateSelected).toHaveBeenCalledWith( + expect.objectContaining({ + 
selectedIdentityKey: "acc_1|user@example.com|plus" + }) + ) + expect(shareableDebug.emitAuthFailure).not.toHaveBeenCalled() + }) +}) diff --git a/test/codex-native-snapshots.test.ts b/test/codex-native-snapshots.test.ts index 50a19ef..741c4c1 100644 --- a/test/codex-native-snapshots.test.ts +++ b/test/codex-native-snapshots.test.ts @@ -497,6 +497,48 @@ describe("codex-native snapshots", () => { expect(seenInternalHeader).toBe("") }) + it("disables request snapshots when shareable debug is enabled", async () => { + vi.resetModules() + + const createRequestSnapshots = vi.fn(() => ({ + captureRequest: vi.fn(async () => {}), + captureResponse: vi.fn(async () => {}) + })) + vi.doMock("../lib/request-snapshots", () => ({ + createRequestSnapshots + })) + + const warn = vi.fn() + const { CodexAuthPlugin } = await import("../lib/codex-native") + + await CodexAuthPlugin({} as never, { + spoofMode: "codex", + shareableDebug: true, + headerSnapshots: true, + headerTransformDebug: true, + log: { + debug: vi.fn(), + info: vi.fn(), + warn, + error: vi.fn() + } + }) + + expect(createRequestSnapshots).toHaveBeenCalledWith( + expect.objectContaining({ + enabled: false, + captureBodies: false + }) + ) + expect(warn).toHaveBeenCalledWith( + "shareable debug disables request snapshot logging", + expect.objectContaining({ + headerSnapshots: true, + headerTransformDebug: true + }) + ) + }) + it("remaps non-permissions developer messages to user in codex mode by default", async () => { vi.resetModules() diff --git a/test/config-file-loading.test.ts b/test/config-file-loading.test.ts index 7b6b596..4dbb5df 100644 --- a/test/config-file-loading.test.ts +++ b/test/config-file-loading.test.ts @@ -30,6 +30,7 @@ describe("config file loading", () => { sanitizeInputs: true, developerMessagesToUser: true, codexCompactionOverride: true, + shareableDebug: true, headerSnapshots: true, headerSnapshotBodies: true, headerTransformDebug: true, @@ -77,6 +78,7 @@ describe("config file loading", () => { 
expect(loaded.compatInputSanitizer).toBe(true) expect(loaded.remapDeveloperMessagesToUser).toBe(true) expect(loaded.codexCompactionOverride).toBe(true) + expect(loaded.shareableDebug).toBe(true) expect(loaded.headerSnapshots).toBe(true) expect(loaded.headerSnapshotBodies).toBe(true) expect(loaded.headerTransformDebug).toBe(true) diff --git a/test/config-loading-resolve.test.ts b/test/config-loading-resolve.test.ts index d48e66b..2bbeb52 100644 --- a/test/config-loading-resolve.test.ts +++ b/test/config-loading-resolve.test.ts @@ -9,6 +9,7 @@ import { getDebugEnabled, getHeaderSnapshotBodiesEnabled, getHeaderSnapshotsEnabled, + getShareableDebugEnabled, getHeaderTransformDebugEnabled, getMode, getOrchestratorSubagentsEnabled, @@ -194,6 +195,11 @@ describe("config loading", () => { expect(getHeaderSnapshotBodiesEnabled(cfg)).toBe(true) }) + it("enables shareable debug from env flags", () => { + const cfg = resolveConfig({ env: { OPENCODE_OPENAI_MULTI_SHAREABLE_DEBUG: "1" } }) + expect(getShareableDebugEnabled(cfg)).toBe(true) + }) + it("parses collaboration profile gate from env", () => { const enabled = resolveConfig({ env: { diff --git a/test/config-schema.test.ts b/test/config-schema.test.ts index fa0b0a0..e9d72ad 100644 --- a/test/config-schema.test.ts +++ b/test/config-schema.test.ts @@ -52,4 +52,16 @@ describe("codex config schema", () => { expect(schema.$defs?.customModel?.required).toContain("targetModel") expect(schema.$defs?.customModel?.properties).toHaveProperty("variants") }) + + it("declares runtime.shareableDebug", () => { + const schema = JSON.parse(readFileSync(schemaPath, "utf8")) as { + properties?: { + runtime?: { + properties?: Record + } + } + } + + expect(schema.properties?.runtime?.properties).toHaveProperty("shareableDebug") + }) }) diff --git a/test/config-validation.test.ts b/test/config-validation.test.ts index 0ee3514..ad7e329 100644 --- a/test/config-validation.test.ts +++ b/test/config-validation.test.ts @@ -11,7 +11,8 @@ 
describe("config validation", () => { it("returns actionable issues for invalid known fields", () => { const result = validateConfigFileObject({ runtime: { - promptCacheKeyStrategy: "bad" + promptCacheKeyStrategy: "bad", + shareableDebug: "yes" }, global: { serviceTier: "turbo" @@ -19,8 +20,13 @@ describe("config validation", () => { }) expect(result.valid).toBe(false) - expect(result.issues[0]).toContain("runtime.promptCacheKeyStrategy") - expect(result.issues[1]).toContain("global.serviceTier") + expect(result.issues).toEqual( + expect.arrayContaining([ + expect.stringContaining("runtime.promptCacheKeyStrategy"), + expect.stringContaining("runtime.shareableDebug"), + expect.stringContaining("global.serviceTier") + ]) + ) }) it("reports precise custom model validation issues", () => { diff --git a/test/openai-loader-fetch.prompt-cache-key.catalog-refresh.test.ts b/test/openai-loader-fetch.prompt-cache-key.catalog-refresh.test.ts index 52957ec..3c06d84 100644 --- a/test/openai-loader-fetch.prompt-cache-key.catalog-refresh.test.ts +++ b/test/openai-loader-fetch.prompt-cache-key.catalog-refresh.test.ts @@ -58,6 +58,83 @@ describe("openai loader fetch prompt cache key (catalog refresh)", () => { expect(acquireOpenAIAuth).not.toHaveBeenCalled() }) + it("returns a synthetic error when rewritten outbound URL validation throws a non-plugin error", async () => { + vi.resetModules() + + const acquireOpenAIAuth = vi.fn(async () => ({ + access: "access-token", + accountId: "acc_123", + identityKey: "acc_123|user@example.com|plus" + })) + vi.doMock("../lib/codex-native/acquire-auth", () => ({ + acquireOpenAIAuth + })) + + let validateCount = 0 + vi.doMock("../lib/codex-native/request-routing", async () => { + const actual = await vi.importActual( + "../lib/codex-native/request-routing" + ) + return { + ...actual, + rewriteUrl: vi.fn((request: Request) => request.url), + assertAllowedOutboundUrl: vi.fn((_url: URL) => { + validateCount += 1 + if (validateCount === 2) { + throw 
new Error("rewritten url invalid") + } + }) + } + }) + + const { createOpenAIFetchHandler } = await import("../lib/codex-native/openai-loader-fetch") + const { createFetchOrchestratorState } = await import("../lib/fetch-orchestrator") + const { createStickySessionState } = await import("../lib/rotation") + + const fetchSpy = vi.fn(async () => new Response("ok", { status: 200 })) + stubGlobalForTest("fetch", fetchSpy) + + const handler = createOpenAIFetchHandler({ + authMode: "native", + spoofMode: "native", + remapDeveloperMessagesToUserEnabled: false, + quietMode: true, + pidOffsetEnabled: false, + headerTransformDebug: false, + compatInputSanitizerEnabled: false, + internalCollaborationModeHeader: "x-opencode-collaboration-mode-kind", + requestSnapshots: { + captureRequest: async () => {}, + captureResponse: async () => {} + }, + sessionAffinityState: { + orchestratorState: createFetchOrchestratorState(), + stickySessionState: createStickySessionState(), + hybridSessionState: createStickySessionState(), + persistSessionAffinityState: () => {} + }, + getCatalogModels: () => undefined, + syncCatalogFromAuth: async () => undefined, + setCooldown: async () => {}, + showToast: async () => {} + }) + + const response = await handler("https://api.openai.com/v1/responses", { + method: "POST", + headers: { + "content-type": "application/json", + session_id: "ses_outbound_validation_1" + }, + body: JSON.stringify({ model: "gpt-5.3-codex", input: "hello" }) + }) + + const payload = (await response.json()) as { error?: { type?: string } } + expect(response.status).toBe(400) + expect(payload.error?.type).toBe("disallowed_outbound_request") + expect(fetchSpy).not.toHaveBeenCalled() + expect(acquireOpenAIAuth).not.toHaveBeenCalled() + }) + it("dedupes catalog refresh across concurrent requests for the same account scope", async () => { vi.resetModules() diff --git a/test/openai-loader-fetch.shareable-debug.test.ts b/test/openai-loader-fetch.shareable-debug.test.ts new file mode 
100644 index 0000000..60fc421 --- /dev/null +++ b/test/openai-loader-fetch.shareable-debug.test.ts @@ -0,0 +1,485 @@ +import { afterEach, describe, expect, it, vi } from "vitest" + +import { resetStubbedGlobals, stubGlobalForTest } from "./helpers/mock-policy" + +afterEach(() => { + resetStubbedGlobals() +}) + +describe("openai loader shareable debug wiring", () => { + it("emits request, retry, and response events", async () => { + vi.resetModules() + + const auths = [ + { + access: "access_1", + identityKey: "acc_1|user1@example.com|plus", + accountId: "acc_1", + selectionTrace: { + strategy: "sticky", + selectedIdentityKey: "acc_1|user1@example.com|plus", + activeIdentityKey: "acc_1|user1@example.com|plus" + } + }, + { + access: "access_2", + identityKey: "acc_2|user2@example.com|pro", + accountId: "acc_2", + selectionTrace: { + strategy: "sticky", + selectedIdentityKey: "acc_2|user2@example.com|pro", + activeIdentityKey: "acc_2|user2@example.com|pro" + } + } + ] + let authIndex = 0 + const acquireOpenAIAuth = vi.fn(async () => auths[authIndex++]) + vi.doMock("../lib/codex-native/acquire-auth", () => ({ + acquireOpenAIAuth + })) + + const { createOpenAIFetchHandler } = await import("../lib/codex-native/openai-loader-fetch") + const { createFetchOrchestratorState } = await import("../lib/fetch-orchestrator") + const { createStickySessionState } = await import("../lib/rotation") + + const shareableDebug = { + enabled: true, + emitRotationBegin: vi.fn(async () => {}), + emitRotationDecision: vi.fn(async () => {}), + emitRotationCandidateSelected: vi.fn(async () => {}), + emitFetchAttemptRequest: vi.fn(async () => {}), + emitFetchAttemptResponse: vi.fn(async () => {}), + emitRetryAfter429: vi.fn(async () => {}), + emitAuthFailure: vi.fn(async () => {}), + emitSyntheticFatalError: vi.fn(async () => {}) + } + + let fetchCount = 0 + stubGlobalForTest( + "fetch", + vi.fn(async () => { + fetchCount += 1 + if (fetchCount === 1) { + return new Response("Rate limited", { 
status: 429, headers: { "Retry-After": "1" } }) + } + return new Response("ok", { status: 200 }) + }) + ) + + const handler = createOpenAIFetchHandler({ + authMode: "codex", + spoofMode: "codex", + remapDeveloperMessagesToUserEnabled: true, + promptCacheKeyStrategy: "default", + quietMode: true, + pidOffsetEnabled: false, + headerTransformDebug: false, + compatInputSanitizerEnabled: false, + internalCollaborationModeHeader: "x-opencode-collaboration-mode-kind", + requestSnapshots: { + captureRequest: vi.fn(async () => {}), + captureResponse: vi.fn(async () => {}) + }, + sessionAffinityState: { + orchestratorState: createFetchOrchestratorState(), + stickySessionState: createStickySessionState(), + hybridSessionState: createStickySessionState(), + persistSessionAffinityState: vi.fn(async () => {}) + }, + getCatalogModels: () => undefined, + syncCatalogFromAuth: vi.fn(async () => undefined), + setCooldown: vi.fn(async () => {}), + showToast: vi.fn(async () => {}), + shareableDebug + }) + + const response = await handler("https://api.openai.com/v1/responses", { + method: "POST", + headers: { + session_id: "ses_trace_1", + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "hi", + prompt_cache_key: "pck_trace_1" + }) + }) + + expect(response.status).toBe(200) + expect(shareableDebug.emitFetchAttemptRequest).toHaveBeenCalledTimes(2) + expect(shareableDebug.emitFetchAttemptResponse).toHaveBeenCalledTimes(2) + expect(shareableDebug.emitRetryAfter429).toHaveBeenCalledWith( + expect.objectContaining({ + authMode: "codex", + attempt: 2, + attemptReasonCode: "retry_switched_account_after_429", + selectedIdentityKey: "acc_2|user2@example.com|pro", + sessionKey: "ses_trace_1" + }) + ) + }) + + it("emits initial-attempt events without retry and falls back to selection metadata", async () => { + vi.resetModules() + + const acquireOpenAIAuth = vi.fn(async () => ({ + access: "access_1", + accountId: "acc_1", + selectionTrace: { + 
selectedIdentityKey: "acc_1|user1@example.com|plus", + activeIdentityKey: "acc_2|user2@example.com|pro" + } + })) + vi.doMock("../lib/codex-native/acquire-auth", () => ({ + acquireOpenAIAuth + })) + + const { createOpenAIFetchHandler } = await import("../lib/codex-native/openai-loader-fetch") + const { createFetchOrchestratorState } = await import("../lib/fetch-orchestrator") + const { createStickySessionState } = await import("../lib/rotation") + + const shareableDebug = { + enabled: true, + emitRotationBegin: vi.fn(async () => {}), + emitRotationDecision: vi.fn(async () => {}), + emitRotationCandidateSelected: vi.fn(async () => {}), + emitFetchAttemptRequest: vi.fn(async () => {}), + emitFetchAttemptResponse: vi.fn(async () => {}), + emitRetryAfter429: vi.fn(async () => {}), + emitAuthFailure: vi.fn(async () => {}), + emitSyntheticFatalError: vi.fn(async () => {}) + } + + stubGlobalForTest( + "fetch", + vi.fn(async () => { + const response = new Response("ok", { status: 200 }) + Object.defineProperty(response, "url", { + value: "https://chatgpt.com/backend-api/codex/responses" + }) + return response + }) + ) + + const handler = createOpenAIFetchHandler({ + authMode: "codex", + spoofMode: "codex", + remapDeveloperMessagesToUserEnabled: true, + promptCacheKeyStrategy: "default", + quietMode: true, + pidOffsetEnabled: false, + configuredRotationStrategy: "round_robin", + headerTransformDebug: false, + compatInputSanitizerEnabled: false, + internalCollaborationModeHeader: "x-opencode-collaboration-mode-kind", + requestSnapshots: { + captureRequest: vi.fn(async () => {}), + captureResponse: vi.fn(async () => {}) + }, + sessionAffinityState: { + orchestratorState: createFetchOrchestratorState(), + stickySessionState: createStickySessionState(), + hybridSessionState: createStickySessionState(), + persistSessionAffinityState: vi.fn(async () => {}) + }, + getCatalogModels: () => undefined, + syncCatalogFromAuth: vi.fn(async () => undefined), + setCooldown: vi.fn(async () 
=> {}), + showToast: vi.fn(async () => {}), + shareableDebug + }) + + const response = await handler("https://api.openai.com/v1/responses", { + method: "POST", + headers: { + session_id: "ses_trace_2", + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "hi", + prompt_cache_key: "pck_trace_2" + }) + }) + + expect(response.status).toBe(200) + expect(shareableDebug.emitFetchAttemptRequest).toHaveBeenCalledWith( + expect.objectContaining({ + authMode: "codex", + attempt: 1, + attemptReasonCode: "initial_attempt", + rotationStrategy: "round_robin", + selectedIdentityKey: "acc_1|user1@example.com|plus", + activeIdentityKey: "acc_2|user2@example.com|pro", + sessionKey: "ses_trace_2" + }) + ) + expect(shareableDebug.emitFetchAttemptResponse).toHaveBeenCalledWith( + expect.objectContaining({ + authMode: "codex", + attempt: 1, + attemptReasonCode: "initial_attempt", + rotationStrategy: "round_robin", + selectedIdentityKey: "acc_1|user1@example.com|plus", + activeIdentityKey: "acc_2|user2@example.com|pro", + sessionKey: "ses_trace_2" + }) + ) + expect(shareableDebug.emitRetryAfter429).not.toHaveBeenCalled() + }) + + it("emits synthetic fatal events for plugin fatal responses", async () => { + vi.resetModules() + + const acquireOpenAIAuth = vi.fn(async () => ({ + access: "access_1", + accountId: "acc_1", + identityKey: "acc_1|user1@example.com|plus" + })) + vi.doMock("../lib/codex-native/acquire-auth", () => ({ + acquireOpenAIAuth + })) + + const { createOpenAIFetchHandler } = await import("../lib/codex-native/openai-loader-fetch") + const { createFetchOrchestratorState } = await import("../lib/fetch-orchestrator") + const { createStickySessionState } = await import("../lib/rotation") + + const shareableDebug = { + enabled: true, + emitRotationBegin: vi.fn(async () => {}), + emitRotationDecision: vi.fn(async () => {}), + emitRotationCandidateSelected: vi.fn(async () => {}), + emitFetchAttemptRequest: vi.fn(async () => {}), + 
emitFetchAttemptResponse: vi.fn(async () => {}), + emitRetryAfter429: vi.fn(async () => {}), + emitAuthFailure: vi.fn(async () => {}), + emitSyntheticFatalError: vi.fn(async () => {}) + } + + const handler = createOpenAIFetchHandler({ + authMode: "codex", + spoofMode: "codex", + remapDeveloperMessagesToUserEnabled: true, + promptCacheKeyStrategy: "default", + quietMode: true, + pidOffsetEnabled: false, + headerTransformDebug: false, + compatInputSanitizerEnabled: false, + internalCollaborationModeHeader: "x-opencode-collaboration-mode-kind", + requestSnapshots: { + captureRequest: vi.fn(async () => {}), + captureResponse: vi.fn(async () => {}) + }, + sessionAffinityState: { + orchestratorState: createFetchOrchestratorState(), + stickySessionState: createStickySessionState(), + hybridSessionState: createStickySessionState(), + persistSessionAffinityState: vi.fn(async () => {}) + }, + getCatalogModels: () => undefined, + syncCatalogFromAuth: vi.fn(async () => undefined), + setCooldown: vi.fn(async () => {}), + showToast: vi.fn(async () => {}), + shareableDebug + }) + + const response = await handler("not a valid outbound url") + expect(response.status).toBe(400) + expect(shareableDebug.emitSyntheticFatalError).toHaveBeenCalledWith( + expect.objectContaining({ + authMode: "codex", + outcome: "disallowed_outbound_request", + status: 400, + endpoint: "not a valid outbound url" + }) + ) + expect(acquireOpenAIAuth).not.toHaveBeenCalled() + }) + + it("emits synthetic fatal events when rewritten outbound validation fails", async () => { + vi.resetModules() + + const acquireOpenAIAuth = vi.fn(async () => ({ + access: "access_1", + accountId: "acc_1", + identityKey: "acc_1|user1@example.com|plus" + })) + vi.doMock("../lib/codex-native/acquire-auth", () => ({ + acquireOpenAIAuth + })) + + let validateCount = 0 + vi.doMock("../lib/codex-native/request-routing", async () => { + const actual = await vi.importActual( + "../lib/codex-native/request-routing" + ) + return { + 
...actual, + rewriteUrl: vi.fn((request: Request) => request.url), + assertAllowedOutboundUrl: vi.fn((_url: URL) => { + validateCount += 1 + if (validateCount === 2) { + throw new Error("rewritten url invalid") + } + }) + } + }) + + const { createOpenAIFetchHandler } = await import("../lib/codex-native/openai-loader-fetch") + const { createFetchOrchestratorState } = await import("../lib/fetch-orchestrator") + const { createStickySessionState } = await import("../lib/rotation") + + const shareableDebug = { + enabled: true, + emitRotationBegin: vi.fn(async () => {}), + emitRotationDecision: vi.fn(async () => {}), + emitRotationCandidateSelected: vi.fn(async () => {}), + emitFetchAttemptRequest: vi.fn(async () => {}), + emitFetchAttemptResponse: vi.fn(async () => {}), + emitRetryAfter429: vi.fn(async () => {}), + emitAuthFailure: vi.fn(async () => {}), + emitSyntheticFatalError: vi.fn(async () => {}) + } + + const handler = createOpenAIFetchHandler({ + authMode: "codex", + spoofMode: "codex", + remapDeveloperMessagesToUserEnabled: true, + promptCacheKeyStrategy: "default", + quietMode: true, + pidOffsetEnabled: false, + headerTransformDebug: false, + compatInputSanitizerEnabled: false, + internalCollaborationModeHeader: "x-opencode-collaboration-mode-kind", + requestSnapshots: { + captureRequest: vi.fn(async () => {}), + captureResponse: vi.fn(async () => {}) + }, + sessionAffinityState: { + orchestratorState: createFetchOrchestratorState(), + stickySessionState: createStickySessionState(), + hybridSessionState: createStickySessionState(), + persistSessionAffinityState: vi.fn(async () => {}) + }, + getCatalogModels: () => undefined, + syncCatalogFromAuth: vi.fn(async () => undefined), + setCooldown: vi.fn(async () => {}), + showToast: vi.fn(async () => {}), + shareableDebug + }) + + const response = await handler("https://api.openai.com/v1/responses", { + method: "POST", + headers: { + session_id: "ses_trace_invalid_rewrite", + "Content-Type": "application/json" + }, 
+ body: JSON.stringify({ + model: "gpt-5.4", + input: "hi" + }) + }) + + expect(response.status).toBe(400) + expect(shareableDebug.emitSyntheticFatalError).toHaveBeenCalledWith( + expect.objectContaining({ + authMode: "codex", + outcome: "disallowed_outbound_request", + status: 400, + endpoint: "https://api.openai.com/v1/responses" + }) + ) + }) + + it("emits synthetic fatal events when upstream fetch fails unexpectedly", async () => { + vi.resetModules() + + const acquireOpenAIAuth = vi.fn(async () => ({ + access: "access_1", + accountId: "acc_1", + identityKey: "acc_1|user1@example.com|plus", + selectionTrace: { + selectedIdentityKey: "acc_1|user1@example.com|plus", + activeIdentityKey: "acc_1|user1@example.com|plus" + } + })) + vi.doMock("../lib/codex-native/acquire-auth", () => ({ + acquireOpenAIAuth + })) + + const { createOpenAIFetchHandler } = await import("../lib/codex-native/openai-loader-fetch") + const { createFetchOrchestratorState } = await import("../lib/fetch-orchestrator") + const { createStickySessionState } = await import("../lib/rotation") + + const shareableDebug = { + enabled: true, + emitRotationBegin: vi.fn(async () => {}), + emitRotationDecision: vi.fn(async () => {}), + emitRotationCandidateSelected: vi.fn(async () => {}), + emitFetchAttemptRequest: vi.fn(async () => {}), + emitFetchAttemptResponse: vi.fn(async () => {}), + emitRetryAfter429: vi.fn(async () => {}), + emitAuthFailure: vi.fn(async () => {}), + emitSyntheticFatalError: vi.fn(async () => {}) + } + + stubGlobalForTest( + "fetch", + vi.fn(async () => { + throw new Error("socket hang up") + }) + ) + + const handler = createOpenAIFetchHandler({ + authMode: "codex", + spoofMode: "codex", + remapDeveloperMessagesToUserEnabled: true, + promptCacheKeyStrategy: "default", + quietMode: true, + pidOffsetEnabled: false, + headerTransformDebug: false, + compatInputSanitizerEnabled: false, + internalCollaborationModeHeader: "x-opencode-collaboration-mode-kind", + requestSnapshots: { + 
captureRequest: vi.fn(async () => {}), + captureResponse: vi.fn(async () => {}) + }, + sessionAffinityState: { + orchestratorState: createFetchOrchestratorState(), + stickySessionState: createStickySessionState(), + hybridSessionState: createStickySessionState(), + persistSessionAffinityState: vi.fn(async () => {}) + }, + getCatalogModels: () => undefined, + syncCatalogFromAuth: vi.fn(async () => undefined), + setCooldown: vi.fn(async () => {}), + showToast: vi.fn(async () => {}), + shareableDebug + }) + + const response = await handler("https://api.openai.com/v1/responses", { + method: "POST", + headers: { + session_id: "ses_trace_fetch_failure", + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "hi" + }) + }) + + expect(response.status).toBe(502) + expect(shareableDebug.emitSyntheticFatalError).toHaveBeenCalledWith( + expect.objectContaining({ + authMode: "codex", + outcome: "plugin_fetch_failed", + status: 502, + endpoint: "https://api.openai.com/v1/responses", + selectedIdentityKey: "acc_1|user1@example.com|plus", + activeIdentityKey: "acc_1|user1@example.com|plus" + }) + ) + }) +}) diff --git a/test/shareable-debug.test.ts b/test/shareable-debug.test.ts new file mode 100644 index 0000000..3395f4a --- /dev/null +++ b/test/shareable-debug.test.ts @@ -0,0 +1,1398 @@ +import fs from "node:fs/promises" +import os from "node:os" +import path from "node:path" + +import { describe, expect, it } from "vitest" + +import { createShareableDebugLogger } from "../lib/shareable-debug" + +describe("shareable debug logger", () => { + it("writes shareable events with stable pseudonyms and without raw secrets", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-debug-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const logger = createShareableDebugLogger({ + enabled: true, + filePath, + registerProcessHandlers: false + }) + + const request = new 
Request("https://api.openai.com/v1/responses?access_token=at_secret", { + method: "POST", + headers: { + Authorization: "Bearer super-secret-token", + Cookie: "sid=session-cookie", + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: "pck_secret_123" + }) + }) + + await logger.emitRotationBegin({ + authMode: "codex", + rotationStrategy: "sticky", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_sensitive_1", + totalAccounts: 2, + enabledAccounts: 2 + }) + await logger.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_sensitive_1" + }) + await logger.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/v1/responses?access_token=at_secret", + status: 200, + selectedIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_sensitive_1" + }) + + const raw = await fs.readFile(filePath, "utf8") + const lines = raw + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record) + + expect(lines).toHaveLength(3) + expect(lines[0]?.event).toBe("rotation_begin") + expect(lines[1]?.event).toBe("fetch_attempt_request") + expect(lines[2]?.event).toBe("fetch_attempt_response") + + expect(lines[0]?.activeIdentity).toMatch(/^ident_[0-9a-f]{8}$/) + expect(lines[0]?.session).toMatch(/^sess_[0-9a-f]{8}$/) + expect(lines[1]?.selectedIdentity).toBe(lines[0]?.activeIdentity) + expect(lines[1]?.activeIdentity).toBe(lines[0]?.activeIdentity) + expect(lines[1]?.session).toBe(lines[0]?.session) + expect(lines[1]?.promptCacheKey).toMatch(/^pck_[0-9a-f]{8}$/) + expect(lines[2]?.session).toBe(lines[0]?.session) + expect(lines[1]?.endpoint).toBe("/v1/responses") + 
expect(lines[2]?.endpoint).toBe("/v1/responses") + + expect(raw).not.toContain("user@example.com") + expect(raw).not.toContain("acc_1|user@example.com|pro") + expect(raw).not.toContain("ses_sensitive_1") + expect(raw).not.toContain("pck_secret_123") + expect(raw).not.toContain("super private prompt") + expect(raw).not.toContain("super-secret-token") + expect(raw).not.toContain("access_token=at_secret") + expect(raw).not.toContain("session-cookie") + }) + + it("captures a bounded incident file around a trigger response", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-incident-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const logger = createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 2, + postTriggerEventCount: 2, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + Authorization: "Bearer super-secret-token", + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + await logger.emitRotationBegin({ + authMode: "codex", + rotationStrategy: "sticky", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_sensitive_1", + totalAccounts: 2, + enabledAccounts: 2 + }) + await logger.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("pck_before_1"), + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_sensitive_1" + }) + await logger.emitFetchAttemptResponse({ 
+ authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 429, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_sensitive_1" + }) + await logger.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + request: buildRequest("pck_after_1"), + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_sensitive_1" + }) + await logger.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_sensitive_1" + }) + + const incidentDir = path.join(stateDir, "incidents") + const incidents = await fs.readdir(incidentDir) + expect(incidents).toHaveLength(1) + + const incidentRaw = await fs.readFile(path.join(incidentDir, incidents[0] ?? 
""), "utf8") + const incidentLines = incidentRaw + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record) + + expect(incidentLines.map((line) => line.event)).toEqual([ + "rotation_begin", + "fetch_attempt_request", + "fetch_attempt_response", + "fetch_attempt_request", + "fetch_attempt_response", + "incident_closed" + ]) + expect(incidentRaw).not.toContain("user@example.com") + expect(incidentRaw).not.toContain("super private prompt") + expect(incidentRaw).not.toContain("super-secret-token") + expect(incidentRaw).not.toContain("pck_before_1") + const summaryRaw = await fs.readFile(filePath, "utf8") + expect(summaryRaw).not.toContain(root) + await expect(fs.access(path.join(stateDir, "incident-state.json"))).rejects.toThrow() + }) + + it("seals a live trigger capture incomplete when the retained prelude is already truncated", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-live-incomplete-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const segmentsDir = path.join(stateDir, "segments") + + const logger = createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + await logger.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + selectedIdentityKey: "acc_1|user@example.com|pro", + 
activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_live_incomplete_1" + }) + + const [segmentFile] = await fs.readdir(segmentsDir) + await fs.writeFile(path.join(segmentsDir, segmentFile ?? ""), "", { mode: 0o600 }) + + await logger.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_live_incomplete_1" + }) + + const incidentsDir = path.join(stateDir, "incidents") + const [incidentFile] = await fs.readdir(incidentsDir) + const incidentRaw = await fs.readFile(path.join(incidentsDir, incidentFile ?? ""), "utf8") + expect(incidentRaw).toContain('"event":"incident_closed"') + expect(incidentRaw).toContain('"incomplete":true') + + const logger2 = createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + await logger2.emitRotationCandidateSelected({ + authMode: "codex", + selectedIdentityKey: "acc_2|user@example.com|team", + selectedIndex: 1, + selectedEnabled: true + }) + + const incidentFilesAfterRestart = await fs.readdir(incidentsDir) + expect(incidentFilesAfterRestart).toHaveLength(1) + }) + + it("recovers an open incident after restart and closes it when the post window completes", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-recover-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + 
incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 2, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (attemptReasonCode: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: `pck_${attemptReasonCode}` + }) + }) + + const logger1 = createLogger() + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_sensitive_restart" + }) + await logger1.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_sensitive_restart" + }) + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + request: buildRequest("after_one"), + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_sensitive_restart" + }) + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_sensitive_restart" + }) + + 
const incidentDir = path.join(stateDir, "incidents") + const incidents = await fs.readdir(incidentDir) + expect(incidents).toHaveLength(1) + + const incidentRaw = await fs.readFile(path.join(incidentDir, incidents[0] ?? ""), "utf8") + expect(incidentRaw).toContain('"event":"incident_closed"') + + const summaryRaw = await fs.readFile(filePath, "utf8") + expect(summaryRaw).toContain('"event":"incident_recovered"') + expect(summaryRaw).not.toContain(root) + await expect(fs.access(path.join(stateDir, "incident-state.json"))).rejects.toThrow() + }) + + it("ignores a torn trailing segment line during restart recovery", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-torn-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const segmentDir = path.join(stateDir, "segments") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const logger1 = createLogger() + await logger1.emitRotationBegin({ + authMode: "codex", + rotationStrategy: "sticky", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_torn_1", + totalAccounts: 2, + enabledAccounts: 2 + }) + + const segmentFiles = await fs.readdir(segmentDir) + expect(segmentFiles).toHaveLength(1) + await fs.appendFile(path.join(segmentDir, segmentFiles[0] ?? 
""), '{"seq":999,"event":"partial"') + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 429, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_torn_1" + }) + + const incidentDir = path.join(stateDir, "incidents") + const incidents = await fs.readdir(incidentDir) + expect(incidents).toHaveLength(1) + + const incidentRaw = await fs.readFile(path.join(incidentDir, incidents[0] ?? ""), "utf8") + expect(incidentRaw).toContain('"event":"rotation_begin"') + expect(incidentRaw).toContain('"event":"fetch_attempt_response"') + expect(incidentRaw).not.toContain('"event":"partial"') + }) + + it("resumes sequence numbering from the latest segment filename after a torn first line", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-seq-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const segmentDir = path.join(stateDir, "segments") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const logger1 = createLogger() + await logger1.emitRotationBegin({ + authMode: "codex", + rotationStrategy: "sticky", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_seq_1", + totalAccounts: 2, + enabledAccounts: 2 + }) + + await fs.writeFile(path.join(segmentDir, "segment-0000000000000002.jsonl"), '{"seq":2', { mode: 0o600 }) + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + 
attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 429, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_seq_1" + }) + + const summaryLines = (await fs.readFile(filePath, "utf8")) + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record) + + expect(summaryLines.at(-1)?.seq).toBe(2) + + const latestSegmentRaw = await fs.readFile(path.join(segmentDir, "segment-0000000000000002.jsonl"), "utf8") + expect(latestSegmentRaw).toContain('"seq":2') + expect(latestSegmentRaw).not.toContain('{"seq":2{"seq":2') + }) + + it("reconciles a stale incident manifest against persisted post-trigger events on restart", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-manifest-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const manifestPath = path.join(stateDir, "incident-state.json") + const segmentsDir = path.join(stateDir, "segments") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 2, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + const logger1 = createLogger() + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + 
selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_manifest_1" + }) + await logger1.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_manifest_1" + }) + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + request: buildRequest("after_one"), + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_manifest_1" + }) + + const staleManifest = JSON.parse(await fs.readFile(manifestPath, "utf8")) as Record + staleManifest.postRemaining = 2 + await fs.writeFile(manifestPath, `${JSON.stringify(staleManifest, null, 2)}\n`, { mode: 0o600 }) + + const incidentsDir = path.join(stateDir, "incidents") + const [incidentFile] = await fs.readdir(incidentsDir) + const incidentPath = path.join(incidentsDir, incidentFile ?? "") + const [segmentFile] = await fs.readdir(segmentsDir) + const segmentPath = path.join(segmentsDir, segmentFile ?? 
"") + const segmentRows = (await fs.readFile(segmentPath, "utf8")) + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record) + + await fs.writeFile( + segmentPath, + `${segmentRows + .filter((row) => typeof row.seq === "number" && row.seq >= 2) + .map((row) => JSON.stringify(row)) + .join("\n")}\n`, + { mode: 0o600 } + ) + await fs.appendFile(incidentPath, '{"seq":999,"event":"partial"') + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_manifest_1" + }) + + const incidentRaw = await fs.readFile(incidentPath, "utf8") + const incidentLines = incidentRaw + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record) + + expect(incidentLines.map((line) => line.event)).toEqual([ + "fetch_attempt_request", + "fetch_attempt_response", + "fetch_attempt_request", + "fetch_attempt_response", + "incident_closed" + ]) + expect(incidentRaw).not.toContain('"event":"partial"') + expect(incidentRaw).not.toContain('{"seq":999') + + const summaryRaw = await fs.readFile(filePath, "utf8") + expect(summaryRaw).toContain('"event":"incident_recovered"') + await expect(fs.access(manifestPath)).rejects.toThrow() + }) + + it("rebuilds a missing incident prelude from segments when the incident file was left empty", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-empty-incident-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + 
postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + const logger1 = createLogger() + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_empty_incident_1" + }) + await logger1.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_empty_incident_1" + }) + + const incidentsDir = path.join(stateDir, "incidents") + const [incidentFile] = await fs.readdir(incidentsDir) + const incidentPath = path.join(incidentsDir, incidentFile ?? 
"") + await fs.writeFile(incidentPath, "", { mode: 0o600 }) + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_empty_incident_1" + }) + + const incidentRaw = await fs.readFile(incidentPath, "utf8") + const incidentLines = incidentRaw + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record) + + expect(incidentLines.map((line) => line.event)).toEqual([ + "fetch_attempt_request", + "fetch_attempt_response", + "fetch_attempt_response", + "incident_closed" + ]) + }) + + it("rebuilds a missing incident file from the segment buffer", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-missing-file-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + const logger1 = createLogger() + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + selectedIdentityKey: 
"acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_missing_file_1" + }) + await logger1.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_missing_file_1" + }) + + const incidentsDir = path.join(stateDir, "incidents") + const [incidentFile] = await fs.readdir(incidentsDir) + const incidentPath = path.join(incidentsDir, incidentFile ?? "") + await fs.unlink(incidentPath) + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_missing_file_1" + }) + + const rebuiltRaw = await fs.readFile(incidentPath, "utf8") + expect(rebuiltRaw).toContain('"event":"fetch_attempt_request"') + expect(rebuiltRaw).toContain('"event":"fetch_attempt_response"') + expect(rebuiltRaw).toContain('"event":"incident_closed"') + }) + + it("recovers post-trigger events from segments when the incident file missed them", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-post-recover-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const manifestPath = path.join(stateDir, "incident-state.json") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 2, + segmentMaxBytes: 220, + 
rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + const logger1 = createLogger() + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_post_recover_1" + }) + await logger1.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_post_recover_1" + }) + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + request: buildRequest("after_one"), + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_post_recover_1" + }) + + const incidentsDir = path.join(stateDir, "incidents") + const [incidentFile] = await fs.readdir(incidentsDir) + const incidentPath = path.join(incidentsDir, incidentFile ?? 
"") + const incidentRows = (await fs.readFile(incidentPath, "utf8")) + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record) + + await fs.writeFile( + incidentPath, + `${incidentRows + .filter((row) => typeof row.seq === "number" && row.seq <= 2) + .map((row) => JSON.stringify(row)) + .join("\n")}\n`, + { mode: 0o600 } + ) + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_post_recover_1" + }) + + const recoveredIncidentRaw = await fs.readFile(incidentPath, "utf8") + const recoveredIncidentLines = recoveredIncidentRaw + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record) + + expect(recoveredIncidentLines.map((line) => line.event)).toEqual([ + "fetch_attempt_request", + "fetch_attempt_response", + "fetch_attempt_request", + "fetch_attempt_response", + "incident_closed" + ]) + await expect(fs.access(manifestPath)).rejects.toThrow() + }) + + it("seals the incident incomplete when the pre-trigger window cannot be reconstructed", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-incomplete-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const segmentsDir = path.join(stateDir, "segments") + const manifestPath = path.join(stateDir, "incident-state.json") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const 
buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + const logger1 = createLogger() + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_incomplete_1" + }) + await logger1.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_incomplete_1" + }) + + const incidentsDir = path.join(stateDir, "incidents") + const [incidentFile] = await fs.readdir(incidentsDir) + const incidentPath = path.join(incidentsDir, incidentFile ?? "") + await fs.writeFile(incidentPath, "", { mode: 0o600 }) + + const [segmentFile] = await fs.readdir(segmentsDir) + await fs.writeFile(path.join(segmentsDir, segmentFile ?? 
""), "", { mode: 0o600 }) + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_incomplete_1" + }) + + const incidentRaw = await fs.readFile(incidentPath, "utf8") + expect(incidentRaw).toContain('"event":"incident_closed"') + expect(incidentRaw).toContain('"incomplete":true') + + const summaryRaw = await fs.readFile(filePath, "utf8") + expect(summaryRaw).toContain('"event":"incident_closed"') + expect(summaryRaw).toContain('"incomplete":true') + await expect(fs.access(manifestPath)).rejects.toThrow() + }) + + it("preserves existing incident evidence when sealing an unrecoverable capture", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-preserve-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const segmentsDir = path.join(stateDir, "segments") + + const createLogger = (registerProcessHandlers = false) => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers, + incidentConfig: { + preTriggerEventCount: 2, + postTriggerEventCount: 2, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + const logger1 = createLogger() + await logger1.emitRotationBegin({ + authMode: "codex", + rotationStrategy: 
"sticky", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_preserve_1", + totalAccounts: 2, + enabledAccounts: 2 + }) + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_preserve_1" + }) + await logger1.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_preserve_1" + }) + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + request: buildRequest("after"), + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_preserve_1" + }) + + const incidentsDir = path.join(stateDir, "incidents") + const [incidentFile] = await fs.readdir(incidentsDir) + const incidentPath = path.join(incidentsDir, incidentFile ?? "") + const incidentRows = (await fs.readFile(incidentPath, "utf8")) + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record) + + await fs.writeFile( + incidentPath, + `${incidentRows + .filter((row) => typeof row.seq === "number" && row.seq >= 2) + .map((row) => JSON.stringify(row)) + .join("\n")}\n`, + { mode: 0o600 } + ) + + const [segmentFile] = await fs.readdir(segmentsDir) + await fs.writeFile(path.join(segmentsDir, segmentFile ?? 
""), "", { mode: 0o600 }) + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_preserve_1" + }) + + const preservedRaw = await fs.readFile(incidentPath, "utf8") + expect(preservedRaw).toContain('"event":"fetch_attempt_request"') + expect(preservedRaw).toContain('"event":"fetch_attempt_response"') + expect(preservedRaw).toContain('"event":"incident_closed"') + expect(preservedRaw).toContain('"incomplete":true') + }) + + it("captures crash-path incidents with buffered context and leaves them recoverable", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-crash-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const manifestPath = path.join(stateDir, "incident-state.json") + + const createLogger = (registerProcessHandlers = false) => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const beforeExitListeners = new Set(process.listeners("beforeExit")) + const uncaughtListeners = new Set(process.listeners("uncaughtExceptionMonitor")) + const sigintListeners = new Set(process.listeners("SIGINT")) + const sigtermListeners = new Set(process.listeners("SIGTERM")) + const extraSignal = process.platform === "win32" ? 
"SIGBREAK" : "SIGHUP" + const extraSignalListeners = new Set(process.listeners(extraSignal)) + + const logger1 = createLogger(true) + await logger1.emitRotationBegin({ + authMode: "codex", + rotationStrategy: "sticky", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_crash_1", + totalAccounts: 2, + enabledAccounts: 2 + }) + + const addedBeforeExit = process.listeners("beforeExit").filter((listener) => !beforeExitListeners.has(listener)) + const addedUncaught = process + .listeners("uncaughtExceptionMonitor") + .filter((listener) => !uncaughtListeners.has(listener)) + const addedSigint = process.listeners("SIGINT").filter((listener) => !sigintListeners.has(listener)) + const addedSigterm = process.listeners("SIGTERM").filter((listener) => !sigtermListeners.has(listener)) + const addedExtraSignal = process.listeners(extraSignal).filter((listener) => !extraSignalListeners.has(listener)) + + try { + expect(addedUncaught).toHaveLength(1) + ;(addedUncaught[0] as (error: Error) => void)(new Error("boom")) + + const incidentsDir = path.join(stateDir, "incidents") + const [incidentFile] = await fs.readdir(incidentsDir) + const incidentPath = path.join(incidentsDir, incidentFile ?? 
"") + const crashRaw = await fs.readFile(incidentPath, "utf8") + expect(crashRaw).toContain('"event":"rotation_begin"') + expect(crashRaw).toContain('"event":"process_failure"') + expect(crashRaw).not.toContain('"event":"incident_closed"') + await fs.access(manifestPath) + + const logger2 = createLogger() + await logger2.emitRotationCandidateSelected({ + authMode: "codex", + selectedIdentityKey: "acc_2|user@example.com|team", + selectedIndex: 1, + selectedEnabled: true + }) + + const recoveredRaw = await fs.readFile(incidentPath, "utf8") + expect(recoveredRaw).toContain('"event":"incident_closed"') + await expect(fs.access(manifestPath)).rejects.toThrow() + } finally { + for (const listener of addedBeforeExit) process.removeListener("beforeExit", listener) + for (const listener of addedUncaught) process.removeListener("uncaughtExceptionMonitor", listener) + for (const listener of addedSigint) process.removeListener("SIGINT", listener) + for (const listener of addedSigterm) process.removeListener("SIGTERM", listener) + for (const listener of addedExtraSignal) process.removeListener(extraSignal, listener) + } + }) + + it("recovers a trigger from segments when the manifest was never written", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-manifestless-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const manifestPath = path.join(stateDir, "incident-state.json") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": 
"application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + const logger1 = createLogger() + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_manifestless_1" + }) + await logger1.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_manifestless_1" + }) + + const incidentsDir = path.join(stateDir, "incidents") + for (const name of await fs.readdir(incidentsDir)) { + await fs.unlink(path.join(incidentsDir, name)) + } + await fs.unlink(manifestPath) + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_manifestless_1" + }) + + const recoveredFiles = await fs.readdir(incidentsDir) + expect(recoveredFiles).toHaveLength(1) + const recoveredRaw = await fs.readFile(path.join(incidentsDir, recoveredFiles[0] ?? 
""), "utf8") + expect(recoveredRaw).toContain('"event":"fetch_attempt_response"') + expect(recoveredRaw).toContain('"event":"incident_closed"') + }) + + it("seals manifestless recovery incomplete when the retained prelude is truncated", async () => { + const root = await fs.mkdtemp(path.join(os.tmpdir(), "opencode-shareable-manifestless-incomplete-")) + const filePath = path.join(root, "shareable-debug.jsonl") + const stateDir = path.join(root, "shareable-debug-state") + const manifestPath = path.join(stateDir, "incident-state.json") + const segmentsDir = path.join(stateDir, "segments") + + const createLogger = () => + createShareableDebugLogger({ + enabled: true, + filePath, + stateDir, + registerProcessHandlers: false, + incidentConfig: { + preTriggerEventCount: 1, + postTriggerEventCount: 1, + segmentMaxBytes: 220, + rollingBufferMaxBytes: 8_192, + maxIncidentFiles: 4, + maxIncidentBytes: 8_192 + } + }) + + const buildRequest = (promptCacheKey: string) => + new Request("https://api.openai.com/backend-api/codex/responses", { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + model: "gpt-5.4", + input: "super private prompt", + prompt_cache_key: promptCacheKey + }) + }) + + const logger1 = createLogger() + await logger1.emitFetchAttemptRequest({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + request: buildRequest("before"), + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_manifestless_incomplete_1" + }) + await logger1.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 1, + maxAttempts: 3, + attemptReasonCode: "initial_attempt", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 403, + selectedIdentityKey: "acc_1|user@example.com|pro", + activeIdentityKey: "acc_1|user@example.com|pro", + sessionKey: "ses_manifestless_incomplete_1" + }) + + const incidentsDir = 
path.join(stateDir, "incidents") + for (const name of await fs.readdir(incidentsDir)) { + await fs.unlink(path.join(incidentsDir, name)) + } + await fs.unlink(manifestPath) + + const [segmentFile] = await fs.readdir(segmentsDir) + const segmentPath = path.join(segmentsDir, segmentFile ?? "") + const segmentRows = (await fs.readFile(segmentPath, "utf8")) + .trim() + .split("\n") + .map((line) => JSON.parse(line) as Record<string, unknown>) + + await fs.writeFile( + segmentPath, + `${segmentRows + .filter((row) => typeof row.seq === "number" && row.seq >= 2) + .map((row) => JSON.stringify(row)) + .join("\n")}\n`, + { mode: 0o600 } + ) + + const logger2 = createLogger() + await logger2.emitFetchAttemptResponse({ + authMode: "codex", + attempt: 2, + maxAttempts: 3, + attemptReasonCode: "retry_switched_account_after_429", + endpoint: "https://api.openai.com/backend-api/codex/responses", + status: 200, + selectedIdentityKey: "acc_2|user@example.com|team", + activeIdentityKey: "acc_2|user@example.com|team", + sessionKey: "ses_manifestless_incomplete_1" + }) + + const recoveredFiles = await fs.readdir(incidentsDir) + expect(recoveredFiles).toHaveLength(1) + const recoveredRaw = await fs.readFile(path.join(incidentsDir, recoveredFiles[0] ?? ""), "utf8") + expect(recoveredRaw).toContain('"event":"incident_closed"') + expect(recoveredRaw).toContain('"incomplete":true') + + const logger3 = createLogger() + await logger3.emitRotationCandidateSelected({ + authMode: "codex", + selectedIdentityKey: "acc_2|user@example.com|team", + selectedIndex: 1, + selectedEnabled: true + }) + + const recoveredFilesAfterRestart = await fs.readdir(incidentsDir) + expect(recoveredFilesAfterRestart).toHaveLength(1) + }) +})