diff --git a/index.ts b/index.ts index 7db8808..5f6a487 100644 --- a/index.ts +++ b/index.ts @@ -422,6 +422,10 @@ export const OpenAIOAuthPlugin: Plugin = async ({ client }: PluginInput) => { }; }; + const resolveOAuthFetchTimeoutMs = (): number => { + return getFetchTimeoutMs(loadPluginConfig()); + }; + const buildManualOAuthFlow = ( pkce: { verifier: string }, url: string, @@ -460,10 +464,12 @@ export const OpenAIOAuthPlugin: Plugin = async ({ client }: PluginInput) => { message: "OAuth state mismatch. Restart login and try again.", }; } + const oauthFetchTimeoutMs = resolveOAuthFetchTimeoutMs(); const tokens = await exchangeAuthorizationCode( parsed.code, pkce.verifier, REDIRECT_URI, + { timeoutMs: oauthFetchTimeoutMs }, ); if (tokens?.type === "success") { const resolved = resolveAccountSelection(tokens); @@ -509,10 +515,12 @@ export const OpenAIOAuthPlugin: Plugin = async ({ client }: PluginInput) => { return { type: "failed" as const, reason: "unknown" as const, message: "OAuth callback timeout or cancelled" }; } + const oauthFetchTimeoutMs = resolveOAuthFetchTimeoutMs(); return await exchangeAuthorizationCode( result.code, pkce.verifier, REDIRECT_URI, + { timeoutMs: oauthFetchTimeoutMs }, ); }; diff --git a/lib/auth/auth.ts b/lib/auth/auth.ts index 591a68e..2de1090 100644 --- a/lib/auth/auth.ts +++ b/lib/auth/auth.ts @@ -11,6 +11,7 @@ export const AUTHORIZE_URL = "https://auth.openai.com/oauth/authorize"; export const TOKEN_URL = "https://auth.openai.com/oauth/token"; export const REDIRECT_URI = "http://localhost:1455/auth/callback"; export const SCOPE = "openid profile email offline_access"; +const DEFAULT_OAUTH_EXCHANGE_TIMEOUT_MS = 60_000; const OAUTH_SENSITIVE_QUERY_PARAMS = [ "state", @@ -111,50 +112,134 @@ export function parseAuthorizationInput(input: string): ParsedAuthInput { * @param redirectUri - OAuth redirect URI * @returns Token result */ +export type ExchangeAuthorizationCodeOptions = { + signal?: AbortSignal; + timeoutMs?: number; +}; + 
+function resolveExchangeTimeoutMs(timeoutMs: number | undefined): number { + if (typeof timeoutMs !== "number" || !Number.isFinite(timeoutMs)) { + return DEFAULT_OAUTH_EXCHANGE_TIMEOUT_MS; + } + return Math.max(1_000, Math.floor(timeoutMs)); +} + +function createAbortError(message: string): Error & { code?: string } { + const error = new Error(message) as Error & { code?: string }; + error.name = "AbortError"; + error.code = "ABORT_ERR"; + return error; +} + +function buildExchangeAbortContext( + options: ExchangeAuthorizationCodeOptions, +): { signal: AbortSignal; cleanup: () => void } { + const controller = new AbortController(); + const timeoutMs = resolveExchangeTimeoutMs(options.timeoutMs); + const upstreamSignal = options.signal; + let timeoutId: ReturnType | undefined; + + const onUpstreamAbort = () => { + const reason = upstreamSignal?.reason; + controller.abort( + reason instanceof Error ? reason : createAbortError("Request aborted"), + ); + }; + + if (upstreamSignal) { + upstreamSignal.addEventListener("abort", onUpstreamAbort, { once: true }); + if (upstreamSignal.aborted) { + onUpstreamAbort(); + } + } + + if (!controller.signal.aborted) { + timeoutId = setTimeout(() => { + controller.abort( + createAbortError( + `OAuth token exchange timed out after ${timeoutMs}ms`, + ), + ); + }, timeoutMs); + } + + return { + signal: controller.signal, + cleanup: () => { + if (timeoutId !== undefined) { + clearTimeout(timeoutId); + } + if (upstreamSignal) { + upstreamSignal.removeEventListener("abort", onUpstreamAbort); + } + }, + }; +} + export async function exchangeAuthorizationCode( code: string, verifier: string, redirectUri: string = REDIRECT_URI, + options: ExchangeAuthorizationCodeOptions = {}, ): Promise { - const res = await fetch(TOKEN_URL, { - method: "POST", - headers: { "Content-Type": "application/x-www-form-urlencoded" }, - body: new URLSearchParams({ - grant_type: "authorization_code", - client_id: CLIENT_ID, - code, - code_verifier: verifier, - 
redirect_uri: redirectUri, - }), - }); - if (!res.ok) { - const text = await res.text().catch(() => ""); - logError(`code->token failed: ${res.status} ${text}`); - return { type: "failed", reason: "http_error", statusCode: res.status, message: text || undefined }; - } - const rawJson = (await res.json()) as unknown; - const json = safeParseOAuthTokenResponse(rawJson); - if (!json) { - logError("token response validation failed", getOAuthResponseLogMetadata(rawJson)); - return { type: "failed", reason: "invalid_response", message: "Response failed schema validation" }; - } - if (!json.refresh_token || json.refresh_token.trim().length === 0) { - logError("token response missing refresh token", getOAuthResponseLogMetadata(rawJson)); + const abortContext = buildExchangeAbortContext(options); + try { + const res = await fetch(TOKEN_URL, { + method: "POST", + headers: { "Content-Type": "application/x-www-form-urlencoded" }, + signal: abortContext.signal, + body: new URLSearchParams({ + grant_type: "authorization_code", + client_id: CLIENT_ID, + code, + code_verifier: verifier, + redirect_uri: redirectUri, + }), + }); + if (!res.ok) { + const text = await res.text().catch(() => ""); + logError("code->token failed", { status: res.status, bodyLength: text.length }); + return { + type: "failed", + reason: "http_error", + statusCode: res.status, + message: text ? 
"OAuth token exchange failed" : undefined, + }; + } + const rawJson = (await res.json()) as unknown; + const json = safeParseOAuthTokenResponse(rawJson); + if (!json) { + logError("token response validation failed", getOAuthResponseLogMetadata(rawJson)); + return { type: "failed", reason: "invalid_response", message: "Response failed schema validation" }; + } + if (!json.refresh_token || json.refresh_token.trim().length === 0) { + logError("token response missing refresh token", getOAuthResponseLogMetadata(rawJson)); + return { + type: "failed", + reason: "invalid_response", + message: "Missing refresh token in authorization code exchange response", + }; + } + const normalizedRefreshToken = json.refresh_token.trim(); return { - type: "failed", - reason: "invalid_response", - message: "Missing refresh token in authorization code exchange response", + type: "success", + access: json.access_token, + refresh: normalizedRefreshToken, + expires: Date.now() + json.expires_in * 1000, + idToken: json.id_token, + multiAccount: true, }; + } catch (error) { + const err = error as Error; + if (abortContext.signal.aborted || isAbortError(err)) { + logError("code->token aborted", { message: err?.message ?? "Request aborted" }); + return { type: "failed", reason: "unknown", message: err?.message ?? "Request aborted" }; + } + logError("code->token error", { message: err?.message ?? String(err) }); + return { type: "failed", reason: "network_error", message: err?.message ?? 
"Network request failed" }; + } finally { + abortContext.cleanup(); } - const normalizedRefreshToken = json.refresh_token.trim(); - return { - type: "success", - access: json.access_token, - refresh: normalizedRefreshToken, - expires: Date.now() + json.expires_in * 1000, - idToken: json.id_token, - multiAccount: true, - }; } /** @@ -206,8 +291,13 @@ export async function refreshAccessToken( if (!response.ok) { const text = await response.text().catch(() => ""); - logError(`Token refresh failed: ${response.status} ${text}`); - return { type: "failed", reason: "http_error", statusCode: response.status, message: text || undefined }; + logError("Token refresh failed", { status: response.status, bodyLength: text.length }); + return { + type: "failed", + reason: "http_error", + statusCode: response.status, + message: text ? "Token refresh failed" : undefined, + }; } const rawJson = (await response.json()) as unknown; diff --git a/lib/auto-update-checker.ts b/lib/auto-update-checker.ts index a948c60..a3c5c27 100644 --- a/lib/auto-update-checker.ts +++ b/lib/auto-update-checker.ts @@ -1,6 +1,7 @@ import { readFileSync, writeFileSync, existsSync, mkdirSync } from "node:fs"; import { join } from "node:path"; import { createLogger } from "./logger.js"; +import { fetchWithTimeoutAndRetry } from "./network.js"; import { getCodexCacheDir } from "./runtime-paths.js"; const log = createLogger("update-checker"); @@ -10,6 +11,9 @@ const NPM_REGISTRY_URL = `https://registry.npmjs.org/${PACKAGE_NAME}/latest`; const CACHE_DIR = getCodexCacheDir(); const CACHE_FILE = join(CACHE_DIR, "update-check-cache.json"); const CHECK_INTERVAL_MS = 24 * 60 * 60 * 1000; +const UPDATE_FETCH_TIMEOUT_MS = 5_000; +const UPDATE_FETCH_RETRIES = 2; +const UPDATE_FETCH_RETRYABLE_STATUSES = [429, 500, 502, 503, 504] as const; interface UpdateCheckCache { lastCheck: number; @@ -164,15 +168,29 @@ function compareVersions(current: string, latest: string): number { async function fetchLatestVersion(): Promise { 
try { - const controller = new AbortController(); - const timeout = setTimeout(() => controller.abort(), 5000); - - const response = await fetch(NPM_REGISTRY_URL, { - signal: controller.signal, - headers: { Accept: "application/json" }, - }); - - clearTimeout(timeout); + const { response, attempts, durationMs } = await fetchWithTimeoutAndRetry( + NPM_REGISTRY_URL, + { + headers: { Accept: "application/json" }, + }, + { + timeoutMs: UPDATE_FETCH_TIMEOUT_MS, + retries: UPDATE_FETCH_RETRIES, + retryOnStatuses: UPDATE_FETCH_RETRYABLE_STATUSES, + baseDelayMs: 100, + maxDelayMs: 1_000, + jitterMs: 100, + onRetry: (retry) => { + log.debug("Retrying npm update check", retry); + }, + }, + ); + if (attempts > 1) { + log.debug("Recovered npm update check after retries", { + attempts, + durationMs, + }); + } if (!response.ok) { log.debug("Failed to fetch npm registry", { status: response.status }); diff --git a/lib/codex-manager.ts b/lib/codex-manager.ts index 794eb7c..ee4c22c 100644 --- a/lib/codex-manager.ts +++ b/lib/codex-manager.ts @@ -23,6 +23,7 @@ import { selectBestAccountCandidate, } from "./accounts.js"; import { ACCOUNT_LIMITS } from "./constants.js"; +import { getFetchTimeoutMs, loadPluginConfig } from "./config.js"; import { loadDashboardDisplaySettings, DEFAULT_DASHBOARD_DISPLAY_SETTINGS, @@ -1251,7 +1252,14 @@ async function runOAuthFlow(forceNewLogin: boolean): Promise { message: UI_COPY.oauth.cancelled, }; } - return exchangeAuthorizationCode(code, pkce.verifier, REDIRECT_URI); + const authPluginConfig = loadPluginConfig(); + const oauthFetchTimeoutMs = getFetchTimeoutMs(authPluginConfig); + return exchangeAuthorizationCode( + code, + pkce.verifier, + REDIRECT_URI, + { timeoutMs: oauthFetchTimeoutMs }, + ); } async function persistAccountPool( diff --git a/lib/config.ts b/lib/config.ts index f9e7ecf..aff896e 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -34,6 +34,10 @@ const UNSUPPORTED_CODEX_POLICIES = new Set(["strict", "fallback"]); const 
emittedConfigWarnings = new Set(); const configSaveQueues = new Map>(); const RETRYABLE_FS_CODES = new Set(["EBUSY", "EPERM"]); +const CONFIG_READ_MAX_ATTEMPTS = 4; +const CONFIG_READ_RETRY_BASE_DELAY_MS = 10; +const pluginConfigShape = PluginConfigSchema.shape; +type PluginConfigShapeKey = keyof typeof pluginConfigShape; export type UnsupportedCodexPolicy = "strict" | "fallback"; @@ -193,7 +197,7 @@ export function loadPluginConfig(): PluginConfig { return { ...DEFAULT_PLUGIN_CONFIG }; } - const fileContent = readFileSync(configPath, "utf-8"); + const fileContent = readFileSyncWithRetry(configPath, "utf-8"); const normalizedFileContent = stripUtf8Bom(fileContent); userConfig = JSON.parse(normalizedFileContent) as unknown; sourceKind = "file"; @@ -222,6 +226,7 @@ export function loadPluginConfig(): PluginConfig { `Plugin config validation warnings: ${schemaErrors.slice(0, 3).join(", ")}`, ); } + const sanitizedConfig = sanitizePluginConfigRecord(userConfig); if ( sourceKind === "file" && @@ -235,7 +240,7 @@ export function loadPluginConfig(): PluginConfig { return { ...DEFAULT_PLUGIN_CONFIG, - ...(userConfig as Partial), + ...sanitizedConfig, }; } catch (error) { const configPath = resolvePluginConfigPath() ?? 
CONFIG_PATH; @@ -268,11 +273,46 @@ function isRecord(value: unknown): value is Record { return value !== null && typeof value === "object" && !Array.isArray(value); } +function isPluginConfigShapeKey(key: string): key is PluginConfigShapeKey { + return Object.hasOwn(pluginConfigShape, key); +} + +function sanitizePluginConfigRecord(userConfig: unknown): Partial { + if (!isRecord(userConfig)) return {}; + const sanitized: Record = {}; + for (const [key, value] of Object.entries(userConfig)) { + if (!isPluginConfigShapeKey(key)) continue; + const parsed = pluginConfigShape[key].safeParse(value); + if (parsed.success && parsed.data !== undefined) { + sanitized[key] = parsed.data; + } + } + return sanitized as Partial; +} + function isRetryableFsError(error: unknown): boolean { const code = (error as NodeJS.ErrnoException | undefined)?.code; return typeof code === "string" && RETRYABLE_FS_CODES.has(code); } +function sleepSync(ms: number): void { + Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms); +} + +function readFileSyncWithRetry(path: string, encoding: BufferEncoding): string { + for (let attempt = 0; attempt < CONFIG_READ_MAX_ATTEMPTS; attempt += 1) { + try { + return readFileSync(path, encoding); + } catch (error) { + if (!isRetryableFsError(error) || attempt >= CONFIG_READ_MAX_ATTEMPTS - 1) { + throw error; + } + sleepSync(CONFIG_READ_RETRY_BASE_DELAY_MS * 2 ** attempt); + } + } + throw new Error(`Failed to read config file after ${CONFIG_READ_MAX_ATTEMPTS} attempts`); +} + function sleep(ms: number): Promise { return new Promise((resolve) => setTimeout(resolve, ms)); } @@ -333,7 +373,7 @@ async function withConfigSaveLock(path: string, task: () => Promise): Prom function readConfigRecordFromPath(configPath: string): Record | null { if (!existsSync(configPath)) return null; try { - const fileContent = readFileSync(configPath, "utf-8"); + const fileContent = readFileSyncWithRetry(configPath, "utf-8"); const normalizedFileContent = 
stripUtf8Bom(fileContent); const parsed = JSON.parse(normalizedFileContent) as unknown; return isRecord(parsed) ? parsed : null; diff --git a/lib/network.ts b/lib/network.ts new file mode 100644 index 0000000..6896be3 --- /dev/null +++ b/lib/network.ts @@ -0,0 +1,194 @@ +import { isAbortError, sleep } from "./utils.js"; + +export interface RetryAttemptInfo { + attempt: number; + maxAttempts: number; + delayMs: number; + reason: "error" | "status" | "timeout"; + status?: number; + errorType?: string; +} + +export interface ResilientFetchOptions { + timeoutMs: number; + retries?: number; + baseDelayMs?: number; + maxDelayMs?: number; + jitterMs?: number; + retryOnStatuses?: readonly number[]; + signal?: AbortSignal; + onRetry?: (info: RetryAttemptInfo) => void; +} + +export interface ResilientFetchResult { + response: Response; + attempts: number; + durationMs: number; +} + +const DEFAULT_BASE_DELAY_MS = 250; +const DEFAULT_MAX_DELAY_MS = 5_000; +const DEFAULT_JITTER_MS = 100; + +function createAbortError(message: string): Error { + const error = new Error(message); + error.name = "AbortError"; + return error; +} + +function isCallerAbort(error: unknown, callerSignal: AbortSignal | undefined): boolean { + if (!callerSignal?.aborted) return false; + if (isAbortError(error)) return true; + if (callerSignal.reason !== undefined) { + return error === callerSignal.reason; + } + return false; +} + +function getRetryErrorType(error: unknown): string { + if (isAbortError(error)) return "AbortError"; + if (error instanceof Error && error.name) return error.name; + return typeof error; +} + +function getAbortReason(signal: AbortSignal): Error { + if (signal.reason instanceof Error) { + return signal.reason; + } + return createAbortError("Request aborted by caller"); +} + +function computeDelayMs( + attempt: number, + baseDelayMs: number, + maxDelayMs: number, + jitterMs: number, +): number { + const cappedBase = Math.max(0, Math.floor(baseDelayMs)); + const cappedMax = 
Math.max(0, Math.floor(maxDelayMs)); + const cappedJitter = Math.max(0, Math.floor(jitterMs)); + const exponential = Math.min(cappedMax, cappedBase * 2 ** Math.max(0, attempt - 1)); + const jitter = cappedJitter > 0 ? Math.floor(Math.random() * (cappedJitter + 1)) : 0; + return exponential + jitter; +} + +function shouldRetryStatus(status: number, retryOnStatuses: ReadonlySet): boolean { + return retryOnStatuses.has(status); +} + +function bindCallerAbortSignal( + callerSignal: AbortSignal | undefined, + controller: AbortController, +): (() => void) | null { + if (!callerSignal) return null; + if (callerSignal.aborted) { + controller.abort(callerSignal.reason); + return null; + } + const onAbort = () => controller.abort(callerSignal.reason); + callerSignal.addEventListener("abort", onAbort, { once: true }); + return () => callerSignal.removeEventListener("abort", onAbort); +} + +async function sleepWithAbort(delayMs: number, signal?: AbortSignal): Promise { + const normalizedDelayMs = Math.max(0, Math.floor(delayMs)); + if (normalizedDelayMs === 0) return; + if (!signal) { + await sleep(normalizedDelayMs); + return; + } + if (signal.aborted) { + throw getAbortReason(signal); + } + await new Promise((resolve, reject) => { + const timer = setTimeout(() => { + signal.removeEventListener("abort", onAbort); + resolve(); + }, normalizedDelayMs); + const onAbort = () => { + clearTimeout(timer); + signal.removeEventListener("abort", onAbort); + reject(getAbortReason(signal)); + }; + signal.addEventListener("abort", onAbort, { once: true }); + if (signal.aborted) { + onAbort(); + } + }); +} + +/** + * Execute a fetch request with a per-attempt timeout and bounded retry/backoff. + * Caller-provided abort signals are always honored and never retried. 
+ */ +export async function fetchWithTimeoutAndRetry( + input: URL | string | Request, + init: RequestInit = {}, + options: ResilientFetchOptions, +): Promise { + const timeoutMs = Math.max(1_000, Math.floor(options.timeoutMs)); + const maxAttempts = Math.max(1, Math.floor((options.retries ?? 0) + 1)); + const baseDelayMs = options.baseDelayMs ?? DEFAULT_BASE_DELAY_MS; + const maxDelayMs = options.maxDelayMs ?? DEFAULT_MAX_DELAY_MS; + const jitterMs = options.jitterMs ?? DEFAULT_JITTER_MS; + const retryOnStatuses = new Set(options.retryOnStatuses ?? []); + const startedAt = Date.now(); + let lastError: unknown = null; + + for (let attempt = 1; attempt <= maxAttempts; attempt += 1) { + const controller = new AbortController(); + const removeAbortListener = bindCallerAbortSignal(options.signal, controller); + const timeout = setTimeout(() => { + controller.abort(createAbortError(`Request timed out after ${timeoutMs}ms`)); + }, timeoutMs); + + try { + const response = await fetch(input, { ...init, signal: controller.signal }); + if (attempt < maxAttempts && shouldRetryStatus(response.status, retryOnStatuses)) { + const delayMs = computeDelayMs(attempt, baseDelayMs, maxDelayMs, jitterMs); + options.onRetry?.({ + attempt, + maxAttempts, + reason: "status", + status: response.status, + delayMs, + }); + await response.body?.cancel().catch(() => {}); + await sleepWithAbort(delayMs, options.signal); + continue; + } + return { + response, + attempts: attempt, + durationMs: Date.now() - startedAt, + }; + } catch (error) { + lastError = error; + if (isCallerAbort(error, options.signal)) { + throw error; + } + if (attempt >= maxAttempts) { + throw error; + } + const delayMs = computeDelayMs(attempt, baseDelayMs, maxDelayMs, jitterMs); + const retryReason: RetryAttemptInfo["reason"] = isAbortError(error) + ? 
"timeout" + : "error"; + options.onRetry?.({ + attempt, + maxAttempts, + reason: retryReason, + errorType: getRetryErrorType(error), + delayMs, + }); + await sleepWithAbort(delayMs, options.signal); + } finally { + clearTimeout(timeout); + removeAbortListener?.(); + } + } + + throw (lastError instanceof Error + ? lastError + : new Error("Request failed after all retry attempts")); +} diff --git a/lib/prompts/codex.ts b/lib/prompts/codex.ts index 434d0ad..237ba31 100644 --- a/lib/prompts/codex.ts +++ b/lib/prompts/codex.ts @@ -3,6 +3,7 @@ import { dirname, join } from "node:path"; import { fileURLToPath } from "node:url"; import type { CacheMetadata, GitHubRelease } from "../types.js"; import { logWarn, logError, logDebug } from "../logger.js"; +import { fetchWithTimeoutAndRetry } from "../network.js"; import { getCodexCacheDir } from "../runtime-paths.js"; const GITHUB_API_RELEASES = @@ -11,6 +12,12 @@ const GITHUB_HTML_RELEASES = "https://github.com/openai/codex/releases/latest"; const CACHE_DIR = getCodexCacheDir(); const CACHE_TTL_MS = 15 * 60 * 1000; +const GITHUB_FETCH_TIMEOUT_MS = 8_000; +const GITHUB_FETCH_RETRIES = 1; +const GITHUB_RETRYABLE_STATUSES = [429] as const; +const GITHUB_FETCH_RETRY_BASE_DELAY_MS = 20; +const GITHUB_FETCH_RETRY_MAX_DELAY_MS = 200; +const GITHUB_FETCH_RETRY_JITTER_MS = 10; const __filename = fileURLToPath(import.meta.url); const __dirname = dirname(__filename); @@ -142,7 +149,27 @@ async function getLatestReleaseTag(): Promise { } try { - const response = await fetch(GITHUB_API_RELEASES); + const { response, attempts, durationMs } = await fetchWithTimeoutAndRetry( + GITHUB_API_RELEASES, + undefined, + { + timeoutMs: GITHUB_FETCH_TIMEOUT_MS, + retries: GITHUB_FETCH_RETRIES, + retryOnStatuses: GITHUB_RETRYABLE_STATUSES, + baseDelayMs: GITHUB_FETCH_RETRY_BASE_DELAY_MS, + maxDelayMs: GITHUB_FETCH_RETRY_MAX_DELAY_MS, + jitterMs: GITHUB_FETCH_RETRY_JITTER_MS, + onRetry: (retry) => { + logDebug("Retrying GitHub release API fetch", 
retry); + }, + }, + ); + if (attempts > 1) { + logDebug("Recovered GitHub release API fetch after retries", { + attempts, + durationMs, + }); + } if (response.ok) { const data = (await response.json()) as GitHubRelease; if (data.tag_name) { @@ -157,7 +184,24 @@ async function getLatestReleaseTag(): Promise { // Fall through to HTML fallback } - const htmlResponse = await fetch(GITHUB_HTML_RELEASES); + const { response: htmlResponse, attempts: htmlAttempts, durationMs: htmlDurationMs } = + await fetchWithTimeoutAndRetry(GITHUB_HTML_RELEASES, undefined, { + timeoutMs: GITHUB_FETCH_TIMEOUT_MS, + retries: GITHUB_FETCH_RETRIES, + retryOnStatuses: GITHUB_RETRYABLE_STATUSES, + baseDelayMs: GITHUB_FETCH_RETRY_BASE_DELAY_MS, + maxDelayMs: GITHUB_FETCH_RETRY_MAX_DELAY_MS, + jitterMs: GITHUB_FETCH_RETRY_JITTER_MS, + onRetry: (retry) => { + logDebug("Retrying GitHub HTML release fetch", retry); + }, + }); + if (htmlAttempts > 1) { + logDebug("Recovered GitHub HTML release fetch after retries", { + attempts: htmlAttempts, + durationMs: htmlDurationMs, + }); + } if (!htmlResponse.ok) { throw new Error( `Failed to fetch latest release: ${htmlResponse.status}`, @@ -313,7 +357,31 @@ async function fetchAndPersistInstructions( headers["If-None-Match"] = cachedETag; } - const response = await fetch(instructionsUrl, { headers }); + const { response, attempts, durationMs } = await fetchWithTimeoutAndRetry( + instructionsUrl, + { headers }, + { + timeoutMs: GITHUB_FETCH_TIMEOUT_MS, + retries: GITHUB_FETCH_RETRIES, + retryOnStatuses: GITHUB_RETRYABLE_STATUSES, + baseDelayMs: GITHUB_FETCH_RETRY_BASE_DELAY_MS, + maxDelayMs: GITHUB_FETCH_RETRY_MAX_DELAY_MS, + jitterMs: GITHUB_FETCH_RETRY_JITTER_MS, + onRetry: (retry) => { + logDebug("Retrying Codex prompt download", { + url: instructionsUrl, + ...retry, + }); + }, + }, + ); + if (attempts > 1) { + logDebug("Recovered Codex prompt download after retries", { + url: instructionsUrl, + attempts, + durationMs, + }); + } if (response.status === 
304) { const diskContent = await readFileOrNull(cacheFile); if (diskContent) { diff --git a/lib/prompts/host-codex-prompt.ts b/lib/prompts/host-codex-prompt.ts index 9323ea9..8d3b5d0 100644 --- a/lib/prompts/host-codex-prompt.ts +++ b/lib/prompts/host-codex-prompt.ts @@ -8,6 +8,7 @@ import { join } from "node:path"; import { mkdir, readFile, writeFile, rename, rm } from "node:fs/promises"; import { logDebug } from "../logger.js"; +import { fetchWithTimeoutAndRetry } from "../network.js"; import { getCodexCacheDir } from "../runtime-paths.js"; import { sleep } from "../utils.js"; @@ -37,6 +38,12 @@ const LEGACY_CACHE_FILES: ReadonlyArray<{ content: string; meta: string }> = [ }, ]; const CACHE_TTL_MS = 15 * 60 * 1000; +const PROMPT_FETCH_TIMEOUT_MS = 8_000; +const PROMPT_FETCH_RETRIES = 1; +const PROMPT_RETRYABLE_STATUSES = [429] as const; +const PROMPT_FETCH_RETRY_BASE_DELAY_MS = 20; +const PROMPT_FETCH_RETRY_MAX_DELAY_MS = 200; +const PROMPT_FETCH_RETRY_JITTER_MS = 10; const RETRYABLE_FS_ERROR_CODES = new Set(["EBUSY", "EPERM"]); const WRITE_RETRY_ATTEMPTS = 5; const WRITE_RETRY_BASE_DELAY_MS = 10; @@ -278,7 +285,32 @@ async function refreshPrompt( let response: Response; try { - response = await fetch(sourceUrl, { headers }); + const fetchResult = await fetchWithTimeoutAndRetry( + sourceUrl, + { headers }, + { + timeoutMs: PROMPT_FETCH_TIMEOUT_MS, + retries: PROMPT_FETCH_RETRIES, + retryOnStatuses: PROMPT_RETRYABLE_STATUSES, + baseDelayMs: PROMPT_FETCH_RETRY_BASE_DELAY_MS, + maxDelayMs: PROMPT_FETCH_RETRY_MAX_DELAY_MS, + jitterMs: PROMPT_FETCH_RETRY_JITTER_MS, + onRetry: (retry) => { + logDebug("Retrying host-codex prompt source fetch", { + sourceUrl: redactSourceForLog(sourceUrl), + ...retry, + }); + }, + }, + ); + response = fetchResult.response; + if (fetchResult.attempts > 1) { + logDebug("Recovered host-codex prompt source fetch after retries", { + sourceUrl: redactSourceForLog(sourceUrl), + attempts: fetchResult.attempts, + durationMs: 
fetchResult.durationMs, + }); + } } catch (error) { lastFailure = `${redactSourceForLog(sourceUrl)}: ${String(error)}`; logDebug("Codex prompt source fetch failed", { diff --git a/lib/shutdown.ts b/lib/shutdown.ts index 6696562..6c7e342 100644 --- a/lib/shutdown.ts +++ b/lib/shutdown.ts @@ -2,6 +2,18 @@ type CleanupFn = () => void | Promise; const cleanupFunctions: CleanupFn[] = []; let shutdownRegistered = false; +let cleanupInFlight: Promise | null = null; +const DEFAULT_SHUTDOWN_TIMEOUT_MS = 8_000; +const MAX_SHUTDOWN_TIMEOUT_MS = 120_000; + +function getShutdownTimeoutMs(): number { + const raw = process.env.CODEX_AUTH_SHUTDOWN_TIMEOUT_MS; + const parsed = raw ? Number.parseInt(raw, 10) : DEFAULT_SHUTDOWN_TIMEOUT_MS; + if (!Number.isFinite(parsed) || parsed <= 0) { + return DEFAULT_SHUTDOWN_TIMEOUT_MS; + } + return Math.max(1_000, Math.min(parsed, MAX_SHUTDOWN_TIMEOUT_MS)); +} export function registerCleanup(fn: CleanupFn): void { cleanupFunctions.push(fn); @@ -16,23 +28,47 @@ export function unregisterCleanup(fn: CleanupFn): void { } export async function runCleanup(): Promise { + if (cleanupInFlight) { + await cleanupInFlight; + return; + } + const fns = [...cleanupFunctions]; cleanupFunctions.length = 0; + const timeoutMs = getShutdownTimeoutMs(); - for (const fn of fns) { - try { - await fn(); - } catch { - // Ignore cleanup errors during shutdown + const runner = (async () => { + for (const fn of fns) { + try { + await fn(); + } catch { + // Ignore cleanup errors during shutdown + } } - } + })(); + let timeoutHandle: ReturnType | null = null; + const timeoutPromise = new Promise((resolve) => { + timeoutHandle = setTimeout(resolve, timeoutMs); + }); + + cleanupInFlight = Promise.race([runner, timeoutPromise]).finally(() => { + if (timeoutHandle) { + clearTimeout(timeoutHandle); + } + cleanupInFlight = null; + }); + + await cleanupInFlight; } function ensureShutdownHandler(): void { if (shutdownRegistered) return; shutdownRegistered = true; + let 
signalHandled = false; const handleSignal = () => { + if (signalHandled) return; + signalHandled = true; void runCleanup().finally(() => { process.exit(0); }); diff --git a/test/auth-logging.test.ts b/test/auth-logging.test.ts index e34e5dc..2c64c48 100644 --- a/test/auth-logging.test.ts +++ b/test/auth-logging.test.ts @@ -5,7 +5,7 @@ vi.mock('../lib/logger.js', () => ({ })); import { logError } from '../lib/logger.js'; -import { exchangeAuthorizationCode } from '../lib/auth/auth.js'; +import { exchangeAuthorizationCode, REDIRECT_URI, refreshAccessToken } from '../lib/auth/auth.js'; describe('OAuth auth logging', () => { afterEach(() => { @@ -63,4 +63,96 @@ describe('OAuth auth logging', () => { globalThis.fetch = originalFetch; } }); + + it('logs timeout metadata when token exchange aborts', async () => { + const originalFetch = globalThis.fetch; + vi.useFakeTimers(); + globalThis.fetch = vi.fn((_url, init) => + new Promise((_resolve, reject) => { + const signal = init?.signal as AbortSignal | undefined; + if (signal?.aborted) { + reject(signal.reason); + return; + } + signal?.addEventListener( + 'abort', + () => { + reject(signal.reason); + }, + { once: true }, + ); + }), + ) as never; + + try { + const resultPromise = exchangeAuthorizationCode( + 'auth-code', + 'verifier-123', + REDIRECT_URI, + { timeoutMs: 1000 }, + ); + await vi.advanceTimersByTimeAsync(1000); + const result = await resultPromise; + expect(result.type).toBe('failed'); + + expect(vi.mocked(logError)).toHaveBeenCalledWith( + 'code->token aborted', + { message: 'OAuth token exchange timed out after 1000ms' }, + ); + } finally { + globalThis.fetch = originalFetch; + vi.useRealTimers(); + } + }); + + it('logs only sanitized metadata for HTTP token exchange failures', async () => { + const originalFetch = globalThis.fetch; + const rawBody = JSON.stringify({ + error: 'invalid_request', + refresh_token: 'secret-refresh-token', + access_token: 'secret-access-token', + }); + globalThis.fetch = 
vi.fn(async () => new Response(rawBody, { status: 400 })) as never; + + try { + const result = await exchangeAuthorizationCode('auth-code', 'verifier-123'); + expect(result.type).toBe('failed'); + if (result.type === 'failed') { + expect(result.reason).toBe('http_error'); + expect(result.statusCode).toBe(400); + expect(result.message).toBe('OAuth token exchange failed'); + } + expect(vi.mocked(logError)).toHaveBeenCalledWith('code->token failed', { + status: 400, + bodyLength: rawBody.length, + }); + } finally { + globalThis.fetch = originalFetch; + } + }); + + it('logs only sanitized metadata for HTTP refresh failures', async () => { + const originalFetch = globalThis.fetch; + const rawBody = JSON.stringify({ + error: 'invalid_grant', + refresh_token: 'secret-refresh-token', + }); + globalThis.fetch = vi.fn(async () => new Response(rawBody, { status: 400 })) as never; + + try { + const result = await refreshAccessToken('bad-refresh-token'); + expect(result.type).toBe('failed'); + if (result.type === 'failed') { + expect(result.reason).toBe('http_error'); + expect(result.statusCode).toBe(400); + expect(result.message).toBe('Token refresh failed'); + } + expect(vi.mocked(logError)).toHaveBeenCalledWith('Token refresh failed', { + status: 400, + bodyLength: rawBody.length, + }); + } finally { + globalThis.fetch = originalFetch; + } + }); }); diff --git a/test/auth.test.ts b/test/auth.test.ts index fe7affa..561d092 100644 --- a/test/auth.test.ts +++ b/test/auth.test.ts @@ -340,13 +340,226 @@ describe('Auth Module', () => { if (result.type === 'failed') { expect(result.reason).toBe('http_error'); expect(result.statusCode).toBe(400); - expect(result.message).toBe('Bad Request'); + expect(result.message).toBe('OAuth token exchange failed'); } } finally { globalThis.fetch = originalFetch; } }); + it('returns failed for network errors during code exchange', async () => { + const originalFetch = globalThis.fetch; + globalThis.fetch = vi.fn(async () => { + throw new 
Error('Network failed'); + }) as never; + + try { + const result = await exchangeAuthorizationCode('code', 'verifier'); + expect(result.type).toBe('failed'); + if (result.type === 'failed') { + expect(result.reason).toBe('network_error'); + expect(result.message).toBe('Network failed'); + } + } finally { + globalThis.fetch = originalFetch; + } + }); + + it('returns failed when code exchange times out', async () => { + const originalFetch = globalThis.fetch; + vi.useFakeTimers(); + globalThis.fetch = vi.fn((_url, init) => + new Promise((_resolve, reject) => { + const signal = init?.signal as AbortSignal | undefined; + if (signal?.aborted) { + reject(signal.reason); + return; + } + signal?.addEventListener( + 'abort', + () => { + reject(signal.reason); + }, + { once: true }, + ); + }), + ) as never; + + try { + const resultPromise = exchangeAuthorizationCode( + 'code', + 'verifier', + REDIRECT_URI, + { timeoutMs: 1000 }, + ); + await vi.advanceTimersByTimeAsync(1000); + const result = await resultPromise; + expect(result.type).toBe('failed'); + if (result.type === 'failed') { + expect(result.reason).toBe('unknown'); + expect(result.message).toContain('timed out'); + } + } finally { + globalThis.fetch = originalFetch; + vi.useRealTimers(); + } + }); + + it('short-circuits when upstream signal is already aborted', async () => { + const originalFetch = globalThis.fetch; + vi.useFakeTimers(); + const upstream = new AbortController(); + const abortReason = Object.assign(new Error('upstream-pre-aborted'), { + name: 'AbortError', + code: 'ABORT_ERR', + }); + const setTimeoutSpy = vi.spyOn(globalThis, 'setTimeout'); + upstream.abort(abortReason); + globalThis.fetch = vi.fn(async (_url, init) => { + const signal = init?.signal as AbortSignal | undefined; + if (signal?.aborted) { + throw signal.reason; + } + throw new Error('fetch should not continue with non-aborted signal'); + }) as never; + + try { + const result = await exchangeAuthorizationCode( + 'code', + 'verifier', + 
REDIRECT_URI, + { timeoutMs: 1_000, signal: upstream.signal }, + ); + expect(result.type).toBe('failed'); + if (result.type === 'failed') { + expect(result.reason).toBe('unknown'); + expect(result.message).toBe('upstream-pre-aborted'); + } + expect(setTimeoutSpy).not.toHaveBeenCalled(); + await vi.advanceTimersByTimeAsync(5_000); + expect(setTimeoutSpy).not.toHaveBeenCalled(); + } finally { + globalThis.fetch = originalFetch; + vi.useRealTimers(); + vi.restoreAllMocks(); + } + }); + + it('propagates mid-flight upstream abort reason and clears timeout', async () => { + const originalFetch = globalThis.fetch; + vi.useFakeTimers(); + const upstream = new AbortController(); + const setTimeoutSpy = vi.spyOn(globalThis, 'setTimeout'); + const clearTimeoutSpy = vi.spyOn(globalThis, 'clearTimeout'); + globalThis.fetch = vi.fn((_url, init) => + new Promise((_resolve, reject) => { + const signal = init?.signal as AbortSignal | undefined; + if (signal?.aborted) { + reject(signal.reason); + return; + } + signal?.addEventListener( + 'abort', + () => { + reject(signal.reason); + }, + { once: true }, + ); + }), + ) as never; + + try { + const resultPromise = exchangeAuthorizationCode( + 'code', + 'verifier', + REDIRECT_URI, + { timeoutMs: 5_000, signal: upstream.signal }, + ); + upstream.abort( + Object.assign(new Error('upstream-mid-flight'), { + name: 'AbortError', + code: 'ABORT_ERR', + }), + ); + const result = await resultPromise; + expect(result.type).toBe('failed'); + if (result.type === 'failed') { + expect(result.reason).toBe('unknown'); + expect(result.message).toBe('upstream-mid-flight'); + } + expect(setTimeoutSpy).toHaveBeenCalledTimes(1); + expect(clearTimeoutSpy).toHaveBeenCalledTimes(1); + } finally { + globalThis.fetch = originalFetch; + vi.useRealTimers(); + vi.restoreAllMocks(); + } + }); + + it('cleans upstream listeners and timers across repeated abortable exchanges', async () => { + const originalFetch = globalThis.fetch; + vi.useFakeTimers(); + const 
upstream = new AbortController(); + const addListenerSpy = vi.spyOn(upstream.signal, 'addEventListener'); + const removeListenerSpy = vi.spyOn(upstream.signal, 'removeEventListener'); + const setTimeoutSpy = vi.spyOn(globalThis, 'setTimeout'); + const clearTimeoutSpy = vi.spyOn(globalThis, 'clearTimeout'); + globalThis.fetch = vi.fn((_url, init) => + new Promise((_resolve, reject) => { + const signal = init?.signal as AbortSignal | undefined; + if (signal?.aborted) { + reject(signal.reason); + return; + } + signal?.addEventListener( + 'abort', + () => { + reject(signal.reason); + }, + { once: true }, + ); + }), + ) as never; + + try { + const first = exchangeAuthorizationCode( + 'code-1', + 'verifier-1', + REDIRECT_URI, + { timeoutMs: 2_000, signal: upstream.signal }, + ); + const second = exchangeAuthorizationCode( + 'code-2', + 'verifier-2', + REDIRECT_URI, + { timeoutMs: 2_000, signal: upstream.signal }, + ); + expect(addListenerSpy).toHaveBeenCalledTimes(2); + + upstream.abort(new Error('shared-upstream-abort')); + const [firstResult, secondResult] = await Promise.all([first, second]); + expect(firstResult.type).toBe('failed'); + expect(secondResult.type).toBe('failed'); + if (firstResult.type === 'failed') { + expect(firstResult.reason).toBe('unknown'); + } + if (secondResult.type === 'failed') { + expect(secondResult.reason).toBe('unknown'); + } + expect(setTimeoutSpy).toHaveBeenCalledTimes(2); + expect(clearTimeoutSpy).toHaveBeenCalledTimes(2); + expect(removeListenerSpy).toHaveBeenCalledTimes(2); + + const clearTimeoutCallCount = clearTimeoutSpy.mock.calls.length; + await vi.advanceTimersByTimeAsync(5_000); + expect(clearTimeoutSpy).toHaveBeenCalledTimes(clearTimeoutCallCount); + } finally { + globalThis.fetch = originalFetch; + vi.useRealTimers(); + vi.restoreAllMocks(); + } + }); + it('returns failed with undefined message when text read fails', async () => { const originalFetch = globalThis.fetch; const mockResponse = { @@ -444,6 +657,8 @@ describe('Auth 
Module', () => { expect(result.type).toBe('failed'); if (result.type === 'failed') { expect(result.reason).toBe('http_error'); + expect(result.statusCode).toBe(400); + expect(result.message).toBe('Token refresh failed'); } } finally { globalThis.fetch = originalFetch; diff --git a/test/auto-update-checker.test.ts b/test/auto-update-checker.test.ts index 6157233..29f1dd8 100644 --- a/test/auto-update-checker.test.ts +++ b/test/auto-update-checker.test.ts @@ -331,7 +331,9 @@ describe("auto-update-checker", () => { it("handles fetch failure gracefully", async () => { vi.mocked(globalThis.fetch).mockRejectedValue(new Error("Network error")); - const result = await checkForUpdates(true); + const pending = checkForUpdates(true); + await vi.runAllTimersAsync(); + const result = await pending; expect(result.hasUpdate).toBe(false); expect(result.latestVersion).toBe(null); @@ -343,12 +345,34 @@ describe("auto-update-checker", () => { status: 500, } as Response); - const result = await checkForUpdates(true); + const pending = checkForUpdates(true); + await vi.runAllTimersAsync(); + const result = await pending; expect(result.hasUpdate).toBe(false); expect(result.latestVersion).toBe(null); }); + it("retries retryable HTTP statuses and succeeds on a later attempt", async () => { + vi.mocked(globalThis.fetch) + .mockResolvedValueOnce(new Response("busy-1", { status: 500 })) + .mockResolvedValueOnce(new Response("busy-2", { status: 500 })) + .mockResolvedValueOnce( + new Response(JSON.stringify({ version: "5.0.1" }), { + status: 200, + headers: { "content-type": "application/json" }, + }), + ); + + const pending = checkForUpdates(true); + await vi.runAllTimersAsync(); + const result = await pending; + + expect(globalThis.fetch).toHaveBeenCalledTimes(3); + expect(result.latestVersion).toBe("5.0.1"); + expect(result.hasUpdate).toBe(true); + }); + it("saves cache after successful fetch", async () => { vi.mocked(globalThis.fetch).mockResolvedValue({ ok: true, @@ -457,7 +481,9 @@ 
describe("auto-update-checker", () => { vi.mocked(globalThis.fetch).mockRejectedValue(new Error("Network error")); const showToast = vi.fn().mockResolvedValue(undefined); - await expect(checkAndNotify(showToast)).resolves.toBeUndefined(); + const pending = checkAndNotify(showToast); + await vi.runAllTimersAsync(); + await expect(pending).resolves.toBeUndefined(); expect(showToast).not.toHaveBeenCalled(); }); diff --git a/test/codex-manager-cli.test.ts b/test/codex-manager-cli.test.ts index 27261cd..5b052fc 100644 --- a/test/codex-manager-cli.test.ts +++ b/test/codex-manager-cli.test.ts @@ -834,6 +834,8 @@ describe("codex manager cli commands", () => { .mockResolvedValueOnce({ mode: "add" }) .mockResolvedValueOnce({ mode: "cancel" }); promptAddAnotherAccountMock.mockResolvedValue(false); + const expectedTimeoutMs = 4321; + loadPluginConfigMock.mockReturnValue({ fetchTimeoutMs: expectedTimeoutMs }); const authModule = await import("../lib/auth/auth.js"); const createAuthorizationFlowMock = vi.mocked(authModule.createAuthorizationFlow); @@ -874,6 +876,12 @@ describe("codex manager cli commands", () => { expect(storageState.activeIndex).toBe(1); expect(storageState.activeIndexByFamily.codex).toBe(1); expect(setCodexCliActiveSelectionMock).toHaveBeenCalledTimes(1); + expect(exchangeAuthorizationCodeMock).toHaveBeenCalledWith( + "oauth-code", + "pkce-verifier", + authModule.REDIRECT_URI, + { timeoutMs: expectedTimeoutMs }, + ); }); it("runs full refresh test from login menu deep-check mode", async () => { diff --git a/test/config-save.test.ts b/test/config-save.test.ts index 2064fae..39d3279 100644 --- a/test/config-save.test.ts +++ b/test/config-save.test.ts @@ -93,6 +93,72 @@ describe("plugin config save paths", () => { }); }); + it("retries transient env-path read locks before merge save to prevent key loss", async () => { + const configPath = join(tempDir, "plugin-config.json"); + process.env.CODEX_MULTI_AUTH_CONFIG_PATH = configPath; + await fs.writeFile( + 
configPath, + JSON.stringify({ + codexMode: true, + preserved: { nested: true }, + }), + "utf8", + ); + + vi.resetModules(); + const logWarnMock = vi.fn(); + let transientReadFailures = 0; + + vi.doMock("node:fs", async () => { + const actual = await vi.importActual<typeof import("node:fs")>("node:fs"); + return { + ...actual, + readFileSync: vi.fn((...args: Parameters<typeof actual.readFileSync>) => { + const [filePath] = args; + if ( + typeof filePath === "string" && + filePath === configPath && + transientReadFailures < 2 + ) { + transientReadFailures += 1; + const code = transientReadFailures === 1 ? "EBUSY" : "EPERM"; + throw Object.assign(new Error(`Transient ${code}`), { code }); + } + return actual.readFileSync(...args); + }), + }; + }); + vi.doMock("../lib/logger.js", async () => { + const actual = await vi.importActual<typeof import("../lib/logger.js")>( + "../lib/logger.js", + ); + return { + ...actual, + logWarn: logWarnMock, + }; + }); + + try { + const { savePluginConfig } = await import("../lib/config.js"); + await savePluginConfig({ codexTuiV2: false }); + } finally { + vi.doUnmock("node:fs"); + vi.doUnmock("../lib/logger.js"); + } + + const parsed = JSON.parse(await fs.readFile(configPath, "utf8")) as Record< + string, + unknown + >; + expect(parsed.codexMode).toBe(true); + expect(parsed.preserved).toEqual({ nested: true }); + expect(parsed.codexTuiV2).toBe(false); + const failedReadWarnings = logWarnMock.mock.calls.filter(([message]) => + String(message).includes("Failed to read config from"), + ); + expect(failedReadWarnings).toHaveLength(0); + }); + it("recovers from malformed env-path JSON before saving", async () => { + const configPath = join(tempDir, "plugin-config.json"); + process.env.CODEX_MULTI_AUTH_CONFIG_PATH = configPath; @@ -132,13 +198,13 @@ describe("plugin config save paths", () => { await savePluginConfig({ codexMode: false, parallelProbing: true, - parallelProbingMaxConcurrency: 7, + parallelProbingMaxConcurrency: 5, }); const loaded = loadPluginConfig(); expect(loaded.codexMode).toBe(false); 
expect(loaded.parallelProbing).toBe(true); - expect(loaded.parallelProbingMaxConcurrency).toBe(7); + expect(loaded.parallelProbingMaxConcurrency).toBe(5); }); it("resolves parallel probing settings and clamps concurrency", async () => { diff --git a/test/host-codex-prompt.test.ts b/test/host-codex-prompt.test.ts index 59e8c98..820f8b6 100644 --- a/test/host-codex-prompt.test.ts +++ b/test/host-codex-prompt.test.ts @@ -324,6 +324,47 @@ describe("host-codex-prompt", () => { ); }); + it("deduplicates concurrent stale refresh while retrying a 429 source response", async () => { + const { getHostCodexPrompt } = await import("../lib/prompts/host-codex-prompt.js"); + vi.spyOn(Math, "random").mockReturnValue(0); + + const staleMeta = JSON.stringify({ + etag: '"old-etag"', + lastChecked: Date.now() - 20 * 60 * 1000, + }); + vi.mocked(readFile).mockImplementation(async (filePath) => { + if (String(filePath).includes("host-codex-prompt-meta.json")) { + return staleMeta; + } + return "Old cached content"; + }); + + mockFetch + .mockResolvedValueOnce(new Response("rate limited", { status: 429 })) + .mockResolvedValueOnce( + new Response("Prompt after retry", { + status: 200, + headers: { etag: '"retry-etag"' }, + }), + ); + + const [first, second] = await Promise.all([getHostCodexPrompt(), getHostCodexPrompt()]); + expect(first).toBe("Old cached content"); + expect(second).toBe("Old cached content"); + + await vi.waitFor(() => expect(mockFetch).toHaveBeenCalledTimes(2)); + await vi.waitFor(() => + expect(writeFile).toHaveBeenCalledWith( + expect.stringContaining("host-codex-prompt.txt"), + "Prompt after retry", + "utf-8", + ), + ); + await vi.waitFor(async () => { + await expect(getHostCodexPrompt()).resolves.toBe("Prompt after retry"); + }); + }); + it("falls back to cache on network error", async () => { const { getHostCodexPrompt } = await import("../lib/prompts/host-codex-prompt.js"); diff --git a/test/index.test.ts b/test/index.test.ts index d6d9549..e039b1c 100644 --- 
a/test/index.test.ts +++ b/test/index.test.ts @@ -90,7 +90,7 @@ vi.mock("../lib/config.js", () => ({ getEmptyResponseMaxRetries: () => 2, getEmptyResponseRetryDelayMs: () => 1000, getPidOffsetEnabled: () => false, - getFetchTimeoutMs: () => 60000, + getFetchTimeoutMs: vi.fn(() => 60000), getStreamStallTimeoutMs: () => 45000, getLiveAccountSync: vi.fn(() => false), getLiveAccountSyncDebounceMs: () => 250, @@ -111,7 +111,7 @@ vi.mock("../lib/config.js", () => ({ getCodexTuiV2: () => false, getCodexTuiColorProfile: () => "ansi16", getCodexTuiGlyphMode: () => "ascii", - loadPluginConfig: () => ({}), + loadPluginConfig: vi.fn(() => ({})), })); const liveAccountSyncSyncToPathMock = vi.fn(async () => {}); @@ -505,6 +505,126 @@ describe("OpenAIOAuthPlugin", () => { expect(vi.mocked(authModule.exchangeAuthorizationCode)).not.toHaveBeenCalled(); }); + it("passes configured timeout to manual OAuth callback exchange", async () => { + const authModule = await import("../lib/auth/auth.js"); + const configModule = await import("../lib/config.js"); + const manualMethod = plugin.auth.methods[1] as unknown as { + authorize: () => Promise<{ + callback: (input: string) => Promise<{ type: string; reason?: string; message?: string }>; + }>; + }; + const expectedTimeoutMs = 4321; + const pluginConfig = { fetchTimeoutMs: expectedTimeoutMs }; + vi.mocked(configModule.loadPluginConfig).mockReturnValue(pluginConfig); + vi.mocked(configModule.getFetchTimeoutMs).mockReturnValue(expectedTimeoutMs); + + const flow = await manualMethod.authorize(); + await flow.callback("http://127.0.0.1:1455/auth/callback?code=abc123&state=test-state"); + + expect(vi.mocked(authModule.exchangeAuthorizationCode)).toHaveBeenCalledWith( + "abc123", + "test-verifier", + authModule.REDIRECT_URI, + { timeoutMs: expectedTimeoutMs }, + ); + expect(vi.mocked(configModule.getFetchTimeoutMs)).toHaveBeenCalledWith(pluginConfig); + }); + + it("passes configured timeout to browser OAuth callback exchange", async () => { + 
const authModule = await import("../lib/auth/auth.js"); + const configModule = await import("../lib/config.js"); + const browserModule = await import("../lib/auth/browser.js"); + const serverModule = await import("../lib/auth/server.js"); + const expectedTimeoutMs = 6789; + const pluginConfig = { fetchTimeoutMs: expectedTimeoutMs }; + vi.mocked(configModule.loadPluginConfig).mockReturnValue(pluginConfig); + vi.mocked(configModule.getFetchTimeoutMs).mockReturnValue(expectedTimeoutMs); + vi.mocked(authModule.createAuthorizationFlow).mockResolvedValue({ + pkce: { verifier: "custom-verifier", challenge: "custom-challenge" }, + state: "custom-state", + url: "https://auth.openai.com/oauth/authorize?state=custom-state", + }); + vi.mocked(browserModule.openBrowserUrl).mockReturnValue(true); + vi.mocked(serverModule.startLocalOAuthServer).mockResolvedValue({ + ready: true, + close: vi.fn(), + waitForCode: vi.fn(async () => ({ code: "oauth-code" })), + }); + + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: () => Promise<void>; + }; + await autoMethod.authorize(); + + expect(vi.mocked(authModule.exchangeAuthorizationCode)).toHaveBeenCalledWith( + "oauth-code", + "custom-verifier", + authModule.REDIRECT_URI, + { timeoutMs: expectedTimeoutMs }, + ); + expect(vi.mocked(configModule.getFetchTimeoutMs)).toHaveBeenCalledWith(pluginConfig); + }); + + it("keeps timeout wiring stable under concurrent OAuth authorize calls", async () => { + const authModule = await import("../lib/auth/auth.js"); + const configModule = await import("../lib/config.js"); + const browserModule = await import("../lib/auth/browser.js"); + const serverModule = await import("../lib/auth/server.js"); + const expectedTimeoutMs = 2468; + vi.mocked(configModule.loadPluginConfig).mockReturnValue({ fetchTimeoutMs: expectedTimeoutMs }); + vi.mocked(configModule.getFetchTimeoutMs).mockReturnValue(expectedTimeoutMs); + vi.mocked(authModule.createAuthorizationFlow) + .mockResolvedValueOnce({ + pkce: { 
verifier: "verifier-1", challenge: "challenge-1" }, + state: "state-1", + url: "https://auth.openai.com/oauth/authorize?state=state-1", + }) + .mockResolvedValueOnce({ + pkce: { verifier: "verifier-2", challenge: "challenge-2" }, + state: "state-2", + url: "https://auth.openai.com/oauth/authorize?state=state-2", + }) + .mockResolvedValueOnce({ + pkce: { verifier: "verifier-3", challenge: "challenge-3" }, + state: "state-3", + url: "https://auth.openai.com/oauth/authorize?state=state-3", + }); + vi.mocked(browserModule.openBrowserUrl).mockReturnValue(true); + vi.mocked(serverModule.startLocalOAuthServer) + .mockResolvedValueOnce({ + ready: true, + close: vi.fn(), + waitForCode: vi.fn(async () => ({ code: "code-1" })), + }) + .mockResolvedValueOnce({ + ready: true, + close: vi.fn(), + waitForCode: vi.fn(async () => ({ code: "code-2" })), + }) + .mockResolvedValueOnce({ + ready: true, + close: vi.fn(), + waitForCode: vi.fn(async () => ({ code: "code-3" })), + }); + + const autoMethod = plugin.auth.methods[0] as unknown as { + authorize: () => Promise<void>; + }; + await Promise.all([autoMethod.authorize(), autoMethod.authorize(), autoMethod.authorize()]); + + expect(vi.mocked(authModule.exchangeAuthorizationCode)).toHaveBeenCalledTimes(3); + const calls = vi.mocked(authModule.exchangeAuthorizationCode).mock.calls; + expect(calls.map((call) => call[0])).toEqual( + expect.arrayContaining(["code-1", "code-2", "code-3"]), + ); + expect(calls.map((call) => call[1])).toEqual( + expect.arrayContaining(["verifier-1", "verifier-2", "verifier-3"]), + ); + for (const call of calls) { + expect(call[3]).toEqual({ timeoutMs: expectedTimeoutMs }); + } + }); + it("uses REDIRECT_URI in manual callback validation copy", async () => { const authModule = await import("../lib/auth/auth.js"); const manualMethod = plugin.auth.methods[1] as unknown as { diff --git a/test/network.test.ts b/test/network.test.ts new file mode 100644 index 0000000..f2b3fac --- /dev/null +++ b/test/network.test.ts @@ 
-0,0 +1,221 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { fetchWithTimeoutAndRetry } from "../lib/network.js"; + +describe("fetchWithTimeoutAndRetry", () => { + const originalFetch = globalThis.fetch; + let fetchMock: ReturnType<typeof vi.fn>; + + beforeEach(() => { + vi.useFakeTimers(); + fetchMock = vi.fn(); + globalThis.fetch = fetchMock as unknown as typeof fetch; + }); + + afterEach(() => { + globalThis.fetch = originalFetch; + vi.useRealTimers(); + vi.restoreAllMocks(); + }); + + it("returns response on first successful attempt", async () => { + fetchMock.mockResolvedValueOnce(new Response("ok", { status: 200 })); + + const promise = fetchWithTimeoutAndRetry("https://example.com", undefined, { + timeoutMs: 2_000, + }); + await vi.runAllTimersAsync(); + const result = await promise; + + expect(result.attempts).toBe(1); + expect(result.response.status).toBe(200); + expect(fetchMock).toHaveBeenCalledTimes(1); + }); + + it("retries once after network error and recovers", async () => { + const onRetry = vi.fn(); + fetchMock + .mockRejectedValueOnce(new Error("network down")) + .mockResolvedValueOnce(new Response("ok", { status: 200 })); + + const promise = fetchWithTimeoutAndRetry("https://example.com", undefined, { + timeoutMs: 2_000, + retries: 1, + baseDelayMs: 25, + jitterMs: 0, + onRetry, + }); + await vi.runAllTimersAsync(); + const result = await promise; + + expect(result.attempts).toBe(2); + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(onRetry).toHaveBeenCalledWith( + expect.objectContaining({ reason: "error", attempt: 1, maxAttempts: 2 }), + ); + const retryInfo = onRetry.mock.calls[0]?.[0]; + expect(retryInfo).toEqual(expect.objectContaining({ errorType: "Error" })); + expect(retryInfo).not.toHaveProperty("error"); + }); + + it("retries on configured HTTP status codes", async () => { + const onRetry = vi.fn(); + fetchMock + .mockResolvedValueOnce(new Response("busy", { status: 503 })) + .mockResolvedValueOnce(new 
Response("ok", { status: 200 })); + + const promise = fetchWithTimeoutAndRetry("https://example.com", undefined, { + timeoutMs: 2_000, + retries: 1, + baseDelayMs: 25, + jitterMs: 0, + retryOnStatuses: [503], + onRetry, + }); + await vi.runAllTimersAsync(); + const result = await promise; + + expect(result.attempts).toBe(2); + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(onRetry).toHaveBeenCalledWith( + expect.objectContaining({ + reason: "status", + status: 503, + attempt: 1, + maxAttempts: 2, + }), + ); + }); + + it("does not retry caller-aborted requests", async () => { + const controller = new AbortController(); + const abortError = Object.assign(new Error("aborted"), { name: "AbortError" }); + controller.abort(); + fetchMock.mockImplementationOnce(async () => { + throw abortError; + }); + + const promise = fetchWithTimeoutAndRetry("https://example.com", undefined, { + timeoutMs: 2_000, + retries: 3, + baseDelayMs: 25, + jitterMs: 0, + signal: controller.signal, + }); + const rejection = expect(promise).rejects.toThrow("aborted"); + await vi.runAllTimersAsync(); + await rejection; + expect(fetchMock).toHaveBeenCalledTimes(1); + }); + + it("retries when a fetch attempt times out", async () => { + const onRetry = vi.fn(); + fetchMock + .mockImplementationOnce( + (_input: RequestInfo | URL, init?: RequestInit) => + new Promise((_resolve, reject) => { + const signal = init?.signal as AbortSignal | undefined; + if (signal?.aborted) { + reject(signal.reason); + return; + } + signal?.addEventListener( + "abort", + () => { + reject(signal.reason); + }, + { once: true }, + ); + }), + ) + .mockResolvedValueOnce(new Response("ok", { status: 200 })); + + const pending = fetchWithTimeoutAndRetry("https://example.com", undefined, { + timeoutMs: 1_000, + retries: 1, + baseDelayMs: 25, + jitterMs: 0, + onRetry, + }); + await vi.runAllTimersAsync(); + const result = await pending; + + expect(result.attempts).toBe(2); + expect(fetchMock).toHaveBeenCalledTimes(2); + 
expect(onRetry).toHaveBeenCalledWith( + expect.objectContaining({ + reason: "timeout", + errorType: "AbortError", + attempt: 1, + maxAttempts: 2, + }), + ); + await vi.runAllTimersAsync(); + expect(fetchMock).toHaveBeenCalledTimes(2); + }); + + it("aborts immediately when caller aborts mid-flight", async () => { + const controller = new AbortController(); + const abortError = Object.assign(new Error("caller-aborted"), { + name: "AbortError", + }); + fetchMock.mockImplementationOnce( + (_input: RequestInfo | URL, init?: RequestInit) => + new Promise((_resolve, reject) => { + const signal = init?.signal as AbortSignal | undefined; + if (signal?.aborted) { + reject(signal.reason); + return; + } + signal?.addEventListener( + "abort", + () => { + reject(signal.reason); + }, + { once: true }, + ); + }), + ); + + const pending = fetchWithTimeoutAndRetry("https://example.com", undefined, { + timeoutMs: 2_000, + retries: 3, + baseDelayMs: 25, + jitterMs: 0, + signal: controller.signal, + }); + controller.abort(abortError); + + await expect(pending).rejects.toThrow("caller-aborted"); + expect(fetchMock).toHaveBeenCalledTimes(1); + await vi.runAllTimersAsync(); + expect(fetchMock).toHaveBeenCalledTimes(1); + }); + + it("stops retrying when caller aborts during backoff", async () => { + const controller = new AbortController(); + fetchMock.mockRejectedValueOnce(new Error("first-attempt-failed")); + + const pending = fetchWithTimeoutAndRetry("https://example.com", undefined, { + timeoutMs: 2_000, + retries: 2, + baseDelayMs: 500, + jitterMs: 0, + signal: controller.signal, + }); + + await Promise.resolve(); + await Promise.resolve(); + expect(fetchMock).toHaveBeenCalledTimes(1); + + controller.abort( + Object.assign(new Error("abort-during-backoff"), { + name: "AbortError", + }), + ); + + await expect(pending).rejects.toThrow("abort-during-backoff"); + expect(fetchMock).toHaveBeenCalledTimes(1); + await vi.runAllTimersAsync(); + expect(fetchMock).toHaveBeenCalledTimes(1); + }); 
+}); diff --git a/test/plugin-config.test.ts b/test/plugin-config.test.ts index 9caebf9..e97003c 100644 --- a/test/plugin-config.test.ts +++ b/test/plugin-config.test.ts @@ -200,6 +200,46 @@ describe('Plugin Configuration', () => { }); }); + it('retries transient config read lock errors and succeeds', () => { + mockExistsSync.mockReturnValue(true); + const transientReadError = Object.assign(new Error('Resource busy'), { code: 'EBUSY' }); + mockReadFileSync + .mockImplementationOnce(() => { + throw transientReadError; + }) + .mockReturnValueOnce(JSON.stringify({ codexMode: false })); + + const config = loadPluginConfig(); + + expect(config.codexMode).toBe(false); + expect(mockReadFileSync).toHaveBeenCalledTimes(2); + const failedLoadWarnings = vi + .mocked(logger.logWarn) + .mock.calls.filter(([message]) => String(message).includes('Failed to load config')); + expect(failedLoadWarnings).toHaveLength(0); + }); + + it('retries transient config read lock errors (EPERM) and succeeds', () => { + mockExistsSync.mockReturnValue(true); + const transientReadError = Object.assign(new Error('Operation not permitted'), { + code: 'EPERM', + }); + mockReadFileSync + .mockImplementationOnce(() => { + throw transientReadError; + }) + .mockReturnValueOnce(JSON.stringify({ codexMode: false })); + + const config = loadPluginConfig(); + + expect(config.codexMode).toBe(false); + expect(mockReadFileSync).toHaveBeenCalledTimes(2); + const failedLoadWarnings = vi + .mocked(logger.logWarn) + .mock.calls.filter(([message]) => String(message).includes('Failed to load config')); + expect(failedLoadWarnings).toHaveLength(0); + }); + it('should detect CODEX_HOME legacy auth config path before global legacy path', async () => { const runWithCodexHome = async (codexHomePath: string) => { vi.resetModules(); @@ -599,6 +639,43 @@ describe('Plugin Configuration', () => { ); expect(validationWarnings).toHaveLength(1); }); + + it('sanitizes invalid config fields while preserving valid settings', () => { 
+ mockExistsSync.mockReturnValue(true); + mockReadFileSync.mockReturnValue( + JSON.stringify({ + codexMode: false, + unsupportedCodexPolicy: 'fallback', + emptyResponseMaxRetries: 4, + fetchTimeoutMs: 'invalid-timeout', + preemptiveQuotaRemainingPercent5h: 'invalid-percent', + }), + ); + + const config = loadPluginConfig(); + + expect(config.codexMode).toBe(false); + expect(config.unsupportedCodexPolicy).toBe('fallback'); + expect(config.emptyResponseMaxRetries).toBe(4); + // Invalid values should be dropped and defaulted safely. + expect(config.fetchTimeoutMs).toBe(60_000); + expect(config.preemptiveQuotaRemainingPercent5h).toBe(5); + }); + + it('sanitizes out-of-range numeric config fields to safe defaults', () => { + mockExistsSync.mockReturnValue(true); + mockReadFileSync.mockReturnValue( + JSON.stringify({ + fetchTimeoutMs: 10, + preemptiveQuotaRemainingPercent5h: 500, + }), + ); + + const config = loadPluginConfig(); + + expect(config.fetchTimeoutMs).toBe(60_000); + expect(config.preemptiveQuotaRemainingPercent5h).toBe(5); + }); }); describe('getCodexMode', () => { diff --git a/test/shutdown.test.ts b/test/shutdown.test.ts index c64ecf4..36ac2f3 100644 --- a/test/shutdown.test.ts +++ b/test/shutdown.test.ts @@ -47,6 +47,15 @@ describe("Graceful shutdown", () => { expect(fn).toHaveBeenCalledTimes(1); }); + it("deduplicates concurrent cleanup execution", async () => { + const fn = vi.fn(async () => { + await new Promise((resolve) => setTimeout(resolve, 10)); + }); + registerCleanup(fn); + await Promise.all([runCleanup(), runCleanup()]); + expect(fn).toHaveBeenCalledTimes(1); + }); + it("continues cleanup even if one function throws", async () => { const fn1 = vi.fn(() => { throw new Error("fail"); }); const fn2 = vi.fn(); @@ -71,6 +80,50 @@ describe("Graceful shutdown", () => { expect(getCleanupCount()).toBe(0); }); + it("returns after configured shutdown timeout when cleanup hangs", async () => { + const originalTimeout = 
process.env.CODEX_AUTH_SHUTDOWN_TIMEOUT_MS; + process.env.CODEX_AUTH_SHUTDOWN_TIMEOUT_MS = "1000"; + vi.useFakeTimers(); + try { + const hangingFn = vi.fn( + () => + new Promise(() => { + // Intentionally unresolved. + }), + ); + registerCleanup(hangingFn); + const cleanupPromise = runCleanup(); + await vi.advanceTimersByTimeAsync(1000); + await cleanupPromise; + expect(hangingFn).toHaveBeenCalledTimes(1); + } finally { + if (originalTimeout === undefined) { + delete process.env.CODEX_AUTH_SHUTDOWN_TIMEOUT_MS; + } else { + process.env.CODEX_AUTH_SHUTDOWN_TIMEOUT_MS = originalTimeout; + } + vi.useRealTimers(); + } + }); + + it("does not leave a pending shutdown timer after fast cleanup", async () => { + const originalTimeout = process.env.CODEX_AUTH_SHUTDOWN_TIMEOUT_MS; + process.env.CODEX_AUTH_SHUTDOWN_TIMEOUT_MS = "5000"; + vi.useFakeTimers(); + try { + registerCleanup(() => {}); + await runCleanup(); + expect(vi.getTimerCount()).toBe(0); + } finally { + if (originalTimeout === undefined) { + delete process.env.CODEX_AUTH_SHUTDOWN_TIMEOUT_MS; + } else { + process.env.CODEX_AUTH_SHUTDOWN_TIMEOUT_MS = originalTimeout; + } + vi.useRealTimers(); + } + }); + describe("process signal integration", () => { it("SIGINT handler runs cleanup and exits with code 0", async () => { const capturedHandlers = new Map<string, () => void>();