diff --git a/docs/codex-account-switching-design.md b/docs/codex-account-switching-design.md new file mode 100644 index 00000000..f9ef5c06 --- /dev/null +++ b/docs/codex-account-switching-design.md @@ -0,0 +1,451 @@ +# Codex Account Switching Design + +**Status:** Draft +**Date:** 2026-04-17 + +## Summary + +Orca's current Codex account switcher swaps the entire `CODEX_HOME` for each managed account. That isolates authentication, but it also unintentionally forks config, permissions, history, sessions, memories, skills, and other local Codex state. The result is that account switching feels like switching between separate Codex installs instead of switching which account powers the same Codex environment. + +The recommended design is: + +- Keep `~/.codex` as the shared runtime `CODEX_HOME` for all Codex user state. +- Store only `auth.json` per managed account in Orca-owned storage. +- Introduce a dedicated main-process runtime-home owner that materializes the selected account's `auth.json` into the shared runtime home before any Codex launch, login, or rate-limit fetch. +- Restart live Codex panes after switch; newly launched panes use the new account while preserving the shared Codex state. + +This matches the intended product behavior: account switching is for authentication and usage limits, not for creating separate Codex worlds. It also matches how manual Codex account switching already behaves outside Orca: logging out and back in mutates the same `~/.codex` state the user sees in terminal Codex. + +## Motivation + +The current managed-account design causes user-visible problems: + +- `config.toml` diverges per account, so permissions and sandbox defaults reset unexpectedly. +- `history.jsonl` and `sessions/` are scoped per managed home, so chat history appears to disappear after account switches. +- `memories`, `skills`, `rules`, and likely sqlite-backed local state drift per account. 
+- Live sessions require restart because the active terminal process keeps using the old `CODEX_HOME`. + +We already patched the first symptom by syncing `config.toml` into managed homes. That is a tactical fix, not the right long-term model. The full-home-per-account design still leaves history and session continuity split across accounts. + +The deeper issue is ownership. Orca currently has no single component that owns Codex runtime state preparation. `CodexAccountService`, `pty.ts`, rate-limit fetchers, and usage scanning each participate in path or environment decisions. The long-term fix must therefore be a runtime-home ownership refactor, not just a storage-layout tweak. + +## Current State + +### Orca behavior today + +- Managed accounts are created under `app.getPath('userData')/codex-accounts//home`. +- Orca selects an account by updating `settings.activeCodexManagedAccountId`. +- New Codex PTYs inherit the selected managed home's path as `CODEX_HOME`. +- Codex rate-limit fetches also use that selected managed home. + +Relevant code: + +- [src/main/codex-accounts/service.ts](/Users/jinwoohong/orca/workspaces/orca/codex-fix-2/src/main/codex-accounts/service.ts) +- [src/main/ipc/pty.ts](/Users/jinwoohong/orca/workspaces/orca/codex-fix-2/src/main/ipc/pty.ts) +- [src/main/codex-usage/scanner.ts](/Users/jinwoohong/orca/workspaces/orca/codex-fix-2/src/main/codex-usage/scanner.ts) + +### Codex state observed on disk + +On a typical install, `CODEX_HOME` contains at least: + +- `auth.json` +- `config.toml` +- `history.jsonl` +- `sessions/` +- `memories/` +- `skills/` +- `rules/` +- `shell_snapshots/` +- `models_cache.json` +- `logs_2.sqlite` +- `state_5.sqlite` +- `installation_id` +- `version.json` + +Because Orca currently swaps the whole home, all of that becomes account-scoped. + +## Goals + +- Make Codex account switching feel like swapping credentials, not swapping environments. +- Preserve one continuous Codex history and session store across accounts. 
+- Keep permissions, sandbox defaults, MCP config, memories, and other user state stable across account changes. +- Preserve Orca's existing account-switch UX: one selected active account at a time, with restart prompts for live Codex panes. +- Keep the solution cross-platform across macOS, Linux, and Windows. + +## Non-Goals + +- Supporting simultaneous live Codex sessions under different accounts in the same Orca instance. +- Changing Codex upstream behavior or requiring first-class multi-account support from Codex. +- Building a replication system that continuously merges multiple independent `CODEX_HOME` trees. + +## Constraints + +### Codex does not currently expose a separate auth path + +From local CLI inspection, Codex supports: + +- `CODEX_HOME` +- config overrides via `-c key=value` + +It does not currently expose a first-class "shared config/home plus separate auth profile" interface. There is an upstream feature request for auth profiles, which suggests Orca cannot rely on such a feature today. + +### Orca is cross-platform + +The design must work on macOS, Linux, and Windows. That rules out relying on symlink-heavy designs as the primary solution: + +- Windows symlink/junction behavior is more fragile. +- Atomic copy + rename is simpler and more portable. +- Node path utilities should be used everywhere. + +### Orca's current mental model is single selected account + +The current switcher already assumes one active account at a time and uses restart prompts for live Codex panes. The recommended design leans into that model rather than trying to support concurrent mixed-account sessions. + +## Options Considered + +### Option 1: Keep full per-account homes and sync everything + +Each managed account keeps its own full `CODEX_HOME`, and Orca syncs: + +- `config.toml` +- `history.jsonl` +- `sessions/` +- sqlite state +- memories, skills, rules + +**Pros** + +- Minimal conceptual change from the current design. +- Per-account auth isolation stays simple. 
+ +**Cons** + +- Orca becomes a replication system for Codex state. +- `history.jsonl` is mergeable, but `sessions/` and sqlite-backed files are much harder to reconcile safely. +- Concurrent activity can easily cause stale-copy or overwrite bugs. +- More code, more edge cases, weaker guarantees. + +**Verdict** + +Not recommended. This is the highest-complexity path for the weakest product result. + +### Option 2: Shared runtime home, per-account `auth.json` + +Keep a single shared runtime `CODEX_HOME` and store one `auth.json` per managed account. On switch, Orca copies the selected account's `auth.json` into the shared runtime home before launching or restarting Codex sessions. + +**Pros** + +- Cleanest mapping to the product intent. +- `config.toml`, history, sessions, memories, skills, rules, and local state are naturally shared. +- No state replication logic. +- Cross-platform implementation is straightforward with normal file copy and rename. + +**Cons** + +- Does not support simultaneous different-account live sessions in the same runtime home. +- Existing live sessions still likely need restart because Codex may read auth only at startup. + +**Verdict** + +Recommended. + +### Option 3: Wait for upstream Codex auth profiles + +If Codex eventually supports a true auth-profile model, Orca could delegate account isolation to Codex itself. + +**Pros** + +- Best long-term upstream integration. +- Less Orca-specific state management. + +**Cons** + +- Not available today. +- Does not solve Orca's user-facing problems now. + +**Verdict** + +Good future migration target, not a current solution. + +## Recommended Design + +### High-level model + +Introduce two separate concepts: + +1. **Shared runtime home** + The single `CODEX_HOME` used for all Codex launches inside Orca. For this design, the canonical shared runtime home is `~/.codex`. + +2. **Per-account auth store** + Orca-managed storage that keeps one `auth.json` per account. + +3. 
**Codex runtime-home owner** + A dedicated main-process component that prepares active Codex runtime state before any Codex subprocess, rate-limit fetch, or login flow touches it. + +At runtime: + +- Orca picks the selected managed account. +- The runtime-home owner copies that account's `auth.json` into `~/.codex/auth.json`. +- All Codex entry points consume the runtime-home owner's resolved home path instead of reasoning about Codex paths independently. +- Orca launches Codex with `CODEX_HOME` pointing to `~/.codex`. + +### Shared vs per-account state + +**Shared runtime home** + +- `config.toml` +- `history.jsonl` +- `sessions/` +- `memories/` +- `skills/` +- `rules/` +- `shell_snapshots/` +- `models_cache.json` +- `logs_2.sqlite` +- `state_5.sqlite` +- `installation_id` +- `version.json` +- transient caches and temp dirs, unless we later discover they must be treated specially + +**Per-account storage** + +- `auth.json` +- Orca account metadata already stored in Orca settings + +### Why this is the right split + +`auth.json` is the only file we explicitly know needs to vary by account. The rest of the files represent user environment, session continuity, and local Codex behavior. If those are split by account, the switcher does not feel seamless. + +## Detailed Design + +### Storage layout + +Recommended paths: + +- Shared runtime home: + - `~/.codex` +- Per-account auth store: + - `app.getPath('userData')/codex-accounts//home/auth.json` + +The important part is the separation of concerns, not the exact path choice. + +### Ownership and API + +The design should introduce a dedicated main-process owner for runtime-home preparation. 
A representative API shape: + +```ts +type PreparedCodexRuntime = { + homePath: string + activeAccountId: string | null +} + +interface CodexRuntimeHomeService { + prepareForAccountSwitch(accountId: string | null): PreparedCodexRuntime + prepareForCodexLaunch(): PreparedCodexRuntime + prepareForRateLimitFetch(): PreparedCodexRuntime + prepareForLogin(accountId: string): { loginHomePath: string } +} +``` + +Why: today path/runtime ownership is fragmented across `CodexAccountService`, PTY spawn env injection, rate-limit fetches, and usage scanning. A single owner prevents those code paths from drifting again. + +### Serialization contract + +Because `~/.codex/auth.json` is shared mutable state, the runtime-home owner must be the only component allowed to mutate active Codex auth. It must serialize these operations behind one coordination primitive: + +- `prepareForAccountSwitch` +- `prepareForCodexLaunch` +- `prepareForRateLimitFetch` +- `prepareForLogin` +- any future reauth or logout helpers + +Required contract: + +- account switch auth materialization is exclusive +- launch and rate-limit preparation must either observe the auth state from before the switch or the fully committed auth state from after the switch +- they must never observe an in-progress partial write +- login preparation must not mutate the active runtime auth in place + +Why: without this contract, PTY launch, quota fetch, and auth swap can still race and intermittently bind work to the wrong account. + +### Account switch flow + +1. User selects a managed account. +2. Orca validates that account's stored `auth.json`. +3. The runtime-home owner writes the selected `auth.json` into `~/.codex/auth.json`. +4. Orca refreshes Codex rate-limit state using the same prepared runtime home. +5. Orca prompts restart for live Codex panes, marks them stale until restarted, and blocks further Codex execution from those panes. +6. 
New or restarted Codex panes launch with: + - `CODEX_HOME=~/.codex` + - shared config/history/session state + - selected account auth + +Interaction states that must be explicit in product/UI copy: + +- **Switch in progress**: selection disabled while auth materialization and rate-limit refresh run. +- **Switch complete, restart required**: existing live Codex panes are stale, must show a restart affordance, and must not be allowed to submit further Codex work until restarted. +- **Switch failed**: active account remains unchanged and stale restart notices are not applied. +- **Switch to system default**: Orca clears managed-account selection and restores the system-default auth snapshot into `~/.codex`. + +### System default source of truth + +This design treats “System default” as a first-class auth source, not as “whatever happens to be left in `~/.codex/auth.json`.” + +Rules: + +- On first startup of the new architecture, before any managed-account switch mutates `~/.codex/auth.json`, Orca captures a `system-default` auth snapshot from the current `~/.codex/auth.json` when present. +- That snapshot is stored in Orca-owned storage separately from managed account auth blobs. +- Switching to “System default” restores `~/.codex/auth.json` from that stored snapshot. +- If the user changes external Codex auth outside Orca and wants Orca’s “System default” target to follow it, Orca should expose an explicit refresh/import action or perform refresh only at startup before any managed account takes ownership in the current app session. + +Why: without a defined snapshot-and-restore model, switching back to “System default” is nondeterministic and can leave the last managed account active or overwrite the user’s expected external Codex auth. + +### New account add flow + +1. Orca prepares a temporary login home that inherits the current shared config baseline but does not dirty the active runtime state on failure. +2. 
Orca runs `codex login` against that temporary login home. +3. Orca captures the resulting `auth.json`. +4. Orca stores only that `auth.json` under the managed account's storage. +5. Orca does not change the active runtime home until the user selects that account or explicitly makes it active on completion. + +Why: a failed or aborted login must not poison the currently active `~/.codex` runtime state. + +### Legacy managed-home migration + +Migration of existing managed-home history and sessions is required, not optional. + +Rules: + +- On first startup after the new architecture lands, Orca scans legacy managed homes for: + - `history.jsonl` + - `sessions/` +- Orca imports legacy history into `~/.codex/history.jsonl` using append/merge semantics that avoid dropping existing shared-home history. +- Orca imports legacy sessions into `~/.codex/sessions/` with an explicit collision policy: + - import non-conflicting legacy session files directly + - for conflicting session files, merge turns when Orca can prove the files represent the same logical session with append-only divergence + - if Orca cannot safely merge a conflicting session file, preserve both copies under deterministic names and emit a diagnostic record rather than silently dropping either side +- Orca records migration completion so the import does not repeat on every startup. + +Why: shared history/session continuity is a core goal of the design. Leaving legacy managed-home data unresolved would make the upgrade look like history loss for users who previously used managed accounts. + +### Live session behavior + +The safe assumption is: + +- switching accounts affects new Codex launches +- existing live Codex sessions should still restart +- stale live Codex panes are blocked from further execution until restart completes + +If future validation shows Codex hot-reloads `auth.json`, Orca can relax this. The architecture should not rely on hot-reload behavior today. 
+ +### Startup and recovery behavior + +The design must explicitly cover startup and error handling: + +- If `~/.codex` exists but has no `history.jsonl` or `sessions/`, Orca should launch cleanly and let Codex create them lazily. +- If the selected managed account's stored `auth.json` is missing or corrupt, Orca should: + - log a recoverable warning, + - fall back to system-default semantics, + - clear or mark invalid the selected managed account, + - avoid leaving rate-limit UI bound to the wrong identity. +- If a rate-limit refresh fails after account switch, Orca should keep the account switch result but show quota fetch failure separately rather than rolling back auth materialization implicitly. + +## Why `~/.codex` Is The Shared Home + +This document explicitly chooses `~/.codex` as the canonical shared runtime home. + +Reasons: + +- It matches the user's existing Codex mental model inside and outside Orca. +- It avoids split-brain between Orca Codex usage and terminal Codex usage. +- It matches what manual account switching already does today: logout/login mutates the same shared Codex state. + +Tradeoff: + +- Orca account switching will mutate the same Codex state used outside Orca. + +That is acceptable for this product direction. Orca is acting as an automated frontend for the user's existing Codex environment, not a separate Codex silo. + +## Migration Plan + +### Phase 1: Preserve the config sync patch + +Keep the existing `config.toml` sync patch as a tactical fix while the broader migration is in progress. + +### Phase 2: Introduce runtime-home owner + +- Add a dedicated main-process runtime-home owner/service. +- Route PTY spawning, rate-limit fetches, and login preparation through it. +- Make `~/.codex` the explicit resolved runtime home for those flows. + +### Phase 3: Move managed accounts to auth-only semantics + +- Update account add/reauth logic to persist only the account's `auth.json`. 
+- Stop treating per-account homes as full runtime environments. + +### Phase 4: Account switch writes auth into shared runtime home + +- On select, materialize the chosen account's `auth.json` into `~/.codex`. +- Keep the existing restart notice flow for live Codex panes. + +### Phase 5: Cleanup / compatibility + +- Run the one-time legacy managed-home migration into `~/.codex`. +- Mark old full-home-per-account storage as legacy. +- Remove code paths that assume managed homes are full `CODEX_HOME`s. + +## Risks + +### Concurrent mixed-account sessions + +This design assumes one selected active account at a time. If Orca ever needs simultaneous live sessions under different accounts, a single shared runtime home with one `auth.json` will not be sufficient. + +### Unknown Codex coupling + +We know `auth.json` is account-specific. We infer that most other files are environment/user-state and should be shared. If Codex later proves that some sqlite or cache files are also account-coupled, Orca may need to carve out a small additional per-account subset. That is still far simpler than syncing whole homes. + +### Mutating the shared runtime home + +This design intentionally updates `~/.codex/auth.json`. That means Orca and terminal Codex outside Orca share one Codex world. This is a deliberate product choice, not an accidental side effect. 
+ +## Testing Strategy + +### Unit tests + +- account selection writes the selected `auth.json` into `~/.codex` +- PTY spawn uses `~/.codex` instead of the managed account home +- config/history/session paths resolve from `~/.codex` +- invalid or unreadable account auth does not corrupt the shared runtime home +- startup with missing shared runtime files is repaired gracefully +- rate-limit fetches and PTY launches consume the same runtime-home owner output + +### Integration tests + +- add account A, launch Codex, verify `~/.codex` gets history +- switch to account B, restart pane, verify history remains available +- verify `config.toml` and permissions do not change across account switches +- verify only `auth.json` differs across account switches + +### Manual verification + +1. Start a Codex session under account A and create visible history. +2. Switch to account B. +3. Restart the Codex pane. +4. Verify the session uses account B for auth/rate limits. +5. Verify history, sessions, config, memories, and skills remain available. + +## Open Questions + +- Do `logs_2.sqlite` and `state_5.sqlite` behave correctly when fully shared across account switches? This is the expected design, but should be validated during rollout. +- How should Orca expose refresh of the stored `system-default` auth snapshot when the user logs into Codex outside Orca? + +## Recommendation + +Adopt **`~/.codex` as the shared runtime `CODEX_HOME`, plus per-account `auth.json` only**, and implement it through a dedicated runtime-home owner in the main process. + +This is the simplest design that: + +- matches the intended account-switching UX, +- avoids fragile replication logic, +- works cross-platform, +- and leaves room to adopt upstream Codex auth-profile support later if it becomes available. 
diff --git a/src/main/codex-accounts/fs-utils.test.ts b/src/main/codex-accounts/fs-utils.test.ts new file mode 100644 index 00000000..706a1bed --- /dev/null +++ b/src/main/codex-accounts/fs-utils.test.ts @@ -0,0 +1,80 @@ +import { describe, expect, it } from 'vitest' +import { existsSync, mkdtempSync, readFileSync, rmSync, statSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { writeFileAtomically } from './fs-utils' + +describe('writeFileAtomically', () => { + let dir: string + + function setup(): string { + dir = mkdtempSync(join(tmpdir(), 'orca-fs-utils-')) + return dir + } + + function cleanup(): void { + if (dir) { + rmSync(dir, { recursive: true, force: true }) + } + } + + it('writes a file atomically', () => { + setup() + try { + const target = join(dir, 'test.json') + writeFileAtomically(target, '{"key":"value"}\n') + + expect(readFileSync(target, 'utf-8')).toBe('{"key":"value"}\n') + } finally { + cleanup() + } + }) + + it('overwrites an existing file', () => { + setup() + try { + const target = join(dir, 'test.json') + writeFileAtomically(target, 'old') + writeFileAtomically(target, 'new') + + expect(readFileSync(target, 'utf-8')).toBe('new') + } finally { + cleanup() + } + }) + + it('applies the mode option to the written file', () => { + if (process.platform === 'win32') { + return + } + + setup() + try { + const target = join(dir, 'secret.json') + writeFileAtomically(target, '{"token":"abc"}\n', { mode: 0o600 }) + + const mode = statSync(target).mode & 0o777 + expect(mode).toBe(0o600) + } finally { + cleanup() + } + }) + + it('cleans up temp file on write failure', () => { + setup() + try { + const target = join(dir, 'nonexistent-dir', 'nested', 'test.json') + + expect(() => writeFileAtomically(target, 'data')).toThrow() + + const tmpFiles = existsSync(dir) + ? 
require('node:fs') + .readdirSync(dir) + .filter((f: string) => f.endsWith('.tmp')) + : [] + expect(tmpFiles).toHaveLength(0) + } finally { + cleanup() + } + }) +}) diff --git a/src/main/codex-accounts/fs-utils.ts b/src/main/codex-accounts/fs-utils.ts new file mode 100644 index 00000000..540c1f5e --- /dev/null +++ b/src/main/codex-accounts/fs-utils.ts @@ -0,0 +1,41 @@ +import { randomUUID } from 'node:crypto' +import { renameSync, rmSync, writeFileSync } from 'node:fs' + +export function writeFileAtomically( + targetPath: string, + contents: string, + options?: { mode?: number } +): void { + const tmpPath = `${targetPath}.${process.pid}.${randomUUID()}.tmp` + try { + writeFileSync(tmpPath, contents, { encoding: 'utf-8', mode: options?.mode }) + renameWithRetry(tmpPath, targetPath) + } catch (error) { + rmSync(tmpPath, { force: true }) + throw error + } +} + +// Why: on Windows, renameSync can fail with EPERM/EACCES if another process +// (antivirus, Codex CLI) holds the target file open. A short retry avoids +// transient failures without masking real permission errors. +function renameWithRetry(source: string, target: string): void { + const maxAttempts = process.platform === 'win32' ? 
3 : 1 + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + try { + renameSync(source, target) + return + } catch (error) { + const code = (error as NodeJS.ErrnoException).code + if (attempt < maxAttempts && (code === 'EPERM' || code === 'EACCES')) { + const delayMs = attempt * 50 + const until = Date.now() + delayMs + while (Date.now() < until) { + /* busy-wait: setTimeout is async and callers must stay sync */ + } + continue + } + throw error + } + } +} diff --git a/src/main/codex-accounts/runtime-home-service.test.ts b/src/main/codex-accounts/runtime-home-service.test.ts new file mode 100644 index 00000000..8f7dd751 --- /dev/null +++ b/src/main/codex-accounts/runtime-home-service.test.ts @@ -0,0 +1,431 @@ +/* eslint-disable max-lines -- test suite covers snapshot, migration, auth materialization, and error-resilience scenarios */ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import { + existsSync, + mkdtempSync, + mkdirSync, + readFileSync, + rmSync, + statSync, + writeFileSync +} from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import type { GlobalSettings } from '../../shared/types' + +const testState = { userDataDir: '', fakeHomeDir: '' } + +vi.mock('electron', () => ({ + app: { + getPath: () => testState.userDataDir + } +})) + +vi.mock('node:os', async () => { + const actual = await vi.importActual('node:os') // eslint-disable-line @typescript-eslint/consistent-type-imports -- vi.importActual requires inline import() + return { + ...actual, + homedir: () => testState.fakeHomeDir + } +}) + +function createSettings(overrides: Partial = {}): GlobalSettings { + return { + workspaceDir: testState.fakeHomeDir, + nestWorkspaces: false, + refreshLocalBaseRefOnWorktreeCreate: false, + branchPrefix: 'git-username', + branchPrefixCustom: '', + theme: 'system', + editorAutoSave: false, + editorAutoSaveDelayMs: 1000, + terminalFontSize: 14, + terminalFontFamily: 'JetBrains Mono', + 
terminalFontWeight: 500, + terminalCursorStyle: 'block', + terminalCursorBlink: false, + terminalThemeDark: 'orca-dark', + terminalDividerColorDark: '#000000', + terminalUseSeparateLightTheme: false, + terminalThemeLight: 'orca-light', + terminalDividerColorLight: '#ffffff', + terminalInactivePaneOpacity: 0.5, + terminalActivePaneOpacity: 1, + terminalPaneOpacityTransitionMs: 150, + terminalDividerThicknessPx: 1, + terminalRightClickToPaste: false, + terminalFocusFollowsMouse: false, + setupScriptLaunchMode: 'split-vertical', + terminalScrollbackBytes: 10_000_000, + openLinksInApp: false, + rightSidebarOpenByDefault: true, + showTitlebarAgentActivity: true, + diffDefaultView: 'inline', + notifications: { + enabled: true, + agentTaskComplete: true, + terminalBell: false, + suppressWhenFocused: true + }, + promptCacheTimerEnabled: false, + promptCacheTtlMs: 300_000, + codexManagedAccounts: [], + activeCodexManagedAccountId: null, + terminalScopeHistoryByWorktree: true, + defaultTuiAgent: null, + skipDeleteWorktreeConfirm: false, + defaultTaskViewPreset: 'all', + agentCmdOverrides: {}, + terminalMacOptionAsAlt: 'false', + experimentalTerminalDaemon: false, + experimentalTerminalDaemonNoticeShown: false, + ...overrides + } +} + +function createStore(settings: GlobalSettings) { + return { + getSettings: vi.fn(() => settings), + updateSettings: vi.fn((updates: Partial) => { + settings = { + ...settings, + ...updates, + notifications: { + ...settings.notifications, + ...updates.notifications + } + } + return settings + }) + } +} + +function createManagedAuth(rootDir: string, accountId: string, auth: string): string { + const managedHomePath = join(rootDir, 'codex-accounts', accountId, 'home') + mkdirSync(managedHomePath, { recursive: true }) + writeFileSync(join(managedHomePath, '.orca-managed-home'), `${accountId}\n`, 'utf-8') + writeFileSync(join(managedHomePath, 'auth.json'), auth, 'utf-8') + return managedHomePath +} + +describe('CodexRuntimeHomeService', () => { + 
beforeEach(() => { + vi.resetModules() + vi.clearAllMocks() + testState.userDataDir = mkdtempSync(join(tmpdir(), 'orca-runtime-home-')) + testState.fakeHomeDir = mkdtempSync(join(tmpdir(), 'orca-codex-home-')) + mkdirSync(join(testState.fakeHomeDir, '.codex'), { recursive: true }) + }) + + afterEach(() => { + rmSync(testState.userDataDir, { recursive: true, force: true }) + rmSync(testState.fakeHomeDir, { recursive: true, force: true }) + }) + + it('captures the existing ~/.codex auth as the system-default snapshot', async () => { + const runtimeAuthPath = join(testState.fakeHomeDir, '.codex', 'auth.json') + writeFileSync(runtimeAuthPath, '{"account":"system"}\n', 'utf-8') + const store = createStore(createSettings()) + + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + new CodexRuntimeHomeService(store as never) + + expect( + readFileSync( + join(testState.userDataDir, 'codex-runtime-home', 'system-default-auth.json'), + 'utf-8' + ) + ).toBe('{"account":"system"}\n') + }) + + it('materializes the active managed account auth into ~/.codex on startup', async () => { + const runtimeAuthPath = join(testState.fakeHomeDir, '.codex', 'auth.json') + writeFileSync(runtimeAuthPath, '{"account":"system"}\n', 'utf-8') + const managedHomePath = createManagedAuth( + testState.userDataDir, + 'account-1', + '{"account":"managed"}\n' + ) + const store = createStore( + createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ], + activeCodexManagedAccountId: 'account-1' + }) + ) + + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + new CodexRuntimeHomeService(store as never) + + expect(readFileSync(runtimeAuthPath, 'utf-8')).toBe('{"account":"managed"}\n') + expect( + readFileSync( + join(testState.userDataDir, 'codex-runtime-home', 
'system-default-auth.json'), + 'utf-8' + ) + ).toBe('{"account":"system"}\n') + }) + + it('restores the system-default snapshot when no managed account is selected', async () => { + const runtimeAuthPath = join(testState.fakeHomeDir, '.codex', 'auth.json') + writeFileSync(runtimeAuthPath, '{"account":"system"}\n', 'utf-8') + const managedHomePath = createManagedAuth( + testState.userDataDir, + 'account-1', + '{"account":"managed"}\n' + ) + const settings = createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ], + activeCodexManagedAccountId: 'account-1' + }) + const store = createStore(settings) + + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + const service = new CodexRuntimeHomeService(store as never) + settings.activeCodexManagedAccountId = null + writeFileSync(runtimeAuthPath, '{"account":"managed"}\n', 'utf-8') + + service.syncForCurrentSelection() + + expect(readFileSync(runtimeAuthPath, 'utf-8')).toBe('{"account":"system"}\n') + }) + + it('clears an invalid active account selection and restores the system default snapshot', async () => { + const runtimeAuthPath = join(testState.fakeHomeDir, '.codex', 'auth.json') + writeFileSync(runtimeAuthPath, '{"account":"system"}\n', 'utf-8') + const missingManagedHomePath = join( + testState.userDataDir, + 'codex-accounts', + 'account-1', + 'home' + ) + const settings = createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath: missingManagedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ], + activeCodexManagedAccountId: 'account-1' + }) + const store = createStore(settings) + const warnSpy = vi.spyOn(console, 
'warn').mockImplementation(() => {}) + + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + const service = new CodexRuntimeHomeService(store as never) + writeFileSync(runtimeAuthPath, '{"account":"managed"}\n', 'utf-8') + + service.syncForCurrentSelection() + + expect(store.updateSettings).toHaveBeenCalledWith({ activeCodexManagedAccountId: null }) + expect(readFileSync(runtimeAuthPath, 'utf-8')).toBe('{"account":"system"}\n') + expect(warnSpy).toHaveBeenCalled() + }) + + it('returns ~/.codex for Codex launch and rate-limit preparation', async () => { + const store = createStore(createSettings()) + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + const service = new CodexRuntimeHomeService(store as never) + + expect(service.prepareForCodexLaunch()).toBe(join(testState.fakeHomeDir, '.codex')) + expect(service.prepareForRateLimitFetch()).toBe(join(testState.fakeHomeDir, '.codex')) + expect(existsSync(join(testState.fakeHomeDir, '.codex'))).toBe(true) + }) + + it('imports legacy managed-home history into the shared runtime history', async () => { + const runtimeHomePath = join(testState.fakeHomeDir, '.codex') + const runtimeHistoryPath = join(runtimeHomePath, 'history.jsonl') + writeFileSync(runtimeHistoryPath, '{"id":"shared-1"}\n', 'utf-8') + const managedHomePath = createManagedAuth( + testState.userDataDir, + 'account-1', + '{"account":"managed"}\n' + ) + writeFileSync( + join(managedHomePath, 'history.jsonl'), + '{"id":"shared-1"}\n{"id":"managed-2"}\n', + 'utf-8' + ) + const store = createStore(createSettings()) + + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + new CodexRuntimeHomeService(store as never) + + expect(readFileSync(runtimeHistoryPath, 'utf-8')).toBe( + '{"id":"shared-1"}\n{"id":"managed-2"}\n' + ) + expect(existsSync(join(testState.userDataDir, 'codex-runtime-home', 'migration-v1.json'))).toBe( + true + ) + }) + + it('writes auth.json with restrictive permissions', 
async () => { + if (process.platform === 'win32') { + return + } + + const runtimeAuthPath = join(testState.fakeHomeDir, '.codex', 'auth.json') + writeFileSync(runtimeAuthPath, '{"account":"system"}\n', 'utf-8') + const managedHomePath = createManagedAuth( + testState.userDataDir, + 'account-1', + '{"account":"managed"}\n' + ) + const store = createStore( + createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ], + activeCodexManagedAccountId: 'account-1' + }) + ) + + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + new CodexRuntimeHomeService(store as never) + + const mode = statSync(runtimeAuthPath).mode & 0o777 + expect(mode).toBe(0o600) + }) + + it('does not throw when syncForCurrentSelection encounters an error', async () => { + const store = createStore( + createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath: '/nonexistent/path/home', + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ], + activeCodexManagedAccountId: 'account-1' + }) + ) + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}) + + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + expect(() => new CodexRuntimeHomeService(store as never)).not.toThrow() + expect(warnSpy).toHaveBeenCalled() + }) + + it('does not re-run migration when marker already exists', async () => { + const managedHomePath = createManagedAuth( + testState.userDataDir, + 'account-1', + '{"account":"managed"}\n' + ) + writeFileSync(join(managedHomePath, 'history.jsonl'), '{"id":"legacy-1"}\n', 'utf-8') + const store = createStore(createSettings()) + + const { CodexRuntimeHomeService } = await 
import('./runtime-home-service') + new CodexRuntimeHomeService(store as never) + + const runtimeHistoryPath = join(testState.fakeHomeDir, '.codex', 'history.jsonl') + expect(readFileSync(runtimeHistoryPath, 'utf-8')).toContain('legacy-1') + + writeFileSync( + join(managedHomePath, 'history.jsonl'), + '{"id":"legacy-1"}\n{"id":"legacy-2"}\n', + 'utf-8' + ) + + vi.resetModules() + const mod2 = await import('./runtime-home-service') + new mod2.CodexRuntimeHomeService(store as never) + + expect(readFileSync(runtimeHistoryPath, 'utf-8')).not.toContain('legacy-2') + }) + + it('clears system-default snapshot via clearSystemDefaultSnapshot', async () => { + const runtimeAuthPath = join(testState.fakeHomeDir, '.codex', 'auth.json') + writeFileSync(runtimeAuthPath, '{"account":"system"}\n', 'utf-8') + const store = createStore(createSettings()) + + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + const service = new CodexRuntimeHomeService(store as never) + + const snapshotPath = join( + testState.userDataDir, + 'codex-runtime-home', + 'system-default-auth.json' + ) + expect(existsSync(snapshotPath)).toBe(true) + + service.clearSystemDefaultSnapshot() + expect(existsSync(snapshotPath)).toBe(false) + }) + + it('preserves conflicting legacy session files under deterministic names', async () => { + const runtimeSessionsDir = join(testState.fakeHomeDir, '.codex', 'sessions') + mkdirSync(runtimeSessionsDir, { recursive: true }) + writeFileSync(join(runtimeSessionsDir, 'session.json'), '{"turns":[1]}', 'utf-8') + const managedHomePath = createManagedAuth( + testState.userDataDir, + 'account-1', + '{"account":"managed"}\n' + ) + const legacySessionsDir = join(managedHomePath, 'sessions') + mkdirSync(legacySessionsDir, { recursive: true }) + writeFileSync(join(legacySessionsDir, 'session.json'), '{"turns":[1,2]}', 'utf-8') + const store = createStore(createSettings()) + + const { CodexRuntimeHomeService } = await import('./runtime-home-service') + new 
CodexRuntimeHomeService(store as never) + + expect(readFileSync(join(runtimeSessionsDir, 'session.json'), 'utf-8')).toBe('{"turns":[1]}') + expect( + readFileSync(join(runtimeSessionsDir, 'session.orca-legacy-account-1.json'), 'utf-8') + ).toBe('{"turns":[1,2]}') + expect( + readFileSync( + join(testState.userDataDir, 'codex-runtime-home', 'migration-diagnostics.jsonl'), + 'utf-8' + ) + ).toContain('"type":"session-conflict"') + }) +}) diff --git a/src/main/codex-accounts/runtime-home-service.ts b/src/main/codex-accounts/runtime-home-service.ts new file mode 100644 index 00000000..84cfcf91 --- /dev/null +++ b/src/main/codex-accounts/runtime-home-service.ts @@ -0,0 +1,293 @@ +/* eslint-disable max-lines -- Why: this service owns the single runtime-home +contract for Codex inside Orca. Keeping path resolution, system-default +snapshots, auth materialization, and recovery together prevents account-switch +semantics from drifting across PTY launch, login, and quota fetch paths. */ +import { + copyFileSync, + existsSync, + mkdirSync, + readdirSync, + readFileSync, + rmSync, + statSync +} from 'node:fs' +import { homedir } from 'node:os' +import { dirname, extname, join, parse, relative } from 'node:path' +import { app } from 'electron' +import type { CodexManagedAccount } from '../../shared/types' +import type { Store } from '../persistence' +import { writeFileAtomically } from './fs-utils' + +export class CodexRuntimeHomeService { + constructor(private readonly store: Store) { + this.safeMigrateLegacyManagedState() + this.safeSyncForCurrentSelection() + } + + prepareForCodexLaunch(): string { + this.safeSyncForCurrentSelection() + return this.getRuntimeHomePath() + } + + prepareForRateLimitFetch(): string { + this.safeSyncForCurrentSelection() + return this.getRuntimeHomePath() + } + + syncForCurrentSelection(): void { + this.captureSystemDefaultSnapshotIfNeeded() + + const settings = this.store.getSettings() + const activeAccount = this.getActiveAccount( + 
settings.codexManagedAccounts, + settings.activeCodexManagedAccountId + ) + if (!activeAccount) { + this.restoreSystemDefaultSnapshot() + return + } + + const activeAuthPath = join(activeAccount.managedHomePath, 'auth.json') + if (!existsSync(activeAuthPath)) { + console.warn( + '[codex-runtime-home] Active managed account is missing auth.json, restoring system default' + ) + this.store.updateSettings({ activeCodexManagedAccountId: null }) + this.restoreSystemDefaultSnapshot() + return + } + + this.writeRuntimeAuth(readFileSync(activeAuthPath, 'utf-8')) + } + + private safeSyncForCurrentSelection(): void { + try { + this.syncForCurrentSelection() + } catch (error) { + console.warn('[codex-runtime-home] Failed to sync runtime auth state:', error) + } + } + + private getActiveAccount( + accounts: CodexManagedAccount[], + activeAccountId: string | null + ): CodexManagedAccount | null { + if (!activeAccountId) { + return null + } + return accounts.find((account) => account.id === activeAccountId) ?? 
null + } + + private safeMigrateLegacyManagedState(): void { + try { + this.migrateLegacyManagedStateIfNeeded() + } catch (error) { + console.warn('[codex-runtime-home] Failed to migrate legacy managed Codex state:', error) + } + } + + private getRuntimeHomePath(): string { + const runtimeHomePath = join(homedir(), '.codex') + mkdirSync(runtimeHomePath, { recursive: true }) + return runtimeHomePath + } + + private getRuntimeAuthPath(): string { + return join(this.getRuntimeHomePath(), 'auth.json') + } + + private getSystemDefaultSnapshotPath(): string { + return join(this.getRuntimeMetadataDir(), 'system-default-auth.json') + } + + private getRuntimeMetadataDir(): string { + const metadataDir = join(app.getPath('userData'), 'codex-runtime-home') + mkdirSync(metadataDir, { recursive: true }) + return metadataDir + } + + private getMigrationMarkerPath(): string { + return join(this.getRuntimeMetadataDir(), 'migration-v1.json') + } + + private getMigrationDiagnosticsPath(): string { + return join(this.getRuntimeMetadataDir(), 'migration-diagnostics.jsonl') + } + + private getManagedAccountsRoot(): string { + return join(app.getPath('userData'), 'codex-accounts') + } + + private migrateLegacyManagedStateIfNeeded(): void { + if (existsSync(this.getMigrationMarkerPath())) { + return + } + + const managedHomes = this.getLegacyManagedHomes() + for (const managedHomePath of managedHomes) { + const accountId = parse(relative(this.getManagedAccountsRoot(), managedHomePath)).dir.split( + /[\\/]/ + )[0] + if (!accountId) { + continue + } + this.migrateLegacyHistory(managedHomePath) + this.migrateLegacySessions(managedHomePath, accountId) + } + + // Why: migration is intentionally one-shot. Re-importing every startup + // would keep replaying stale managed-home state back into ~/.codex and + // make the shared runtime feel nondeterministic. 
+ writeFileAtomically( + this.getMigrationMarkerPath(), + `${JSON.stringify({ completedAt: Date.now(), migratedHomeCount: managedHomes.length })}\n` + ) + } + + private getLegacyManagedHomes(): string[] { + const managedAccountsRoot = this.getManagedAccountsRoot() + if (!existsSync(managedAccountsRoot)) { + return [] + } + + const accountEntries = readdirSync(managedAccountsRoot, { withFileTypes: true }) + const managedHomes: string[] = [] + for (const entry of accountEntries) { + if (!entry.isDirectory()) { + continue + } + const managedHomePath = join(managedAccountsRoot, entry.name, 'home') + if (existsSync(join(managedHomePath, '.orca-managed-home'))) { + managedHomes.push(managedHomePath) + } + } + return managedHomes.sort() + } + + private migrateLegacyHistory(managedHomePath: string): void { + const legacyHistoryPath = join(managedHomePath, 'history.jsonl') + if (!existsSync(legacyHistoryPath)) { + return + } + + const runtimeHistoryPath = join(this.getRuntimeHomePath(), 'history.jsonl') + const existingLines = existsSync(runtimeHistoryPath) + ? 
readFileSync(runtimeHistoryPath, 'utf-8').split('\n').filter(Boolean) + : [] + const mergedLines = [...existingLines] + const seenLines = new Set(existingLines) + for (const line of readFileSync(legacyHistoryPath, 'utf-8').split('\n')) { + if (!line || seenLines.has(line)) { + continue + } + seenLines.add(line) + mergedLines.push(line) + } + + if (mergedLines.length === 0) { + return + } + writeFileAtomically(runtimeHistoryPath, `${mergedLines.join('\n')}\n`) + } + + private migrateLegacySessions(managedHomePath: string, accountId: string): void { + const legacySessionsRoot = join(managedHomePath, 'sessions') + if (!existsSync(legacySessionsRoot)) { + return + } + + const runtimeSessionsRoot = join(this.getRuntimeHomePath(), 'sessions') + mkdirSync(runtimeSessionsRoot, { recursive: true }) + for (const legacyFilePath of this.listFilesRecursively(legacySessionsRoot)) { + const relativePath = relative(legacySessionsRoot, legacyFilePath) + const runtimeFilePath = join(runtimeSessionsRoot, relativePath) + mkdirSync(dirname(runtimeFilePath), { recursive: true }) + if (!existsSync(runtimeFilePath)) { + copyFileSync(legacyFilePath, runtimeFilePath) + continue + } + + const legacyContents = readFileSync(legacyFilePath) + const runtimeContents = readFileSync(runtimeFilePath) + if (runtimeContents.equals(legacyContents)) { + continue + } + + const preservedPath = this.getPreservedLegacySessionPath(runtimeFilePath, accountId) + copyFileSync(legacyFilePath, preservedPath) + this.appendMigrationDiagnostic({ + type: 'session-conflict', + accountId, + runtimeFilePath, + preservedPath + }) + } + } + + private listFilesRecursively(rootPath: string): string[] { + const stat = statSync(rootPath) + if (!stat.isDirectory()) { + return [rootPath] + } + + const files: string[] = [] + for (const entry of readdirSync(rootPath, { withFileTypes: true })) { + const childPath = join(rootPath, entry.name) + if (entry.isDirectory()) { + files.push(...this.listFilesRecursively(childPath)) + 
continue + } + if (entry.isFile()) { + files.push(childPath) + } + } + return files.sort() + } + + private getPreservedLegacySessionPath(runtimeFilePath: string, accountId: string): string { + const extension = extname(runtimeFilePath) + const basename = runtimeFilePath.slice(0, runtimeFilePath.length - extension.length) + return `${basename}.orca-legacy-${accountId}${extension}` + } + + private appendMigrationDiagnostic(record: Record<string, unknown>): void { + const diagnosticsPath = this.getMigrationDiagnosticsPath() + const existingContents = existsSync(diagnosticsPath) + ? readFileSync(diagnosticsPath, 'utf-8') + : '' + writeFileAtomically(diagnosticsPath, `${existingContents}${JSON.stringify(record)}\n`) + } + + private captureSystemDefaultSnapshotIfNeeded(): void { + const snapshotPath = this.getSystemDefaultSnapshotPath() + if (existsSync(snapshotPath)) { + return + } + + const runtimeAuthPath = this.getRuntimeAuthPath() + if (!existsSync(runtimeAuthPath)) { + return + } + + writeFileAtomically(snapshotPath, readFileSync(runtimeAuthPath, 'utf-8')) + } + + private restoreSystemDefaultSnapshot(): void { + const snapshotPath = this.getSystemDefaultSnapshotPath() + if (!existsSync(snapshotPath)) { + return + } + + this.writeRuntimeAuth(readFileSync(snapshotPath, 'utf-8')) + } + + private writeRuntimeAuth(contents: string): void { + // Why: auth.json contains sensitive credentials. Restrict to owner-only + // so other users on a shared Linux/macOS machine cannot read it. 
+ writeFileAtomically(this.getRuntimeAuthPath(), contents, { mode: 0o600 }) + } + + clearSystemDefaultSnapshot(): void { + rmSync(this.getSystemDefaultSnapshotPath(), { force: true }) + } +} diff --git a/src/main/codex-accounts/service.test.ts b/src/main/codex-accounts/service.test.ts new file mode 100644 index 00000000..69701186 --- /dev/null +++ b/src/main/codex-accounts/service.test.ts @@ -0,0 +1,559 @@ +/* eslint-disable max-lines -- test suite covers config sync, login seeding, and fallback scenarios */ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import { EventEmitter } from 'node:events' +import { existsSync, mkdtempSync, mkdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { PassThrough } from 'node:stream' +import type { GlobalSettings } from '../../shared/types' + +const testState = { userDataDir: '', fakeHomeDir: '' } + +vi.mock('electron', () => ({ + app: { + getPath: () => testState.userDataDir + } +})) + +vi.mock('node:os', async () => { + const actual = await vi.importActual<typeof import('node:os')>('node:os') // eslint-disable-line @typescript-eslint/consistent-type-imports -- vi.importActual requires inline import() + return { + ...actual, + homedir: () => testState.fakeHomeDir + } +}) + +function createSettings(overrides: Partial<GlobalSettings> = {}): GlobalSettings { + return { + workspaceDir: testState.fakeHomeDir, + nestWorkspaces: false, + refreshLocalBaseRefOnWorktreeCreate: false, + branchPrefix: 'git-username', + branchPrefixCustom: '', + theme: 'system', + editorAutoSave: false, + editorAutoSaveDelayMs: 1000, + terminalFontSize: 14, + terminalFontFamily: 'JetBrains Mono', + terminalFontWeight: 500, + terminalCursorStyle: 'block', + terminalCursorBlink: false, + terminalThemeDark: 'orca-dark', + terminalDividerColorDark: '#000000', + terminalUseSeparateLightTheme: false, + terminalThemeLight: 'orca-light', + terminalDividerColorLight: '#ffffff', + 
terminalInactivePaneOpacity: 0.5, + terminalActivePaneOpacity: 1, + terminalPaneOpacityTransitionMs: 150, + terminalDividerThicknessPx: 1, + terminalRightClickToPaste: false, + terminalFocusFollowsMouse: false, + setupScriptLaunchMode: 'split-vertical', + terminalScrollbackBytes: 10_000_000, + openLinksInApp: false, + rightSidebarOpenByDefault: true, + showTitlebarAgentActivity: true, + diffDefaultView: 'inline', + notifications: { + enabled: true, + agentTaskComplete: true, + terminalBell: false, + suppressWhenFocused: true + }, + promptCacheTimerEnabled: false, + promptCacheTtlMs: 300_000, + codexManagedAccounts: [], + activeCodexManagedAccountId: null, + terminalScopeHistoryByWorktree: true, + defaultTuiAgent: null, + skipDeleteWorktreeConfirm: false, + defaultTaskViewPreset: 'all', + agentCmdOverrides: {}, + terminalMacOptionAsAlt: 'false', + experimentalTerminalDaemon: false, + experimentalTerminalDaemonNoticeShown: false, + ...overrides + } +} + +function createStore(settings: GlobalSettings) { + return { + getSettings: vi.fn(() => settings), + updateSettings: vi.fn((updates: Partial<GlobalSettings>) => { + settings = { + ...settings, + ...updates, + notifications: { + ...settings.notifications, + ...updates.notifications + } + } + return settings + }) + } +} + +function createRateLimits() { + return { + refreshForCodexAccountChange: vi.fn().mockResolvedValue(undefined) + } +} + +function createRuntimeHome() { + return { + syncForCurrentSelection: vi.fn() + } +} + +function createManagedHome(rootDir: string, accountId: string, config = '', auth = ''): string { + const managedHomePath = join(rootDir, 'codex-accounts', accountId, 'home') + mkdirSync(managedHomePath, { recursive: true }) + writeFileSync(join(managedHomePath, '.orca-managed-home'), `${accountId}\n`, 'utf-8') + if (config) { + writeFileSync(join(managedHomePath, 'config.toml'), config, 'utf-8') + } + if (auth) { + writeFileSync(join(managedHomePath, 'auth.json'), auth, 'utf-8') + } + return managedHomePath +} + 
+describe('CodexAccountService config sync', () => { + beforeEach(() => { + vi.resetModules() + vi.clearAllMocks() + testState.userDataDir = mkdtempSync(join(tmpdir(), 'orca-codex-accounts-')) + testState.fakeHomeDir = mkdtempSync(join(tmpdir(), 'orca-codex-home-')) + mkdirSync(join(testState.fakeHomeDir, '.codex'), { recursive: true }) + }) + + afterEach(() => { + rmSync(testState.userDataDir, { recursive: true, force: true }) + rmSync(testState.fakeHomeDir, { recursive: true, force: true }) + }) + + it('syncs the canonical ~/.codex/config.toml into managed homes on startup', async () => { + const canonicalConfigPath = join(testState.fakeHomeDir, '.codex', 'config.toml') + const canonicalConfig = 'approval_policy = "never"\nsandbox_mode = "danger-full-access"\n' + writeFileSync(canonicalConfigPath, canonicalConfig, 'utf-8') + const managedHomePath = createManagedHome( + testState.userDataDir, + 'account-1', + 'approval_policy = "on-request"\n', + '{"account":"managed"}\n' + ) + const settings = createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ], + activeCodexManagedAccountId: 'account-1' + }) + const store = createStore(settings) + const rateLimits = createRateLimits() + const runtimeHome = createRuntimeHome() + + const { CodexAccountService } = await import('./service') + new CodexAccountService(store as never, rateLimits as never, runtimeHome as never) + + expect(readFileSync(join(managedHomePath, 'config.toml'), 'utf-8')).toBe(canonicalConfig) + expect(readFileSync(join(managedHomePath, 'auth.json'), 'utf-8')).toBe( + '{"account":"managed"}\n' + ) + }) + + it('does not sync configs when ~/.codex/config.toml is missing', async () => { + const firstManagedHomePath = createManagedHome( + testState.userDataDir, + 'account-1', + 'sandbox_mode = 
"danger-full-access"\n', + '{"account":"one"}\n' + ) + const secondManagedHomePath = createManagedHome( + testState.userDataDir, + 'account-2', + 'sandbox_mode = "workspace-write"\n', + '{"account":"two"}\n' + ) + const settings = createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'one@example.com', + managedHomePath: firstManagedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + }, + { + id: 'account-2', + email: 'two@example.com', + managedHomePath: secondManagedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 2, + updatedAt: 2, + lastAuthenticatedAt: 2 + } + ] + }) + const store = createStore(settings) + const rateLimits = createRateLimits() + const runtimeHome = createRuntimeHome() + + const { CodexAccountService } = await import('./service') + new CodexAccountService(store as never, rateLimits as never, runtimeHome as never) + + expect(readFileSync(join(firstManagedHomePath, 'config.toml'), 'utf-8')).toBe( + 'sandbox_mode = "danger-full-access"\n' + ) + expect(readFileSync(join(secondManagedHomePath, 'config.toml'), 'utf-8')).toBe( + 'sandbox_mode = "workspace-write"\n' + ) + }) + + it('re-syncs config when selecting an account', async () => { + const canonicalConfigPath = join(testState.fakeHomeDir, '.codex', 'config.toml') + const canonicalConfig = 'approval_policy = "never"\nsandbox_mode = "danger-full-access"\n' + writeFileSync(canonicalConfigPath, canonicalConfig, 'utf-8') + const managedHomePath = createManagedHome( + testState.userDataDir, + 'account-1', + 'approval_policy = "on-request"\n' + ) + const settings = createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ] + }) + const 
store = createStore(settings) + const rateLimits = createRateLimits() + const runtimeHome = createRuntimeHome() + + const { CodexAccountService } = await import('./service') + const service = new CodexAccountService( + store as never, + rateLimits as never, + runtimeHome as never + ) + + writeFileSync(join(managedHomePath, 'config.toml'), 'approval_policy = "untrusted"\n', 'utf-8') + + await service.selectAccount('account-1') + + expect(readFileSync(join(managedHomePath, 'config.toml'), 'utf-8')).toBe(canonicalConfig) + expect(rateLimits.refreshForCodexAccountChange).toHaveBeenCalledTimes(1) + expect(runtimeHome.syncForCurrentSelection).toHaveBeenCalledTimes(1) + }) + + it('does not throw on startup when the canonical config path is unreadable', async () => { + mkdirSync(join(testState.fakeHomeDir, '.codex', 'config.toml'), { recursive: true }) + const managedHomePath = createManagedHome( + testState.userDataDir, + 'account-1', + 'approval_policy = "on-request"\n' + ) + const settings = createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ] + }) + const store = createStore(settings) + const rateLimits = createRateLimits() + const runtimeHome = createRuntimeHome() + + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}) + const { CodexAccountService } = await import('./service') + + expect( + () => new CodexAccountService(store as never, rateLimits as never, runtimeHome as never) + ).not.toThrow() + expect(readFileSync(join(managedHomePath, 'config.toml'), 'utf-8')).toBe( + 'approval_policy = "on-request"\n' + ) + expect(warnSpy).toHaveBeenCalled() + }) + + it('seeds the managed home config before codex login runs', async () => { + vi.resetModules() + + const canonicalConfigPath = join(testState.fakeHomeDir, '.codex', 'config.toml') + const 
canonicalConfig = 'approval_policy = "never"\nsandbox_mode = "danger-full-access"\n' + writeFileSync(canonicalConfigPath, canonicalConfig, 'utf-8') + + const spawnMock = vi.fn( + (_command: string, _args: string[], options: { env: NodeJS.ProcessEnv }) => { + const child = new EventEmitter() as EventEmitter & { + stdout: PassThrough + stderr: PassThrough + kill: () => void + } + child.stdout = new PassThrough() + child.stderr = new PassThrough() + child.kill = vi.fn() + + const loginHome = options.env.CODEX_HOME + expect(loginHome).toBeTruthy() + expect(readFileSync(join(loginHome!, 'config.toml'), 'utf-8')).toBe(canonicalConfig) + + const payload = Buffer.from(JSON.stringify({ email: 'user@example.com' })).toString( + 'base64url' + ) + writeFileSync( + join(loginHome!, 'auth.json'), + JSON.stringify({ + tokens: { + id_token: `header.${payload}.signature` + } + }), + 'utf-8' + ) + + queueMicrotask(() => child.emit('close', 0)) + return child + } + ) + + vi.doMock('node:child_process', () => ({ + spawn: spawnMock + })) + vi.doMock('../codex-cli/command', () => ({ + resolveCodexCommand: () => 'codex' + })) + + const settings = createSettings() + const store = createStore(settings) + const rateLimits = createRateLimits() + const runtimeHome = createRuntimeHome() + + const { CodexAccountService } = await import('./service') + const service = new CodexAccountService( + store as never, + rateLimits as never, + runtimeHome as never + ) + + await service.addAccount() + + expect(spawnMock).toHaveBeenCalledTimes(1) + expect(runtimeHome.syncForCurrentSelection).toHaveBeenCalledTimes(1) + }) + + it('deselects active account via selectAccount(null)', async () => { + const managedHomePath = createManagedHome( + testState.userDataDir, + 'account-1', + '', + '{"account":"managed"}\n' + ) + const settings = createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + 
workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ], + activeCodexManagedAccountId: 'account-1' + }) + const store = createStore(settings) + const rateLimits = createRateLimits() + const runtimeHome = createRuntimeHome() + + const { CodexAccountService } = await import('./service') + const service = new CodexAccountService( + store as never, + rateLimits as never, + runtimeHome as never + ) + + const result = await service.selectAccount(null) + + expect(result.activeAccountId).toBe(null) + expect(runtimeHome.syncForCurrentSelection).toHaveBeenCalled() + expect(rateLimits.refreshForCodexAccountChange).toHaveBeenCalled() + }) + + it('removes an account and cleans up managed home', async () => { + const managedHomePath = createManagedHome( + testState.userDataDir, + 'account-1', + '', + '{"account":"managed"}\n' + ) + const settings = createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ], + activeCodexManagedAccountId: 'account-1' + }) + const store = createStore(settings) + const rateLimits = createRateLimits() + const runtimeHome = createRuntimeHome() + + const { CodexAccountService } = await import('./service') + const service = new CodexAccountService( + store as never, + rateLimits as never, + runtimeHome as never + ) + + const result = await service.removeAccount('account-1') + + expect(result.accounts).toHaveLength(0) + expect(result.activeAccountId).toBe(null) + expect(existsSync(managedHomePath)).toBe(false) + expect(runtimeHome.syncForCurrentSelection).toHaveBeenCalled() + }) + + it('lists accounts with normalizeActiveSelection', async () => { + const managedHomePath = createManagedHome( + testState.userDataDir, + 'account-1', + '', + '{"account":"managed"}\n' + ) + const settings = createSettings({ + 
codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ], + activeCodexManagedAccountId: 'nonexistent-id' + }) + const store = createStore(settings) + const rateLimits = createRateLimits() + const runtimeHome = createRuntimeHome() + + const { CodexAccountService } = await import('./service') + const service = new CodexAccountService( + store as never, + rateLimits as never, + runtimeHome as never + ) + + const result = service.listAccounts() + + expect(result.accounts).toHaveLength(1) + expect(result.activeAccountId).toBe(null) + }) + + it('rejects paths that escape the managed accounts root', async () => { + const settings = createSettings() + const store = createStore(settings) + const rateLimits = createRateLimits() + const runtimeHome = createRuntimeHome() + + const { CodexAccountService } = await import('./service') + const service = new CodexAccountService( + store as never, + rateLimits as never, + runtimeHome as never + ) + + await expect(service.removeAccount('nonexistent')).rejects.toThrow('no longer exists') + }) + + it('serializes concurrent mutations', async () => { + const managedHomePath = createManagedHome( + testState.userDataDir, + 'account-1', + '', + '{"account":"managed"}\n' + ) + const settings = createSettings({ + codexManagedAccounts: [ + { + id: 'account-1', + email: 'user@example.com', + managedHomePath, + providerAccountId: null, + workspaceLabel: null, + workspaceAccountId: null, + createdAt: 1, + updatedAt: 1, + lastAuthenticatedAt: 1 + } + ] + }) + const store = createStore(settings) + const callOrder: string[] = [] + const rateLimits = { + refreshForCodexAccountChange: vi.fn(async () => { + callOrder.push('refresh') + }) + } + const runtimeHome = createRuntimeHome() + + const { CodexAccountService } = await import('./service') + const service = new 
CodexAccountService( + store as never, + rateLimits as never, + runtimeHome as never + ) + + const p1 = service.selectAccount('account-1') + const p2 = service.selectAccount(null) + await Promise.all([p1, p2]) + + expect(rateLimits.refreshForCodexAccountChange).toHaveBeenCalledTimes(2) + }) +}) diff --git a/src/main/codex-accounts/service.ts b/src/main/codex-accounts/service.ts index 16e798bf..ac9da586 100644 --- a/src/main/codex-accounts/service.ts +++ b/src/main/codex-accounts/service.ts @@ -5,12 +5,15 @@ import { randomUUID } from 'node:crypto' import { spawn } from 'node:child_process' import { existsSync, mkdirSync, readFileSync, realpathSync, rmSync, writeFileSync } from 'node:fs' import { join, relative, resolve, sep } from 'node:path' +import { homedir } from 'node:os' import { app } from 'electron' import type { CodexManagedAccount, CodexManagedAccountSummary, CodexRateLimitAccountsState } from '../../shared/types' +import type { CodexRuntimeHomeService } from './runtime-home-service' +import { writeFileAtomically } from './fs-utils' import { resolveCodexCommand } from '../codex-cli/command' import type { Store } from '../persistence' import type { RateLimitService } from '../rate-limits/service' @@ -31,10 +34,24 @@ type ResolvedCodexIdentity = { } export class CodexAccountService { + // Why: account mutations read settings, do async work (login, rate-limit + // refresh), then write settings. Without serialization, overlapping calls + // (e.g. double-click "Add Account") can cause lost updates. 
+ private mutationQueue: Promise<void> = Promise.resolve() + constructor( private readonly store: Store, - private readonly rateLimits: RateLimitService - ) {} + private readonly rateLimits: RateLimitService, + private readonly runtimeHome: CodexRuntimeHomeService + ) { + this.safeSyncCanonicalConfigToManagedHomes() + } + + private serializeMutation<T>(fn: () => Promise<T>): Promise<T> { + const next = this.mutationQueue.then(fn, fn) + this.mutationQueue = next.catch(() => {}) + return next + } listAccounts(): CodexRateLimitAccountsState { this.normalizeActiveSelection() @@ -42,10 +59,27 @@ export class CodexAccountService { } async addAccount(): Promise<CodexRateLimitAccountsState> { + return this.serializeMutation(() => this.doAddAccount()) + } + + async reauthenticateAccount(accountId: string): Promise<CodexRateLimitAccountsState> { + return this.serializeMutation(() => this.doReauthenticateAccount(accountId)) + } + + async removeAccount(accountId: string): Promise<CodexRateLimitAccountsState> { + return this.serializeMutation(() => this.doRemoveAccount(accountId)) + } + + async selectAccount(accountId: string | null): Promise<CodexRateLimitAccountsState> { + return this.serializeMutation(() => this.doSelectAccount(accountId)) + } + + private async doAddAccount(): Promise<CodexRateLimitAccountsState> { const accountId = randomUUID() const managedHomePath = this.createManagedHome(accountId) try { + this.safeSyncCanonicalConfigIntoManagedHome(managedHomePath) await this.runCodexLogin(managedHomePath) const identity = this.readIdentityFromHome(managedHomePath) if (!identity.email) { @@ -70,6 +104,8 @@ export class CodexAccountService { codexManagedAccounts: [...settings.codexManagedAccounts, account], activeCodexManagedAccountId: account.id }) + this.safeSyncCanonicalConfigToManagedHomes() + this.runtimeHome.syncForCurrentSelection() await this.rateLimits.refreshForCodexAccountChange() return this.getSnapshot() @@ -79,7 +115,7 @@ export class CodexAccountService { } } - async reauthenticateAccount(accountId: string): Promise<CodexRateLimitAccountsState> { + private async doReauthenticateAccount(accountId: string): Promise<CodexRateLimitAccountsState> { const account = 
this.requireAccount(accountId) const managedHomePath = this.assertManagedHomePath(account.managedHomePath) @@ -108,6 +144,8 @@ this.store.updateSettings({ codexManagedAccounts: updatedAccounts }) + this.safeSyncCanonicalConfigToManagedHomes() + this.runtimeHome.syncForCurrentSelection() // Why: re-auth can change which actual Codex identity the managed home // points at. Force a fresh read immediately so the status bar cannot keep @@ -116,7 +154,7 @@ return this.getSnapshot() } - async removeAccount(accountId: string): Promise<CodexRateLimitAccountsState> { + private async doRemoveAccount(accountId: string): Promise<CodexRateLimitAccountsState> { const account = this.requireAccount(accountId) const settings = this.store.getSettings() const nextAccounts = settings.codexManagedAccounts.filter((entry) => entry.id !== accountId) @@ -129,13 +167,14 @@ codexManagedAccounts: nextAccounts, activeCodexManagedAccountId: nextActiveId }) + this.runtimeHome.syncForCurrentSelection() this.safeRemoveManagedHome(account.managedHomePath) await this.rateLimits.refreshForCodexAccountChange() return this.getSnapshot() } - async selectAccount(accountId: string | null): Promise<CodexRateLimitAccountsState> { + private async doSelectAccount(accountId: string | null): Promise<CodexRateLimitAccountsState> { if (accountId !== null) { this.requireAccount(accountId) } @@ -143,29 +182,13 @@ this.store.updateSettings({ activeCodexManagedAccountId: accountId }) + this.safeSyncCanonicalConfigToManagedHomes() + this.runtimeHome.syncForCurrentSelection() await this.rateLimits.refreshForCodexAccountChange() return this.getSnapshot() } - getSelectedManagedHomePath(): string | null { - const account = this.getActiveAccount() - if (!account) { - return null - } - - try { - return this.assertManagedHomePath(account.managedHomePath) - } catch (error) { - // Why: if the selected managed home was deleted or tampered with outside - // Orca, the safest recovery is to fall back to the
ambient system Codex - // login immediately rather than keeping a broken active selection around. - this.store.updateSettings({ activeCodexManagedAccountId: null }) - console.warn('[codex-accounts] Ignoring invalid managed home path:', error) - return null - } - } - private getSnapshot(): CodexRateLimitAccountsState { const settings = this.store.getSettings() return { @@ -176,19 +199,6 @@ export class CodexAccountService { } } - private getActiveAccount(): CodexManagedAccount | null { - this.normalizeActiveSelection() - const settings = this.store.getSettings() - if (!settings.activeCodexManagedAccountId) { - return null - } - return ( - settings.codexManagedAccounts.find( - (entry) => entry.id === settings.activeCodexManagedAccountId - ) ?? null - ) - } - private toSummary(account: CodexManagedAccount): CodexManagedAccountSummary { return { id: account.id, @@ -234,6 +244,72 @@ export class CodexAccountService { return this.assertManagedHomePath(managedHomePath) } + private safeSyncCanonicalConfigToManagedHomes(): void { + try { + this.syncCanonicalConfigToManagedHomes() + } catch (error) { + console.warn('[codex-accounts] Failed to sync canonical config:', error) + } + } + + private safeSyncCanonicalConfigIntoManagedHome(managedHomePath: string): void { + try { + this.syncCanonicalConfigIntoManagedHome(managedHomePath) + } catch (error) { + console.warn('[codex-accounts] Failed to seed managed config:', error) + } + } + + private syncCanonicalConfigToManagedHomes(): void { + const canonicalConfig = this.readCanonicalConfig() + if (canonicalConfig === null) { + return + } + + const settings = this.store.getSettings() + for (const account of settings.codexManagedAccounts) { + try { + this.syncCanonicalConfigIntoManagedHome(account.managedHomePath, canonicalConfig) + } catch (error) { + console.warn('[codex-accounts] Failed to sync managed config:', error) + } + } + } + + private syncCanonicalConfigIntoManagedHome( + managedHomePath: string, + canonicalConfig = 
this.readCanonicalConfig() + ): void { + if (canonicalConfig === null) { + return + } + + const trustedManagedHomePath = this.assertManagedHomePath(managedHomePath) + // Why: Orca account switching is meant to swap Codex credentials and quota + // identity, not silently fork the user's sandbox/config defaults. Syncing + // one canonical config into every managed home keeps auth isolated per + // account while preserving consistent Codex behavior. + this.writeManagedConfig(trustedManagedHomePath, canonicalConfig) + } + + private readCanonicalConfig(): string | null { + const primaryConfigPath = join(homedir(), '.codex', 'config.toml') + if (!existsSync(primaryConfigPath)) { + return null + } + + try { + return readFileSync(primaryConfigPath, 'utf-8') + } catch (error) { + console.warn('[codex-accounts] Failed to read canonical config:', error) + return null + } + } + + private writeManagedConfig(managedHomePath: string, contents: string): void { + writeFileAtomically(join(managedHomePath, 'config.toml'), contents) + } + private getManagedAccountsRoot(): string { const root = join(app.getPath('userData'), 'codex-accounts') mkdirSync(root, { recursive: true }) @@ -252,10 +328,7 @@ export class CodexAccountService { const canonicalRoot = realpathSync(resolvedRoot) const relativePath = relative(canonicalRoot, canonicalCandidate) const escaped = - relativePath === '' || - relativePath === '.' || - relativePath.startsWith('..') || - relativePath.includes(`..${sep}`) + relativePath === '' || relativePath.startsWith('..') || relativePath.includes(`..${sep}`) if (escaped) { throw new Error('Managed Codex home escaped Orca account storage.') @@ -278,6 +351,18 @@ export class CodexAccountService { } rmSync(managedHomePath, { recursive: true, force: true }) + + // Why: managed homes live at //home. Removing + // just the home/ leaf leaves an empty / directory behind. 
+ try { + const parentDir = resolve(managedHomePath, '..') + const root = this.getManagedAccountsRoot() + if (parentDir.startsWith(root) && parentDir !== root) { + rmSync(parentDir, { recursive: true, force: true }) + } + } catch { + // Best-effort cleanup + } } private async runCodexLogin(managedHomePath: string): Promise<void> { @@ -390,6 +475,10 @@ const authFilePath = join(this.assertManagedHomePath(managedHomePath), 'auth.json') const raw = JSON.parse(readFileSync(authFilePath, 'utf-8')) as Record<string, unknown> + // Why: API-key-based auth files have no OAuth tokens or JWT identity + // claims. Returning nulls causes the caller to fail with a clear + // "could not resolve the account email" error rather than crashing + // on missing nested token fields. if (typeof raw.OPENAI_API_KEY === 'string' && raw.OPENAI_API_KEY.trim() !== '') { return { idToken: null, diff --git a/src/main/index.ts b/src/main/index.ts index c93839e3..e06b8161 100644 --- a/src/main/index.ts +++ b/src/main/index.ts @@ -31,6 +31,7 @@ import { RateLimitService } from './rate-limits/service' import { attachMainWindowServices } from './window/attach-main-window-services' import { createMainWindow } from './window/createMainWindow' import { CodexAccountService } from './codex-accounts/service' +import { CodexRuntimeHomeService } from './codex-accounts/runtime-home-service' import { openCodeHookService } from './opencode/hook-service' let mainWindow: BrowserWindow | null = null @@ -43,6 +44,7 @@ let stats: StatsCollector | null = null let claudeUsage: ClaudeUsageStore | null = null let codexUsage: CodexUsageStore | null = null let codexAccounts: CodexAccountService | null = null +let codexRuntimeHome: CodexRuntimeHomeService | null = null let runtime: OrcaRuntimeService | null = null let rateLimits: RateLimitService | null = null let runtimeRpc: OrcaRuntimeRpcServer | null = null @@ -85,6 +87,9 @@ function openMainWindow(): BrowserWindow { if (!codexAccounts) { throw new
Error('Codex account service must be initialized before opening the main window') } + if (!codexRuntimeHome) { + throw new Error('Codex runtime home service must be initialized before opening the main window') + } const window = createMainWindow(store, { getIsQuitting: () => isQuitting, @@ -102,9 +107,7 @@ function openMainWindow(): BrowserWindow { rateLimits, window.webContents.id ) - attachMainWindowServices(window, store, runtime, () => - codexAccounts!.getSelectedManagedHomePath() - ) + attachMainWindowServices(window, store, runtime, () => codexRuntimeHome!.prepareForCodexLaunch()) rateLimits.attach(window) rateLimits.start() window.on('closed', () => { @@ -130,8 +133,9 @@ app.whenReady().then(async () => { claudeUsage = new ClaudeUsageStore(store) codexUsage = new CodexUsageStore(store) rateLimits = new RateLimitService() - codexAccounts = new CodexAccountService(store, rateLimits) - rateLimits.setCodexHomePathResolver(() => codexAccounts!.getSelectedManagedHomePath()) + codexRuntimeHome = new CodexRuntimeHomeService(store) + codexAccounts = new CodexAccountService(store, rateLimits, codexRuntimeHome) + rateLimits.setCodexHomePathResolver(() => codexRuntimeHome!.prepareForRateLimitFetch()) runtime = new OrcaRuntimeService(store, stats) nativeTheme.themeSource = store.getSettings().theme ?? 'system' registerAppMenu({ diff --git a/src/main/ipc/pty.ts b/src/main/ipc/pty.ts index 78aa842f..aa993476 100644 --- a/src/main/ipc/pty.ts +++ b/src/main/ipc/pty.ts @@ -165,11 +165,11 @@ export function registerPtyHandlers( piTitlebarExtensionService.buildPtyEnv(id, baseEnv.PI_CODING_AGENT_DIR) ) - // Why: the selected Codex account should affect Codex launched inside - // Orca terminals too, not just Orca's background quota fetches. Inject - // the managed CODEX_HOME only into this PTY environment so the override - // stays scoped to Orca terminals instead of mutating the app process or - // the user's external shells. 
+ // Why: Codex account switching now materializes auth into one shared + // runtime home (~/.codex), and Codex launched inside Orca terminals + // must use that same prepared home as quota fetches and other entry + // points. Keep the override PTY-scoped so Orca does not mutate the app + // process environment or the user's unrelated external shells. if (selectedCodexHomePath) { baseEnv.CODEX_HOME = selectedCodexHomePath } diff --git a/src/renderer/src/components/terminal-pane/pty-connection.test.ts b/src/renderer/src/components/terminal-pane/pty-connection.test.ts index 9701a688..a414510e 100644 --- a/src/renderer/src/components/terminal-pane/pty-connection.test.ts +++ b/src/renderer/src/components/terminal-pane/pty-connection.test.ts @@ -3,10 +3,15 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' type StoreState = { tabsByWorktree: Record + ptyIdsByTabId?: Record worktreesByRepo: Record repos: { id: string; connectionId?: string | null }[] cacheTimerByKey: Record settings: { promptCacheTimerEnabled?: boolean; experimentalTerminalDaemon?: boolean } | null + codexRestartNoticeByPtyId: Record< + string, + { previousAccountLabel: string; nextAccountLabel: string } + > consumePendingColdRestore: ReturnType consumePendingSnapshot: ReturnType } @@ -151,12 +156,16 @@ describe('connectPanePty', () => { tabsByWorktree: { 'wt-1': [{ id: 'tab-1', ptyId: 'tab-pty' }] }, + ptyIdsByTabId: { + 'tab-1': ['tab-pty'] + }, worktreesByRepo: { repo1: [{ id: 'wt-1', repoId: 'repo1', path: '/tmp/wt-1' }] }, repos: [{ id: 'repo1', connectionId: null }], cacheTimerByKey: {}, settings: { promptCacheTimerEnabled: true }, + codexRestartNoticeByPtyId: {}, consumePendingColdRestore: vi.fn(() => null), consumePendingSnapshot: vi.fn(() => null) } as StoreState @@ -220,6 +229,80 @@ describe('connectPanePty', () => { ) }) + it('blocks input to stale Codex panes until they restart', async () => { + const { connectPanePty } = await import('./pty-connection') + + const 
transport = createMockTransport('pty-codex-stale') + transportFactoryQueue.push(transport) + mockStoreState = { + ...mockStoreState, + tabsByWorktree: { + 'wt-1': [{ id: 'tab-1', ptyId: 'pty-codex-stale' }] + }, + ptyIdsByTabId: { + 'tab-1': ['pty-codex-stale'] + }, + codexRestartNoticeByPtyId: { + 'pty-codex-stale': { previousAccountLabel: 'A', nextAccountLabel: 'B' } + } + } + + const pane = createPane(1) + let onDataHandler: ((data: string) => void) | null = null + pane.terminal.onData = vi.fn(((handler: (data: string) => void) => { + onDataHandler = handler + return { dispose: vi.fn() } + }) as typeof pane.terminal.onData) + const manager = createManager(1) + const deps = createDeps() + + connectPanePty(pane as never, manager as never, deps as never) + + expect(onDataHandler).toBeDefined() + if (!onDataHandler) { + throw new Error('expected onData handler to be registered') + } + const sendTerminalInput = onDataHandler as (data: string) => void + sendTerminalInput('hello') + + expect(transport.sendInput).not.toHaveBeenCalled() + }) + + it('blocks input when tab-level ptyId is stale even if panePtyId is null', async () => { + const { connectPanePty } = await import('./pty-connection') + + const transport = createMockTransport(null) + transportFactoryQueue.push(transport) + mockStoreState = { + ...mockStoreState, + tabsByWorktree: { + 'wt-1': [{ id: 'tab-1', ptyId: 'tab-level-pty' }] + }, + codexRestartNoticeByPtyId: { + 'tab-level-pty': { previousAccountLabel: 'A', nextAccountLabel: 'B' } + } + } + + const pane = createPane(1) + let onDataHandler: ((data: string) => void) | null = null + pane.terminal.onData = vi.fn(((handler: (data: string) => void) => { + onDataHandler = handler + return { dispose: vi.fn() } + }) as typeof pane.terminal.onData) + const manager = createManager(1) + const deps = createDeps() + + connectPanePty(pane as never, manager as never, deps as never) + + expect(onDataHandler).toBeDefined() + if (!onDataHandler) { + throw new 
Error('expected onData handler to be registered') + } + ;(onDataHandler as (data: string) => void)('hello') + + expect(transport.sendInput).not.toHaveBeenCalled() + }) + it('sends startup command via sendInput for SSH connections (relay has no shell-ready mechanism)', async () => { // Capture the setTimeout callback directly so we can fire it without // vi.useFakeTimers() (which would also replace the rAF mock from beforeEach). diff --git a/src/renderer/src/components/terminal-pane/pty-connection.ts b/src/renderer/src/components/terminal-pane/pty-connection.ts index f4eee4fd..6e3e6671 100644 --- a/src/renderer/src/components/terminal-pane/pty-connection.ts +++ b/src/renderer/src/components/terminal-pane/pty-connection.ts @@ -11,6 +11,22 @@ import type { PtyConnectionDeps } from './pty-connection-types' const pendingSpawnByTabId = new Map<string, Promise<void>>() +function isCodexPaneStale(args: { tabId: string; panePtyId: string | null }): boolean { + const state = useAppStore.getState() + const { codexRestartNoticeByPtyId } = state + if (args.panePtyId && codexRestartNoticeByPtyId[args.panePtyId]) { + return true + } + + const tabs = Object.values(state.tabsByWorktree ?? {}).flat() + const tab = tabs.find((entry) => entry.id === args.tabId) + if (tab?.ptyId && codexRestartNoticeByPtyId[tab.ptyId]) { + return true + } + + return false +} + export function connectPanePty( pane: ManagedPane, manager: PaneManager, @@ -163,6 +179,16 @@ export function connectPanePty( deps.paneTransportsRef.current.set(pane.id, transport) const onDataDisposable = pane.terminal.onData((data) => { + const currentPtyId = transport.getPtyId() + // Why: after a Codex account switch, the runtime auth has already moved to + // the newly selected account. Stale panes must not keep sending input until + // they restart, or work can execute under the wrong account while the UI + // still says the pane is stale.
Fall back to the tab's persisted PTY ID so + // the block still holds during reconnect races before the live transport has + // updated its local PTY binding. + if (isCodexPaneStale({ tabId: deps.tabId, panePtyId: currentPtyId })) { + return + } transport.sendInput(data) })