mirror of
https://github.com/google-gemini/gemini-cli
synced 2026-04-21 13:37:17 +00:00
Build binary (#18933)
Co-authored-by: Gal Zahavi <38544478+galz10@users.noreply.github.com>
This commit is contained in:
parent
46231a1755
commit
0d69f9f7fa
16 changed files with 1881 additions and 30 deletions
160
.github/workflows/test-build-binary.yml
vendored
Normal file
160
.github/workflows/test-build-binary.yml
vendored
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
name: 'Test Build Binary'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: 'read'
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: 'bash'
|
||||
|
||||
jobs:
|
||||
build-node-binary:
|
||||
name: 'Build Binary (${{ matrix.os }})'
|
||||
runs-on: '${{ matrix.os }}'
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: 'ubuntu-latest'
|
||||
platform_name: 'linux-x64'
|
||||
arch: 'x64'
|
||||
- os: 'windows-latest'
|
||||
platform_name: 'win32-x64'
|
||||
arch: 'x64'
|
||||
- os: 'macos-latest' # Apple Silicon (ARM64)
|
||||
platform_name: 'darwin-arm64'
|
||||
arch: 'arm64'
|
||||
- os: 'macos-latest' # Intel (x64) running on ARM via Rosetta
|
||||
platform_name: 'darwin-x64'
|
||||
arch: 'x64'
|
||||
|
||||
steps:
|
||||
- name: 'Checkout'
|
||||
uses: 'actions/checkout@v4'
|
||||
|
||||
- name: 'Optimize Windows Performance'
|
||||
if: "matrix.os == 'windows-latest'"
|
||||
run: |
|
||||
Set-MpPreference -DisableRealtimeMonitoring $true
|
||||
Stop-Service -Name "wsearch" -Force -ErrorAction SilentlyContinue
|
||||
Set-Service -Name "wsearch" -StartupType Disabled
|
||||
Stop-Service -Name "SysMain" -Force -ErrorAction SilentlyContinue
|
||||
Set-Service -Name "SysMain" -StartupType Disabled
|
||||
shell: 'powershell'
|
||||
|
||||
- name: 'Set up Node.js'
|
||||
uses: 'actions/setup-node@v4'
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
architecture: '${{ matrix.arch }}'
|
||||
cache: 'npm'
|
||||
|
||||
- name: 'Install dependencies'
|
||||
run: 'npm ci'
|
||||
|
||||
- name: 'Check Secrets'
|
||||
id: 'check_secrets'
|
||||
run: |
|
||||
echo "has_win_cert=${{ secrets.WINDOWS_PFX_BASE64 != '' }}" >> "$GITHUB_OUTPUT"
|
||||
echo "has_mac_cert=${{ secrets.MACOS_CERT_P12_BASE64 != '' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: 'Setup Windows SDK (Windows)'
|
||||
if: "matrix.os == 'windows-latest'"
|
||||
uses: 'microsoft/setup-msbuild@v2'
|
||||
|
||||
- name: 'Add Signtool to Path (Windows)'
|
||||
if: "matrix.os == 'windows-latest'"
|
||||
run: |
|
||||
$signtoolPath = Get-ChildItem -Path "C:\Program Files (x86)\Windows Kits\10\bin" -Recurse -Filter "signtool.exe" | Sort-Object FullName -Descending | Select-Object -First 1 -ExpandProperty DirectoryName
|
||||
echo "Found signtool at: $signtoolPath"
|
||||
echo "$signtoolPath" >> $env:GITHUB_PATH
|
||||
shell: 'pwsh'
|
||||
|
||||
- name: 'Setup macOS Keychain'
|
||||
if: "startsWith(matrix.os, 'macos') && steps.check_secrets.outputs.has_mac_cert == 'true' && github.event_name != 'pull_request'"
|
||||
env:
|
||||
BUILD_CERTIFICATE_BASE64: '${{ secrets.MACOS_CERT_P12_BASE64 }}'
|
||||
P12_PASSWORD: '${{ secrets.MACOS_CERT_PASSWORD }}'
|
||||
KEYCHAIN_PASSWORD: 'temp-password'
|
||||
run: |
|
||||
# Create the P12 file
|
||||
echo "$BUILD_CERTIFICATE_BASE64" | base64 --decode > certificate.p12
|
||||
|
||||
# Create a temporary keychain
|
||||
security create-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
|
||||
security default-keychain -s build.keychain
|
||||
security unlock-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
|
||||
|
||||
# Import the certificate
|
||||
security import certificate.p12 -k build.keychain -P "$P12_PASSWORD" -T /usr/bin/codesign
|
||||
|
||||
# Allow codesign to access it
|
||||
security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" build.keychain
|
||||
|
||||
# Set Identity for build script
|
||||
echo "APPLE_IDENTITY=${{ secrets.MACOS_CERT_IDENTITY }}" >> "$GITHUB_ENV"
|
||||
|
||||
- name: 'Setup Windows Certificate'
|
||||
if: "matrix.os == 'windows-latest' && steps.check_secrets.outputs.has_win_cert == 'true' && github.event_name != 'pull_request'"
|
||||
env:
|
||||
PFX_BASE64: '${{ secrets.WINDOWS_PFX_BASE64 }}'
|
||||
PFX_PASSWORD: '${{ secrets.WINDOWS_PFX_PASSWORD }}'
|
||||
run: |
|
||||
$pfx_cert_byte = [System.Convert]::FromBase64String("$env:PFX_BASE64")
|
||||
$certPath = Join-Path (Get-Location) "cert.pfx"
|
||||
[IO.File]::WriteAllBytes($certPath, $pfx_cert_byte)
|
||||
echo "WINDOWS_PFX_FILE=$certPath" >> $env:GITHUB_ENV
|
||||
echo "WINDOWS_PFX_PASSWORD=$env:PFX_PASSWORD" >> $env:GITHUB_ENV
|
||||
shell: 'pwsh'
|
||||
|
||||
- name: 'Build Binary'
|
||||
run: 'npm run build:binary'
|
||||
|
||||
- name: 'Build Core Package'
|
||||
run: 'npm run build -w @google/gemini-cli-core'
|
||||
|
||||
- name: 'Verify Output Exists'
|
||||
run: |
|
||||
if [ -f "dist/${{ matrix.platform_name }}/gemini" ]; then
|
||||
echo "Binary found at dist/${{ matrix.platform_name }}/gemini"
|
||||
elif [ -f "dist/${{ matrix.platform_name }}/gemini.exe" ]; then
|
||||
echo "Binary found at dist/${{ matrix.platform_name }}/gemini.exe"
|
||||
else
|
||||
echo "Error: Binary not found in dist/${{ matrix.platform_name }}/"
|
||||
ls -R dist/
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 'Smoke Test Binary'
|
||||
run: |
|
||||
echo "Running binary smoke test..."
|
||||
if [ -f "dist/${{ matrix.platform_name }}/gemini.exe" ]; then
|
||||
"./dist/${{ matrix.platform_name }}/gemini.exe" --version
|
||||
else
|
||||
"./dist/${{ matrix.platform_name }}/gemini" --version
|
||||
fi
|
||||
|
||||
- name: 'Run Integration Tests'
|
||||
if: "github.event_name != 'pull_request'"
|
||||
env:
|
||||
GEMINI_API_KEY: '${{ secrets.GEMINI_API_KEY }}'
|
||||
run: |
|
||||
echo "Running integration tests with binary..."
|
||||
if [[ "${{ matrix.os }}" == 'windows-latest' ]]; then
|
||||
BINARY_PATH="$(cygpath -m "$(pwd)/dist/${{ matrix.platform_name }}/gemini.exe")"
|
||||
else
|
||||
BINARY_PATH="$(pwd)/dist/${{ matrix.platform_name }}/gemini"
|
||||
fi
|
||||
echo "Using binary at $BINARY_PATH"
|
||||
export INTEGRATION_TEST_GEMINI_BINARY_PATH="$BINARY_PATH"
|
||||
npm run test:integration:sandbox:none -- --testTimeout=600000
|
||||
|
||||
- name: 'Upload Artifact'
|
||||
uses: 'actions/upload-artifact@v4'
|
||||
with:
|
||||
name: 'gemini-cli-${{ matrix.platform_name }}'
|
||||
path: 'dist/${{ matrix.platform_name }}/'
|
||||
retention-days: 5
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -61,4 +61,4 @@ gemini-debug.log
|
|||
.genkit
|
||||
.gemini-clipboard/
|
||||
.eslintcache
|
||||
evals/logs/
|
||||
evals/logs/
|
||||
|
|
|
|||
|
|
@ -88,6 +88,9 @@ const cliConfig = {
|
|||
outfile: 'bundle/gemini.js',
|
||||
define: {
|
||||
'process.env.CLI_VERSION': JSON.stringify(pkg.version),
|
||||
'process.env.GEMINI_SANDBOX_IMAGE_DEFAULT': JSON.stringify(
|
||||
pkg.config?.sandboxImageUri,
|
||||
),
|
||||
},
|
||||
plugins: createWasmPlugins(),
|
||||
alias: {
|
||||
|
|
|
|||
|
|
@ -165,14 +165,15 @@ describe('Hooks Agent Flow', () => {
|
|||
|
||||
// BeforeModel hook to track message counts across LLM calls
|
||||
const messageCountFile = join(rig.testDir!, 'message-counts.json');
|
||||
const escapedPath = JSON.stringify(messageCountFile);
|
||||
const beforeModelScript = `
|
||||
const fs = require('fs');
|
||||
const input = JSON.parse(fs.readFileSync(0, 'utf-8'));
|
||||
const messageCount = input.llm_request?.contents?.length || 0;
|
||||
let counts = [];
|
||||
try { counts = JSON.parse(fs.readFileSync(${JSON.stringify(messageCountFile)}, 'utf-8')); } catch (e) {}
|
||||
try { counts = JSON.parse(fs.readFileSync(${escapedPath}, 'utf-8')); } catch (e) {}
|
||||
counts.push(messageCount);
|
||||
fs.writeFileSync(${JSON.stringify(messageCountFile)}, JSON.stringify(counts));
|
||||
fs.writeFileSync(${escapedPath}, JSON.stringify(counts));
|
||||
console.log(JSON.stringify({ decision: 'allow' }));
|
||||
`;
|
||||
const beforeModelScriptPath = rig.createScript(
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ const { shell } = getShellConfiguration();
|
|||
function getLineCountCommand(): { command: string; tool: string } {
|
||||
switch (shell) {
|
||||
case 'powershell':
|
||||
return { command: `Measure-Object -Line`, tool: 'Measure-Object' };
|
||||
case 'cmd':
|
||||
return { command: `find /c /v`, tool: 'find' };
|
||||
case 'bash':
|
||||
|
|
@ -238,8 +239,12 @@ describe('run_shell_command', () => {
|
|||
});
|
||||
|
||||
it('should succeed in yolo mode', async () => {
|
||||
const isWindows = process.platform === 'win32';
|
||||
await rig.setup('should succeed in yolo mode', {
|
||||
settings: { tools: { core: ['run_shell_command'] } },
|
||||
settings: {
|
||||
tools: { core: ['run_shell_command'] },
|
||||
shell: isWindows ? { enableInteractiveShell: false } : undefined,
|
||||
},
|
||||
});
|
||||
|
||||
const testFile = rig.createFile('test.txt', 'Lorem\nIpsum\nDolor\n');
|
||||
|
|
|
|||
84
package-lock.json
generated
84
package-lock.json
generated
|
|
@ -5464,6 +5464,13 @@
|
|||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/array-flatten": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
|
||||
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==",
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/array-includes": {
|
||||
"version": "3.1.9",
|
||||
"resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz",
|
||||
|
|
@ -6563,6 +6570,10 @@
|
|||
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz",
|
||||
"integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"safe-buffer": "5.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
|
|
@ -8539,6 +8550,36 @@
|
|||
"express": ">= 4.11"
|
||||
}
|
||||
},
|
||||
"node_modules/express/node_modules/cookie": {
|
||||
"version": "0.7.1",
|
||||
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz",
|
||||
"integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/express/node_modules/debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/express/node_modules/statuses": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
||||
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/extend": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
|
||||
|
|
@ -8790,11 +8831,34 @@
|
|||
"statuses": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/express"
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/finalhandler/node_modules/debug": {
|
||||
"version": "2.6.9",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/finalhandler/node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/finalhandler/node_modules/statuses": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
||||
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/find-up": {
|
||||
|
|
@ -16222,6 +16286,16 @@
|
|||
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/utils-merge": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
|
||||
"integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/uuid": {
|
||||
"version": "9.0.1",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz",
|
||||
|
|
|
|||
|
|
@ -37,10 +37,12 @@
|
|||
"build:all": "npm run build && npm run build:sandbox && npm run build:vscode",
|
||||
"build:packages": "npm run build --workspaces",
|
||||
"build:sandbox": "node scripts/build_sandbox.js",
|
||||
"build:binary": "node scripts/build_binary.js",
|
||||
"bundle": "npm run generate && npm run build --workspace=@google/gemini-cli-devtools && node esbuild.config.js && node scripts/copy_bundle_assets.js",
|
||||
"test": "npm run test --workspaces --if-present",
|
||||
"test:ci": "npm run test:ci --workspaces --if-present && npm run test:scripts",
|
||||
"test": "npm run test --workspaces --if-present && npm run test:sea-launch",
|
||||
"test:ci": "npm run test:ci --workspaces --if-present && npm run test:scripts && npm run test:sea-launch",
|
||||
"test:scripts": "vitest run --config ./scripts/tests/vitest.config.ts",
|
||||
"test:sea-launch": "vitest run sea/sea-launch.test.js",
|
||||
"test:always_passing_evals": "vitest run --config evals/vitest.config.ts",
|
||||
"test:all_evals": "cross-env RUN_EVALS=1 vitest run --config evals/vitest.config.ts",
|
||||
"test:e2e": "cross-env VERBOSE=true KEEP_OUTPUT=true npm run test:integration:sandbox:none",
|
||||
|
|
|
|||
|
|
@ -102,7 +102,9 @@ export async function loadSandboxConfig(
|
|||
|
||||
const packageJson = await getPackageJson(__dirname);
|
||||
const image =
|
||||
process.env['GEMINI_SANDBOX_IMAGE'] ?? packageJson?.config?.sandboxImageUri;
|
||||
process.env['GEMINI_SANDBOX_IMAGE'] ??
|
||||
process.env['GEMINI_SANDBOX_IMAGE_DEFAULT'] ??
|
||||
packageJson?.config?.sandboxImageUri;
|
||||
|
||||
return command && image ? { command, image } : undefined;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@
|
|||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { spawn } from 'node:child_process';
|
||||
import { spawn, execSync } from 'node:child_process';
|
||||
import type {
|
||||
HookConfig,
|
||||
CommandHookConfig,
|
||||
|
|
@ -331,12 +331,17 @@ export class HookRunner {
|
|||
let timedOut = false;
|
||||
|
||||
const shellConfig = getShellConfiguration();
|
||||
const command = this.expandCommand(
|
||||
let command = this.expandCommand(
|
||||
hookConfig.command,
|
||||
input,
|
||||
shellConfig.shell,
|
||||
);
|
||||
|
||||
if (shellConfig.shell === 'powershell') {
|
||||
// Append exit code check to ensure the exit code of the command is propagated
|
||||
command = `${command}; if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE }`;
|
||||
}
|
||||
|
||||
// Set up environment variables
|
||||
const env = {
|
||||
...sanitizeEnvironment(process.env, this.config.sanitizationConfig),
|
||||
|
|
@ -359,12 +364,31 @@ export class HookRunner {
|
|||
// Set up timeout
|
||||
const timeoutHandle = setTimeout(() => {
|
||||
timedOut = true;
|
||||
child.kill('SIGTERM');
|
||||
|
||||
if (process.platform === 'win32' && child.pid) {
|
||||
try {
|
||||
execSync(`taskkill /pid ${child.pid} /f /t`, { timeout: 2000 });
|
||||
} catch (_e) {
|
||||
// Ignore errors if process is already dead or access denied
|
||||
debugLogger.debug(`Taskkill failed: ${_e}`);
|
||||
}
|
||||
} else {
|
||||
child.kill('SIGTERM');
|
||||
}
|
||||
|
||||
// Force kill after 5 seconds
|
||||
setTimeout(() => {
|
||||
if (!child.killed) {
|
||||
child.kill('SIGKILL');
|
||||
if (process.platform === 'win32' && child.pid) {
|
||||
try {
|
||||
execSync(`taskkill /pid ${child.pid} /f /t`, { timeout: 2000 });
|
||||
} catch (_e) {
|
||||
// Ignore
|
||||
debugLogger.debug(`Taskkill failed: ${_e}`);
|
||||
}
|
||||
} else {
|
||||
child.kill('SIGKILL');
|
||||
}
|
||||
}
|
||||
}, 5000);
|
||||
}, timeout);
|
||||
|
|
|
|||
|
|
@ -74,16 +74,41 @@ async function main() {
|
|||
});
|
||||
|
||||
if (zipProcess.error || zipProcess.status !== 0) {
|
||||
// Fallback to tar --format=zip if zip is not available (common on Windows)
|
||||
console.log('zip command not found, falling back to tar...');
|
||||
zipProcess = spawnSync(
|
||||
'tar',
|
||||
['-a', '-c', '--format=zip', '-f', outputFilename, '.'],
|
||||
{
|
||||
cwd: skillPath,
|
||||
stdio: 'inherit',
|
||||
},
|
||||
);
|
||||
if (process.platform === 'win32') {
|
||||
// Fallback to PowerShell Compress-Archive on Windows
|
||||
// Note: Compress-Archive only supports .zip extension, so we zip to .zip and rename
|
||||
console.log('zip command not found, falling back to PowerShell...');
|
||||
const tempZip = outputFilename + '.zip';
|
||||
// Escape single quotes for PowerShell (replace ' with '') and use single quotes for the path
|
||||
const safeTempZip = tempZip.replace(/'/g, "''");
|
||||
zipProcess = spawnSync(
|
||||
'powershell.exe',
|
||||
[
|
||||
'-NoProfile',
|
||||
'-Command',
|
||||
`Compress-Archive -Path .\\* -DestinationPath '${safeTempZip}' -Force`,
|
||||
],
|
||||
{
|
||||
cwd: skillPath,
|
||||
stdio: 'inherit',
|
||||
},
|
||||
);
|
||||
|
||||
if (zipProcess.status === 0 && require('node:fs').existsSync(tempZip)) {
|
||||
require('node:fs').renameSync(tempZip, outputFilename);
|
||||
}
|
||||
} else {
|
||||
// Fallback to tar on Unix-like systems
|
||||
console.log('zip command not found, falling back to tar...');
|
||||
zipProcess = spawnSync(
|
||||
'tar',
|
||||
['-a', '-c', '--format=zip', '-f', outputFilename, '.'],
|
||||
{
|
||||
cwd: skillPath,
|
||||
stdio: 'inherit',
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (zipProcess.error) {
|
||||
|
|
|
|||
|
|
@ -498,13 +498,19 @@ export class TestRig {
|
|||
command: string;
|
||||
initialArgs: string[];
|
||||
} {
|
||||
const binaryPath = env['INTEGRATION_TEST_GEMINI_BINARY_PATH'];
|
||||
const isNpmReleaseTest =
|
||||
env['INTEGRATION_TEST_USE_INSTALLED_GEMINI'] === 'true';
|
||||
const geminiCommand = os.platform() === 'win32' ? 'gemini.cmd' : 'gemini';
|
||||
const command = isNpmReleaseTest ? geminiCommand : 'node';
|
||||
const initialArgs = isNpmReleaseTest
|
||||
? extraInitialArgs
|
||||
: [BUNDLE_PATH, ...extraInitialArgs];
|
||||
let command = 'node';
|
||||
let initialArgs = [BUNDLE_PATH, ...extraInitialArgs];
|
||||
if (binaryPath) {
|
||||
command = binaryPath;
|
||||
initialArgs = extraInitialArgs;
|
||||
} else if (isNpmReleaseTest) {
|
||||
command = geminiCommand;
|
||||
initialArgs = extraInitialArgs;
|
||||
}
|
||||
if (this.fakeResponsesPath) {
|
||||
if (process.env['REGENERATE_MODEL_GOLDENS'] === 'true') {
|
||||
initialArgs.push('--record-responses', this.fakeResponsesPath);
|
||||
|
|
|
|||
|
|
@ -1676,6 +1676,33 @@ TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
|||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
============================================================
|
||||
safe-buffer@5.2.1
|
||||
(git://github.com/feross/safe-buffer.git)
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Feross Aboukhadijeh
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
|
||||
============================================================
|
||||
cookie@0.7.2
|
||||
(No repository found)
|
||||
|
|
|
|||
424
scripts/build_binary.js
Normal file
424
scripts/build_binary.js
Normal file
|
|
@ -0,0 +1,424 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { spawnSync } from 'node:child_process';
|
||||
import {
|
||||
cpSync,
|
||||
rmSync,
|
||||
mkdirSync,
|
||||
existsSync,
|
||||
copyFileSync,
|
||||
writeFileSync,
|
||||
readFileSync,
|
||||
} from 'node:fs';
|
||||
import { join, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import process from 'node:process';
|
||||
import { globSync } from 'glob';
|
||||
import { createHash } from 'node:crypto';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const root = join(__dirname, '..');
|
||||
const distDir = join(root, 'dist');
|
||||
const bundleDir = join(root, 'bundle');
|
||||
const stagingDir = join(bundleDir, 'native_modules');
|
||||
const seaConfigPath = join(root, 'sea-config.json');
|
||||
const manifestPath = join(bundleDir, 'manifest.json');
|
||||
const entitlementsPath = join(root, 'scripts/entitlements.plist');
|
||||
|
||||
// --- Helper Functions ---
|
||||
|
||||
/**
|
||||
* Safely executes a command using spawnSync.
|
||||
* @param {string} command
|
||||
* @param {string[]} args
|
||||
* @param {object} options
|
||||
*/
|
||||
function runCommand(command, args, options = {}) {
|
||||
let finalCommand = command;
|
||||
let useShell = options.shell || false;
|
||||
|
||||
// On Windows, npm/npx are batch files and need a shell
|
||||
if (
|
||||
process.platform === 'win32' &&
|
||||
(command === 'npm' || command === 'npx')
|
||||
) {
|
||||
finalCommand = `${command}.cmd`;
|
||||
useShell = true;
|
||||
}
|
||||
|
||||
const finalOptions = {
|
||||
stdio: 'inherit',
|
||||
cwd: root,
|
||||
shell: useShell,
|
||||
...options,
|
||||
};
|
||||
|
||||
const result = spawnSync(finalCommand, args, finalOptions);
|
||||
|
||||
if (result.status !== 0) {
|
||||
if (result.error) {
|
||||
throw result.error;
|
||||
}
|
||||
throw new Error(
|
||||
`Command failed with exit code ${result.status}: ${command}`,
|
||||
);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes existing digital signatures from a binary.
|
||||
* @param {string} filePath
|
||||
*/
|
||||
function removeSignature(filePath) {
|
||||
console.log(`Removing signature from ${filePath}...`);
|
||||
const platform = process.platform;
|
||||
try {
|
||||
if (platform === 'darwin') {
|
||||
spawnSync('codesign', ['--remove-signature', filePath], {
|
||||
stdio: 'ignore',
|
||||
});
|
||||
} else if (platform === 'win32') {
|
||||
spawnSync('signtool', ['remove', '/s', filePath], {
|
||||
stdio: 'ignore',
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// Best effort: Ignore failures
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Signs a binary using hardcoded tools for the platform.
|
||||
* @param {string} filePath
|
||||
*/
|
||||
function signFile(filePath) {
|
||||
const platform = process.platform;
|
||||
|
||||
if (platform === 'darwin') {
|
||||
const identity = process.env.APPLE_IDENTITY || '-';
|
||||
console.log(`Signing ${filePath} (Identity: ${identity})...`);
|
||||
|
||||
const args = [
|
||||
'--sign',
|
||||
identity,
|
||||
'--force',
|
||||
'--timestamp',
|
||||
'--options',
|
||||
'runtime',
|
||||
];
|
||||
|
||||
if (existsSync(entitlementsPath)) {
|
||||
args.push('--entitlements', entitlementsPath);
|
||||
}
|
||||
|
||||
args.push(filePath);
|
||||
|
||||
runCommand('codesign', args);
|
||||
} else if (platform === 'win32') {
|
||||
const args = ['sign'];
|
||||
|
||||
if (process.env.WINDOWS_PFX_FILE && process.env.WINDOWS_PFX_PASSWORD) {
|
||||
args.push(
|
||||
'/f',
|
||||
process.env.WINDOWS_PFX_FILE,
|
||||
'/p',
|
||||
process.env.WINDOWS_PFX_PASSWORD,
|
||||
);
|
||||
} else {
|
||||
args.push('/a');
|
||||
}
|
||||
|
||||
args.push(
|
||||
'/fd',
|
||||
'SHA256',
|
||||
'/td',
|
||||
'SHA256',
|
||||
'/tr',
|
||||
'http://timestamp.digicert.com',
|
||||
filePath,
|
||||
);
|
||||
|
||||
console.log(`Signing ${filePath}...`);
|
||||
try {
|
||||
runCommand('signtool', args, { stdio: 'pipe' });
|
||||
} catch (e) {
|
||||
let msg = e.message;
|
||||
if (process.env.WINDOWS_PFX_PASSWORD) {
|
||||
msg = msg.replaceAll(process.env.WINDOWS_PFX_PASSWORD, '******');
|
||||
}
|
||||
throw new Error(msg);
|
||||
}
|
||||
} else if (platform === 'linux') {
|
||||
console.log(`Skipping signing for ${filePath} on Linux.`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Build Binary Script Started...');
|
||||
|
||||
// 1. Clean dist
|
||||
if (existsSync(distDir)) {
|
||||
console.log('Cleaning dist directory...');
|
||||
rmSync(distDir, { recursive: true, force: true });
|
||||
}
|
||||
mkdirSync(distDir, { recursive: true });
|
||||
|
||||
// 2. Build Bundle
|
||||
console.log('Running npm clean, install, and bundle...');
|
||||
try {
|
||||
runCommand('npm', ['run', 'clean']);
|
||||
runCommand('npm', ['install']);
|
||||
runCommand('npm', ['run', 'bundle']);
|
||||
} catch (e) {
|
||||
console.error('Build step failed:', e.message);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// 3. Stage & Sign Native Modules
|
||||
const includeNativeModules = process.env.BUNDLE_NATIVE_MODULES !== 'false';
|
||||
console.log(`Include Native Modules: ${includeNativeModules}`);
|
||||
|
||||
if (includeNativeModules) {
|
||||
console.log('Staging and signing native modules...');
|
||||
// Prepare staging
|
||||
if (existsSync(stagingDir))
|
||||
rmSync(stagingDir, { recursive: true, force: true });
|
||||
mkdirSync(stagingDir, { recursive: true });
|
||||
|
||||
// Copy @lydell/node-pty to staging
|
||||
const lydellSrc = join(root, 'node_modules/@lydell');
|
||||
const lydellStaging = join(stagingDir, 'node_modules/@lydell');
|
||||
|
||||
if (existsSync(lydellSrc)) {
|
||||
mkdirSync(dirname(lydellStaging), { recursive: true });
|
||||
cpSync(lydellSrc, lydellStaging, { recursive: true });
|
||||
} else {
|
||||
console.warn(
|
||||
'Warning: @lydell/node-pty not found in node_modules. Native terminal features may fail.',
|
||||
);
|
||||
}
|
||||
|
||||
// Sign Staged .node files
|
||||
try {
|
||||
const nodeFiles = globSync('**/*.node', {
|
||||
cwd: stagingDir,
|
||||
absolute: true,
|
||||
});
|
||||
for (const file of nodeFiles) {
|
||||
signFile(file);
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn('Warning: Failed to sign native modules:', e.code);
|
||||
}
|
||||
} else {
|
||||
console.log('Skipping native modules bundling (BUNDLE_NATIVE_MODULES=false)');
|
||||
}
|
||||
|
||||
// 4. Generate SEA Configuration and Manifest
|
||||
console.log('Generating SEA configuration and manifest...');
|
||||
const packageJson = JSON.parse(
|
||||
readFileSync(join(root, 'package.json'), 'utf8'),
|
||||
);
|
||||
|
||||
// Helper to calc hash
|
||||
const sha256 = (content) => createHash('sha256').update(content).digest('hex');
|
||||
|
||||
// Read Main Bundle
|
||||
const geminiBundlePath = join(root, 'bundle/gemini.js');
|
||||
const geminiContent = readFileSync(geminiBundlePath);
|
||||
const geminiHash = sha256(geminiContent);
|
||||
|
||||
const assets = {
|
||||
'gemini.mjs': geminiBundlePath, // Use .js source but map to .mjs for runtime ESM
|
||||
'manifest.json': 'bundle/manifest.json',
|
||||
};
|
||||
|
||||
const manifest = {
|
||||
main: 'gemini.mjs',
|
||||
mainHash: geminiHash,
|
||||
version: packageJson.version,
|
||||
files: [],
|
||||
};
|
||||
|
||||
// Helper to recursively find files from STAGING
|
||||
function addAssetsFromDir(baseDir, runtimePrefix) {
|
||||
const fullDir = join(stagingDir, baseDir);
|
||||
if (!existsSync(fullDir)) return;
|
||||
|
||||
const items = globSync('**/*', { cwd: fullDir, nodir: true });
|
||||
for (const item of items) {
|
||||
const relativePath = join(runtimePrefix, item);
|
||||
const assetKey = `files:${relativePath}`;
|
||||
const fsPath = join(fullDir, item);
|
||||
|
||||
// Calc hash
|
||||
const content = readFileSync(fsPath);
|
||||
const hash = sha256(content);
|
||||
|
||||
assets[assetKey] = fsPath;
|
||||
manifest.files.push({ key: assetKey, path: relativePath, hash: hash });
|
||||
}
|
||||
}
|
||||
|
||||
// Add sb files
|
||||
const sbFiles = globSync('sandbox-macos-*.sb', { cwd: bundleDir });
|
||||
for (const sbFile of sbFiles) {
|
||||
const fsPath = join(bundleDir, sbFile);
|
||||
const content = readFileSync(fsPath);
|
||||
const hash = sha256(content);
|
||||
assets[sbFile] = fsPath;
|
||||
manifest.files.push({ key: sbFile, path: sbFile, hash: hash });
|
||||
}
|
||||
|
||||
// Add policy files
|
||||
const policyDir = join(bundleDir, 'policies');
|
||||
if (existsSync(policyDir)) {
|
||||
const policyFiles = globSync('*.toml', { cwd: policyDir });
|
||||
for (const policyFile of policyFiles) {
|
||||
const fsPath = join(policyDir, policyFile);
|
||||
const relativePath = join('policies', policyFile);
|
||||
const content = readFileSync(fsPath);
|
||||
const hash = sha256(content);
|
||||
// Use a unique key to avoid collision if filenames overlap (though unlikely here)
|
||||
// But sea-launch writes to 'path', so key is just for lookup.
|
||||
const assetKey = `policies:${policyFile}`;
|
||||
assets[assetKey] = fsPath;
|
||||
manifest.files.push({ key: assetKey, path: relativePath, hash: hash });
|
||||
}
|
||||
}
|
||||
|
||||
// Add assets from Staging
|
||||
if (includeNativeModules) {
|
||||
addAssetsFromDir('node_modules/@lydell', 'node_modules/@lydell');
|
||||
}
|
||||
|
||||
writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));
|
||||
|
||||
const seaConfig = {
|
||||
main: 'sea/sea-launch.cjs',
|
||||
output: 'dist/sea-prep.blob',
|
||||
disableExperimentalSEAWarning: true,
|
||||
assets: assets,
|
||||
};
|
||||
|
||||
writeFileSync(seaConfigPath, JSON.stringify(seaConfig, null, 2));
|
||||
console.log(`Configured ${Object.keys(assets).length} embedded assets.`);
|
||||
|
||||
// 5. Generate SEA Blob
|
||||
console.log('Generating SEA blob...');
|
||||
try {
|
||||
runCommand('node', ['--experimental-sea-config', 'sea-config.json']);
|
||||
} catch (e) {
|
||||
console.error('Failed to generate SEA blob:', e.message);
|
||||
// Cleanup
|
||||
if (existsSync(seaConfigPath)) rmSync(seaConfigPath);
|
||||
if (existsSync(manifestPath)) rmSync(manifestPath);
|
||||
if (existsSync(stagingDir))
|
||||
rmSync(stagingDir, { recursive: true, force: true });
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Check blob existence
|
||||
const blobPath = join(distDir, 'sea-prep.blob');
|
||||
if (!existsSync(blobPath)) {
|
||||
console.error('Error: sea-prep.blob not found in dist/');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// 6. Identify Target & Prepare Binary
|
||||
const platform = process.platform;
|
||||
const arch = process.arch;
|
||||
const targetName = `${platform}-${arch}`;
|
||||
console.log(`Targeting: ${targetName}`);
|
||||
|
||||
const targetDir = join(distDir, targetName);
|
||||
mkdirSync(targetDir, { recursive: true });
|
||||
|
||||
const nodeBinary = process.execPath;
|
||||
const binaryName = platform === 'win32' ? 'gemini.exe' : 'gemini';
|
||||
const targetBinaryPath = join(targetDir, binaryName);
|
||||
|
||||
console.log(`Copying node binary from ${nodeBinary} to ${targetBinaryPath}...`);
|
||||
copyFileSync(nodeBinary, targetBinaryPath);
|
||||
|
||||
// Remove existing signature using helper
|
||||
removeSignature(targetBinaryPath);
|
||||
|
||||
// Copy standard bundle assets (policies, .sb files)
|
||||
console.log('Copying additional resources...');
|
||||
if (existsSync(bundleDir)) {
|
||||
cpSync(bundleDir, targetDir, { recursive: true });
|
||||
}
|
||||
|
||||
// Clean up source JS files from output (we only want embedded)
|
||||
const filesToRemove = [
|
||||
'gemini.js',
|
||||
'gemini.mjs',
|
||||
'gemini.js.map',
|
||||
'gemini.mjs.map',
|
||||
'gemini-sea.cjs',
|
||||
'sea-launch.cjs',
|
||||
'manifest.json',
|
||||
'native_modules',
|
||||
'policies',
|
||||
];
|
||||
|
||||
filesToRemove.forEach((f) => {
|
||||
const p = join(targetDir, f);
|
||||
if (existsSync(p)) rmSync(p, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
// Remove .sb files from targetDir
|
||||
const sbFilesToRemove = globSync('sandbox-macos-*.sb', { cwd: targetDir });
|
||||
for (const f of sbFilesToRemove) {
|
||||
rmSync(join(targetDir, f));
|
||||
}
|
||||
|
||||
// 7. Inject Blob
|
||||
console.log('Injecting SEA blob...');
|
||||
const sentinelFuse = 'NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2';
|
||||
|
||||
try {
|
||||
const args = [
|
||||
'postject',
|
||||
targetBinaryPath,
|
||||
'NODE_SEA_BLOB',
|
||||
blobPath,
|
||||
'--sentinel-fuse',
|
||||
sentinelFuse,
|
||||
];
|
||||
|
||||
if (platform === 'darwin') {
|
||||
args.push('--macho-segment-name', 'NODE_SEA');
|
||||
}
|
||||
|
||||
runCommand('npx', args);
|
||||
console.log('Injection successful.');
|
||||
} catch (e) {
|
||||
console.error('Postject failed:', e.message);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// 8. Final Signing
|
||||
console.log('Signing final executable...');
|
||||
try {
|
||||
signFile(targetBinaryPath);
|
||||
} catch (e) {
|
||||
console.warn('Warning: Final signing failed:', e.code);
|
||||
console.warn('Continuing without signing...');
|
||||
}
|
||||
|
||||
// 9. Cleanup
|
||||
console.log('Cleaning up artifacts...');
|
||||
rmSync(blobPath);
|
||||
if (existsSync(seaConfigPath)) rmSync(seaConfigPath);
|
||||
if (existsSync(manifestPath)) rmSync(manifestPath);
|
||||
if (existsSync(stagingDir))
|
||||
rmSync(stagingDir, { recursive: true, force: true });
|
||||
|
||||
console.log(`Binary built successfully in ${targetDir}`);
|
||||
21
scripts/entitlements.plist
Normal file
21
scripts/entitlements.plist
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<!-- Allow JIT compilation (Required for Node.js/V8) -->
|
||||
<key>com.apple.security.cs.allow-jit</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow executable memory modification (Required for Node.js/V8) -->
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow loading unsigned libraries (Helpful for native modules extracted to temp) -->
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow access to environment variables (Standard for CLI tools) -->
|
||||
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
278
sea/sea-launch.cjs
Normal file
278
sea/sea-launch.cjs
Normal file
|
|
@ -0,0 +1,278 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
const { getAsset } = require('node:sea');
|
||||
const process = require('node:process');
|
||||
const nodeModule = require('node:module');
|
||||
const path = require('node:path');
|
||||
const { pathToFileURL } = require('node:url');
|
||||
const fs = require('node:fs');
|
||||
const os = require('node:os');
|
||||
const crypto = require('node:crypto');
|
||||
|
||||
// --- Helper Functions ---
|
||||
|
||||
/**
 * Removes the "ghost" argument that Node SEA sometimes injects, where
 * argv[2] is a duplicate of the binary's own path (argv[0]/execPath).
 * Mutates `argv` in place.
 * @param {string[]} argv Process argument vector (mutated on match).
 * @param {string} execPath Absolute path of the running binary.
 * @param {function} resolveFn Path resolver, injectable for tests.
 * @returns {boolean} True if the duplicate argument was removed.
 */
function sanitizeArgv(argv, execPath, resolveFn = path.resolve) {
  if (argv.length <= 2) {
    return false;
  }
  const resolvedThirdArg = resolveFn(argv[2]);
  if (resolvedThirdArg !== execPath) {
    return false;
  }
  argv.splice(2, 1);
  return true;
}
|
||||
|
||||
/**
 * Produces a filesystem-safe token from an arbitrary value: anything other
 * than ASCII letters, digits, dots, and dashes becomes an underscore.
 * Falsy input falls back to the literal 'unknown'.
 * @param {string} name
 * @returns {string}
 */
function getSafeName(name) {
  const raw = String(name || 'unknown');
  return raw.replace(/[^a-zA-Z0-9.-]/g, '_');
}
|
||||
|
||||
/**
 * Verifies the extracted runtime directory against the embedded manifest by
 * re-hashing the main bundle and every listed file with SHA-256.
 * Any I/O or hashing failure counts as an integrity failure.
 * @param {string} dir Runtime directory to verify.
 * @param {object} manifest Manifest with `mainHash` and optional `files[]`.
 * @param {object} fsMod Injectable fs module (tests).
 * @param {object} cryptoMod Injectable crypto module (tests).
 * @returns {boolean} True when every hash matches.
 */
function verifyIntegrity(dir, manifest, fsMod = fs, cryptoMod = crypto) {
  // Streams the file through the hash in 64KB windows so large bundles
  // never need to be held in memory at once.
  const hashFile = (filePath) => {
    const digest = cryptoMod.createHash('sha256');
    const window = new Uint8Array(65536); // 64KB
    const fd = fsMod.openSync(filePath, 'r');
    try {
      for (;;) {
        const count = fsMod.readSync(fd, window, 0, window.length, null);
        if (count === 0) break;
        digest.update(window.subarray(0, count));
      }
    } finally {
      fsMod.closeSync(fd);
    }
    return digest.digest('hex');
  };

  try {
    if (hashFile(path.join(dir, 'gemini.mjs')) !== manifest.mainHash) {
      return false;
    }
    for (const entry of manifest.files || []) {
      if (hashFile(path.join(dir, entry.path)) !== entry.hash) {
        return false;
      }
    }
    return true;
  } catch (_e) {
    // Missing/unreadable files mean the runtime cannot be trusted.
    return false;
  }
}
|
||||
|
||||
/**
 * Prepares the on-disk runtime directory, extracting embedded assets when
 * needed.
 *
 * The directory name is keyed by (sanitized) version and username so
 * different versions and users do not collide. An existing directory is
 * reused only when it is owned by the current user, is mode 0o700
 * (non-Windows), and passes the manifest hash check; otherwise it is
 * deleted and rebuilt by writing to a unique staging directory and
 * renaming it into place (atomic on the same filesystem).
 *
 * @param {object} manifest Embedded manifest ({version, mainHash, files[]}).
 * @param {function} getAssetFn Returns the bytes for an embedded asset key.
 * @param {object} deps Injectable dependencies for testing
 *   (fs, os, path, crypto, processEnv, processPid, processUid).
 * @returns {string} The path to the prepared runtime directory.
 */
function prepareRuntime(manifest, getAssetFn, deps = {}) {
  // Dependency injection with real-module defaults keeps this unit-testable.
  const fsMod = deps.fs || fs;
  const osMod = deps.os || os;
  const pathMod = deps.path || path;
  const processEnv = deps.processEnv || process.env;
  const processPid = deps.processPid || process.pid;
  // getuid is unavailable on Windows; 'unknown' disables the ownership check.
  const processUid =
    deps.processUid || (process.getuid ? process.getuid() : 'unknown');

  const version = manifest.version || '0.0.0';
  const safeVersion = getSafeName(version);
  const userInfo = osMod.userInfo();
  // Fall back through env/uid so the directory name is still per-user when
  // userInfo has no username.
  const username =
    userInfo.username || processEnv.USER || processUid || 'unknown';
  const safeUsername = getSafeName(username);

  let tempBase = osMod.tmpdir();

  // On Windows prefer LOCALAPPDATA over the temp dir, which may be
  // aggressively cleaned; silently fall back on failure.
  if (process.platform === 'win32' && processEnv.LOCALAPPDATA) {
    const appDir = pathMod.join(processEnv.LOCALAPPDATA, 'Google', 'GeminiCLI');
    try {
      if (!fsMod.existsSync(appDir)) {
        fsMod.mkdirSync(appDir, { recursive: true, mode: 0o700 });
      }
      tempBase = appDir;
    } catch (_) {
      // Fallback to tmpdir
    }
  }

  const finalRuntimeDir = pathMod.join(
    tempBase,
    `gemini-runtime-${safeVersion}-${safeUsername}`,
  );

  let runtimeDir;
  let useExisting = false;

  // A directory is "secure" when it is a real directory (lstat does not
  // follow symlinks), owned by us, and private (0o700).
  const isSecure = (dir) => {
    try {
      const stat = fsMod.lstatSync(dir);
      if (!stat.isDirectory()) return false;
      if (processUid !== 'unknown' && stat.uid !== processUid) return false;
      // Skip strict permission check on Windows as it's unreliable with standard fs.stat
      if (process.platform !== 'win32' && (stat.mode & 0o777) !== 0o700)
        return false;
      return true;
    } catch (_) {
      return false;
    }
  };
  // Reuse the existing runtime only if it is both secure and intact;
  // anything suspect is removed so it gets rebuilt below.
  if (fsMod.existsSync(finalRuntimeDir)) {
    if (isSecure(finalRuntimeDir)) {
      if (
        verifyIntegrity(finalRuntimeDir, manifest, fsMod, deps.crypto || crypto)
      ) {
        runtimeDir = finalRuntimeDir;
        useExisting = true;
      } else {
        try {
          fsMod.rmSync(finalRuntimeDir, { recursive: true, force: true });
        } catch (_) {}
      }
    } else {
      try {
        fsMod.rmSync(finalRuntimeDir, { recursive: true, force: true });
      } catch (_) {}
    }
  }

  if (!useExisting) {
    // Unique per-process staging dir avoids concurrent launches trampling
    // each other's partially written files.
    const setupDir = pathMod.join(
      tempBase,
      `gemini-setup-${processPid}-${Date.now()}`,
    );

    try {
      fsMod.mkdirSync(setupDir, { recursive: true, mode: 0o700 });
      // Writes one embedded asset to its relative path under setupDir;
      // silently skips keys with no embedded content.
      const writeToSetup = (assetKey, relPath) => {
        const content = getAssetFn(assetKey);
        if (!content) return;
        const destPath = pathMod.join(setupDir, relPath);
        const destDir = pathMod.dirname(destPath);
        if (!fsMod.existsSync(destDir))
          fsMod.mkdirSync(destDir, { recursive: true, mode: 0o700 });
        fsMod.writeFileSync(destPath, new Uint8Array(content), {
          mode: 0o755,
        });
      };
      writeToSetup('gemini.mjs', 'gemini.mjs');
      if (manifest.files) {
        for (const file of manifest.files) {
          writeToSetup(file.key, file.path);
        }
      }
      try {
        // Publish atomically: rename staging into the final location.
        fsMod.renameSync(setupDir, finalRuntimeDir);
        runtimeDir = finalRuntimeDir;
      } catch (renameErr) {
        // Rename race: another process may have published the runtime first.
        // Accept theirs only if it is secure and intact, then discard ours.
        if (
          fsMod.existsSync(finalRuntimeDir) &&
          isSecure(finalRuntimeDir) &&
          verifyIntegrity(
            finalRuntimeDir,
            manifest,
            fsMod,
            deps.crypto || crypto,
          )
        ) {
          runtimeDir = finalRuntimeDir;
          try {
            fsMod.rmSync(setupDir, { recursive: true, force: true });
          } catch (_) {}
        } else {
          throw renameErr;
        }
      }
    } catch (e) {
      console.error(
        'Fatal Error: Failed to setup secure runtime. Please try running again and if error persists please reinstall.',
        e,
      );
      // Best-effort cleanup of the staging dir before exiting.
      try {
        fsMod.rmSync(setupDir, { recursive: true, force: true });
      } catch (_) {}
      process.exit(1);
    }
  }

  return runtimeDir;
}
|
||||
|
||||
// --- Main Execution ---
|
||||
|
||||
/**
 * SEA entry point: extracts the embedded application into a per-user
 * runtime directory and dynamically imports the real CLI bundle from it.
 *
 * @param {function} getAssetFn Asset accessor (defaults to node:sea
 *   getAsset); injectable for tests.
 */
async function main(getAssetFn = getAsset) {
  // Lets the CLI detect that it is running from the standalone binary.
  process.env.IS_BINARY = 'true';

  // Speed up subsequent startups where the runtime supports it.
  if (nodeModule.enableCompileCache) {
    nodeModule.enableCompileCache();
  }

  process.noDeprecation = true;

  // Drop the "ghost" self-path argument Node SEA can inject at argv[2].
  sanitizeArgv(process.argv, process.execPath);

  const manifestJson = getAssetFn('manifest.json', 'utf8');
  if (!manifestJson) {
    console.error('Fatal Error: Corrupted binary. Please reinstall.');
    process.exit(1);
  }

  const manifest = JSON.parse(manifestJson);

  const runtimeDir = prepareRuntime(manifest, getAssetFn, {
    fs,
    os,
    path,
    processEnv: process.env,
    crypto,
  });

  const mainPath = path.join(runtimeDir, 'gemini.mjs');

  // try/catch rather than mixing `await` with `.catch()`; the original also
  // printed `err` twice (once as an argument, once standalone) — once is
  // enough.
  try {
    await import(pathToFileURL(mainPath).href);
  } catch (err) {
    console.error('Fatal Error: Failed to launch. Please reinstall.', err);
    process.exit(1);
  }
}
|
||||
|
||||
// Execute only when run directly as the SEA entry point (standard Node
// `require.main === module` check); when loaded by the unit tests only the
// exports below are used.
if (require.main === module) {
  main().catch((err) => {
    // Last-resort handler: anything main() did not handle itself is fatal.
    console.error('Unhandled error in sea-launch:', err);
    process.exit(1);
  });
}

// Exposed for unit testing (sea-launch.test.js).
module.exports = {
  sanitizeArgv,
  getSafeName,
  verifyIntegrity,
  prepareRuntime,
  main,
};
|
||||
799
sea/sea-launch.test.js
Normal file
799
sea/sea-launch.test.js
Normal file
|
|
@ -0,0 +1,799 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import * as path from 'node:path';
|
||||
import { Buffer } from 'node:buffer';
|
||||
import process from 'node:process';
|
||||
import {
|
||||
sanitizeArgv,
|
||||
getSafeName,
|
||||
verifyIntegrity,
|
||||
prepareRuntime,
|
||||
main,
|
||||
} from './sea-launch.cjs';
|
||||
|
||||
// Mocking fs and os
|
||||
// We need to use vi.mock factory for ESM mocking of built-in modules in Vitest
|
||||
vi.mock('node:fs', async () => {
|
||||
const fsMock = {
|
||||
mkdirSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
existsSync: vi.fn(),
|
||||
renameSync: vi.fn(),
|
||||
rmSync: vi.fn(),
|
||||
readFileSync: vi.fn().mockReturnValue('content'),
|
||||
lstatSync: vi.fn(),
|
||||
statSync: vi.fn(),
|
||||
openSync: vi.fn(),
|
||||
readSync: vi.fn(),
|
||||
closeSync: vi.fn(),
|
||||
};
|
||||
return {
|
||||
default: fsMock,
|
||||
...fsMock,
|
||||
};
|
||||
});
|
||||
vi.mock('fs', async () => {
|
||||
const fsMock = {
|
||||
mkdirSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
existsSync: vi.fn(),
|
||||
renameSync: vi.fn(),
|
||||
rmSync: vi.fn(),
|
||||
readFileSync: vi.fn().mockReturnValue('content'),
|
||||
lstatSync: vi.fn(),
|
||||
statSync: vi.fn(),
|
||||
openSync: vi.fn(),
|
||||
readSync: vi.fn(),
|
||||
closeSync: vi.fn(),
|
||||
};
|
||||
return {
|
||||
default: fsMock,
|
||||
...fsMock,
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('node:os', async () => {
|
||||
const osMock = {
|
||||
userInfo: () => ({ username: 'user' }),
|
||||
tmpdir: () => '/tmp',
|
||||
};
|
||||
return {
|
||||
default: osMock,
|
||||
...osMock,
|
||||
};
|
||||
});
|
||||
vi.mock('os', async () => {
|
||||
const osMock = {
|
||||
userInfo: () => ({ username: 'user' }),
|
||||
tmpdir: () => '/tmp',
|
||||
};
|
||||
return {
|
||||
default: osMock,
|
||||
...osMock,
|
||||
};
|
||||
});
|
||||
|
||||
describe('sea-launch', () => {
|
||||
describe('main', () => {
|
||||
it('executes main logic', async () => {
|
||||
const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {});
|
||||
const consoleSpy = vi
|
||||
.spyOn(globalThis.console, 'error')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const mockGetAsset = vi.fn((key) => {
|
||||
if (key === 'manifest.json')
|
||||
return JSON.stringify({ version: '1.0.0', mainHash: 'h1' });
|
||||
return Buffer.from('content');
|
||||
});
|
||||
|
||||
await main(mockGetAsset);
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalled();
|
||||
expect(exitSpy).toHaveBeenCalled();
|
||||
|
||||
exitSpy.mockRestore();
|
||||
consoleSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('sanitizeArgv', () => {
|
||||
it('removes ghost argument when argv[2] matches execPath', () => {
|
||||
const execPath = '/bin/node';
|
||||
const argv = ['/bin/node', '/app/script.js', '/bin/node', 'arg1'];
|
||||
const resolveFn = (p) => p;
|
||||
const removed = sanitizeArgv(argv, execPath, resolveFn);
|
||||
expect(removed).toBe(true);
|
||||
expect(argv).toEqual(['/bin/node', '/app/script.js', 'arg1']);
|
||||
});
|
||||
|
||||
it('does nothing if argv[2] does not match execPath', () => {
|
||||
const execPath = '/bin/node';
|
||||
const argv = ['/bin/node', '/app/script.js', 'command', 'arg1'];
|
||||
const resolveFn = (p) => p;
|
||||
const removed = sanitizeArgv(argv, execPath, resolveFn);
|
||||
expect(removed).toBe(false);
|
||||
expect(argv).toHaveLength(4);
|
||||
});
|
||||
|
||||
it('handles resolving relative paths', () => {
|
||||
const execPath = '/bin/node';
|
||||
const argv = ['/bin/node', '/app/script.js', './node', 'arg1'];
|
||||
const resolveFn = (p) => (p === './node' ? '/bin/node' : p);
|
||||
const removed = sanitizeArgv(argv, execPath, resolveFn);
|
||||
expect(removed).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getSafeName', () => {
|
||||
it('sanitizes strings', () => {
|
||||
expect(getSafeName('user@name')).toBe('user_name');
|
||||
expect(getSafeName('../path')).toBe('.._path');
|
||||
expect(getSafeName('valid-1.2')).toBe('valid-1.2');
|
||||
expect(getSafeName(undefined)).toBe('unknown');
|
||||
});
|
||||
});
|
||||
|
||||
describe('verifyIntegrity', () => {
|
||||
it('returns true for matching hashes', () => {
|
||||
const dir = '/tmp/test';
|
||||
const manifest = {
|
||||
mainHash: 'hash1',
|
||||
files: [{ path: 'file.txt', hash: 'hash2' }],
|
||||
};
|
||||
|
||||
const mockFs = {
|
||||
openSync: vi.fn((p) => {
|
||||
if (p.endsWith('gemini.mjs')) return 10;
|
||||
if (p.endsWith('file.txt')) return 20;
|
||||
throw new Error('Not found');
|
||||
}),
|
||||
readSync: vi.fn((fd, buffer) => {
|
||||
let content = '';
|
||||
if (fd === 10) content = 'content1';
|
||||
if (fd === 20) content = 'content2';
|
||||
|
||||
// Simulate simple read: write content to buffer and return length once, then return 0
|
||||
if (!buffer._readDone) {
|
||||
const buf = Buffer.from(content);
|
||||
buf.copy(buffer);
|
||||
buffer._readDone = true;
|
||||
return buf.length;
|
||||
} else {
|
||||
buffer._readDone = false; // Reset for next file
|
||||
return 0;
|
||||
}
|
||||
}),
|
||||
closeSync: vi.fn(),
|
||||
};
|
||||
|
||||
const mockCrypto = {
|
||||
createHash: vi.fn(() => ({
|
||||
update: vi.fn(function (content) {
|
||||
this._content =
|
||||
(this._content || '') + Buffer.from(content).toString();
|
||||
return this;
|
||||
}),
|
||||
digest: vi.fn(function () {
|
||||
if (this._content === 'content1') return 'hash1';
|
||||
if (this._content === 'content2') return 'hash2';
|
||||
return 'wrong';
|
||||
}),
|
||||
})),
|
||||
};
|
||||
|
||||
expect(verifyIntegrity(dir, manifest, mockFs, mockCrypto)).toBe(true);
|
||||
});
|
||||
|
||||
it('returns false for mismatched hashes', () => {
|
||||
const dir = '/tmp/test';
|
||||
const manifest = { mainHash: 'hash1' };
|
||||
|
||||
const mockFs = {
|
||||
openSync: vi.fn(() => 10),
|
||||
readSync: vi.fn((fd, buffer) => {
|
||||
if (!buffer._readDone) {
|
||||
const buf = Buffer.from('content_wrong');
|
||||
buf.copy(buffer);
|
||||
buffer._readDone = true;
|
||||
return buf.length;
|
||||
}
|
||||
return 0;
|
||||
}),
|
||||
closeSync: vi.fn(),
|
||||
};
|
||||
|
||||
const mockCrypto = {
|
||||
createHash: vi.fn(() => ({
|
||||
update: vi.fn(function (content) {
|
||||
this._content =
|
||||
(this._content || '') + Buffer.from(content).toString();
|
||||
return this;
|
||||
}),
|
||||
digest: vi.fn(function () {
|
||||
return 'hash_wrong';
|
||||
}),
|
||||
})),
|
||||
};
|
||||
|
||||
expect(verifyIntegrity(dir, manifest, mockFs, mockCrypto)).toBe(false);
|
||||
});
|
||||
|
||||
it('returns false when fs throws error', () => {
|
||||
const dir = '/tmp/test';
|
||||
const manifest = { mainHash: 'hash1' };
|
||||
const mockFs = {
|
||||
openSync: vi.fn(() => {
|
||||
throw new Error('FS Error');
|
||||
}),
|
||||
};
|
||||
const mockCrypto = { createHash: vi.fn() };
|
||||
expect(verifyIntegrity(dir, manifest, mockFs, mockCrypto)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('prepareRuntime', () => {
|
||||
const mockManifest = {
|
||||
version: '1.0.0',
|
||||
mainHash: 'h1',
|
||||
files: [{ key: 'f1', path: 'p1', hash: 'h1' }],
|
||||
};
|
||||
const mockGetAsset = vi.fn();
|
||||
const S_IFDIR = 0o40000;
|
||||
const MODE_700 = 0o700;
|
||||
|
||||
it('reuses existing runtime if secure and valid', () => {
|
||||
const deps = {
|
||||
fs: {
|
||||
existsSync: vi.fn(() => true),
|
||||
rmSync: vi.fn(),
|
||||
readFileSync: vi.fn(),
|
||||
openSync: vi.fn(() => 1),
|
||||
readSync: vi.fn((fd, buffer) => {
|
||||
if (!buffer._readDone) {
|
||||
buffer._readDone = true;
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}),
|
||||
closeSync: vi.fn(),
|
||||
lstatSync: vi.fn(() => ({
|
||||
isDirectory: () => true,
|
||||
uid: 1000,
|
||||
mode: S_IFDIR | MODE_700,
|
||||
})),
|
||||
},
|
||||
os: {
|
||||
userInfo: () => ({ username: 'user' }),
|
||||
tmpdir: () => '/tmp',
|
||||
},
|
||||
path: path,
|
||||
processEnv: {},
|
||||
crypto: {
|
||||
createHash: vi.fn(() => {
|
||||
const hash = {
|
||||
update: vi.fn().mockReturnThis(),
|
||||
digest: vi.fn(() => 'h1'),
|
||||
};
|
||||
return hash;
|
||||
}),
|
||||
},
|
||||
processUid: 1000,
|
||||
};
|
||||
|
||||
deps.fs.readFileSync.mockReturnValue('content');
|
||||
|
||||
const runtime = prepareRuntime(mockManifest, mockGetAsset, deps);
|
||||
expect(runtime).toContain('gemini-runtime-1.0.0-user');
|
||||
expect(deps.fs.rmSync).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('recreates runtime if existing has wrong owner', () => {
|
||||
const deps = {
|
||||
fs: {
|
||||
existsSync: vi.fn().mockReturnValueOnce(true).mockReturnValue(false),
|
||||
rmSync: vi.fn(),
|
||||
mkdirSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
renameSync: vi.fn(),
|
||||
readFileSync: vi.fn().mockReturnValue('content'),
|
||||
openSync: vi.fn(() => 1),
|
||||
readSync: vi.fn((fd, buffer) => {
|
||||
if (!buffer._readDone) {
|
||||
buffer._readDone = true;
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}),
|
||||
closeSync: vi.fn(),
|
||||
lstatSync: vi.fn(() => ({
|
||||
isDirectory: () => true,
|
||||
uid: 999, // Wrong UID
|
||||
mode: S_IFDIR | MODE_700,
|
||||
})),
|
||||
},
|
||||
os: {
|
||||
userInfo: () => ({ username: 'user' }),
|
||||
tmpdir: () => '/tmp',
|
||||
},
|
||||
path: path,
|
||||
processEnv: {},
|
||||
crypto: {
|
||||
createHash: vi.fn(() => {
|
||||
const hash = {
|
||||
update: vi.fn().mockReturnThis(),
|
||||
digest: vi.fn(() => 'h1'),
|
||||
};
|
||||
return hash;
|
||||
}),
|
||||
},
|
||||
processUid: 1000,
|
||||
processPid: 123,
|
||||
};
|
||||
|
||||
mockGetAsset.mockReturnValue(Buffer.from('asset_content'));
|
||||
|
||||
prepareRuntime(mockManifest, mockGetAsset, deps);
|
||||
|
||||
expect(deps.fs.rmSync).toHaveBeenCalledWith(
|
||||
expect.stringContaining('gemini-runtime'),
|
||||
expect.anything(),
|
||||
);
|
||||
expect(deps.fs.mkdirSync).toHaveBeenCalledWith(
|
||||
expect.stringContaining('gemini-setup'),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('recreates runtime if existing has wrong permissions', () => {
|
||||
const deps = {
|
||||
fs: {
|
||||
existsSync: vi.fn().mockReturnValueOnce(true).mockReturnValue(false),
|
||||
rmSync: vi.fn(),
|
||||
mkdirSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
renameSync: vi.fn(),
|
||||
readFileSync: vi.fn().mockReturnValue('content'),
|
||||
openSync: vi.fn(() => 1),
|
||||
readSync: vi.fn((fd, buffer) => {
|
||||
if (!buffer._readDone) {
|
||||
buffer._readDone = true;
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}),
|
||||
closeSync: vi.fn(),
|
||||
lstatSync: vi.fn(() => ({
|
||||
isDirectory: () => true,
|
||||
uid: 1000,
|
||||
mode: S_IFDIR | 0o777, // Too open
|
||||
})),
|
||||
},
|
||||
os: {
|
||||
userInfo: () => ({ username: 'user' }),
|
||||
tmpdir: () => '/tmp',
|
||||
},
|
||||
path: path,
|
||||
processEnv: {},
|
||||
crypto: {
|
||||
createHash: vi.fn(() => {
|
||||
const hash = {
|
||||
update: vi.fn().mockReturnThis(),
|
||||
digest: vi.fn(() => 'h1'),
|
||||
};
|
||||
return hash;
|
||||
}),
|
||||
},
|
||||
processUid: 1000,
|
||||
processPid: 123,
|
||||
};
|
||||
|
||||
mockGetAsset.mockReturnValue(Buffer.from('asset_content'));
|
||||
|
||||
prepareRuntime(mockManifest, mockGetAsset, deps);
|
||||
|
||||
expect(deps.fs.rmSync).toHaveBeenCalledWith(
|
||||
expect.stringContaining('gemini-runtime'),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('creates new runtime if existing is invalid (integrity check)', () => {
|
||||
const deps = {
|
||||
fs: {
|
||||
existsSync: vi.fn().mockReturnValueOnce(true).mockReturnValue(false),
|
||||
rmSync: vi.fn(),
|
||||
mkdirSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
renameSync: vi.fn(),
|
||||
readFileSync: vi.fn().mockReturnValue('wrong_content'),
|
||||
openSync: vi.fn(() => 1),
|
||||
readSync: vi.fn((fd, buffer) => {
|
||||
if (!buffer._readDone) {
|
||||
buffer._readDone = true;
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}),
|
||||
closeSync: vi.fn(),
|
||||
lstatSync: vi.fn(() => ({
|
||||
isDirectory: () => true,
|
||||
uid: 1000,
|
||||
mode: S_IFDIR | MODE_700,
|
||||
})),
|
||||
},
|
||||
os: {
|
||||
userInfo: () => ({ username: 'user' }),
|
||||
tmpdir: () => '/tmp',
|
||||
},
|
||||
path: path,
|
||||
processEnv: {},
|
||||
crypto: {
|
||||
createHash: vi.fn(() => {
|
||||
const hash = {
|
||||
update: vi.fn().mockReturnThis(),
|
||||
digest: vi.fn(() => 'hash_calculated'),
|
||||
};
|
||||
return hash;
|
||||
}),
|
||||
},
|
||||
processUid: 1000,
|
||||
processPid: 123,
|
||||
};
|
||||
|
||||
mockGetAsset.mockReturnValue(Buffer.from('asset_content'));
|
||||
|
||||
prepareRuntime(mockManifest, mockGetAsset, deps);
|
||||
|
||||
expect(deps.fs.rmSync).toHaveBeenCalledWith(
|
||||
expect.stringContaining('gemini-runtime'),
|
||||
expect.anything(),
|
||||
);
|
||||
expect(deps.fs.mkdirSync).toHaveBeenCalledWith(
|
||||
expect.stringContaining('gemini-setup'),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('handles rename race condition: uses target if secure and valid', () => {
|
||||
const deps = {
|
||||
fs: {
|
||||
existsSync: vi.fn(),
|
||||
rmSync: vi.fn(),
|
||||
mkdirSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
renameSync: vi.fn(() => {
|
||||
throw new Error('Rename failed');
|
||||
}),
|
||||
readFileSync: vi.fn().mockReturnValue('content'),
|
||||
openSync: vi.fn(() => 1),
|
||||
readSync: vi.fn((fd, buffer) => {
|
||||
if (!buffer._readDone) {
|
||||
buffer._readDone = true;
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}),
|
||||
closeSync: vi.fn(),
|
||||
lstatSync: vi.fn(() => ({
|
||||
isDirectory: () => true,
|
||||
uid: 1000,
|
||||
mode: S_IFDIR | MODE_700,
|
||||
})),
|
||||
},
|
||||
os: {
|
||||
userInfo: () => ({ username: 'user' }),
|
||||
tmpdir: () => '/tmp',
|
||||
},
|
||||
path: path,
|
||||
processEnv: {},
|
||||
crypto: {
|
||||
createHash: vi.fn(() => {
|
||||
const hash = {
|
||||
update: vi.fn().mockReturnThis(),
|
||||
digest: vi.fn(() => 'h1'),
|
||||
};
|
||||
return hash;
|
||||
}),
|
||||
},
|
||||
processUid: 1000,
|
||||
processPid: 123,
|
||||
};
|
||||
|
||||
// 1. Initial exists check -> false
|
||||
// 2. mkdir checks (destDir) -> false
|
||||
// 3. renameSync -> throws
|
||||
// 4. existsSync (race check) -> true
|
||||
deps.fs.existsSync
|
||||
.mockReturnValueOnce(false)
|
||||
.mockReturnValueOnce(false)
|
||||
.mockReturnValue(true);
|
||||
|
||||
mockGetAsset.mockReturnValue(Buffer.from('asset_content'));
|
||||
|
||||
const runtime = prepareRuntime(mockManifest, mockGetAsset, deps);
|
||||
|
||||
expect(deps.fs.renameSync).toHaveBeenCalled();
|
||||
expect(runtime).toContain('gemini-runtime');
|
||||
expect(deps.fs.rmSync).toHaveBeenCalledWith(
|
||||
expect.stringContaining('gemini-setup'),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('handles rename race condition: fails if target is insecure', () => {
|
||||
const deps = {
|
||||
fs: {
|
||||
existsSync: vi.fn(),
|
||||
rmSync: vi.fn(),
|
||||
mkdirSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
renameSync: vi.fn(() => {
|
||||
throw new Error('Rename failed');
|
||||
}),
|
||||
readFileSync: vi.fn().mockReturnValue('content'),
|
||||
openSync: vi.fn(() => 1),
|
||||
readSync: vi.fn((fd, buffer) => {
|
||||
if (!buffer._readDone) {
|
||||
buffer._readDone = true;
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}),
|
||||
closeSync: vi.fn(),
|
||||
lstatSync: vi.fn(() => ({
|
||||
isDirectory: () => true,
|
||||
uid: 999, // Wrong UID
|
||||
mode: S_IFDIR | MODE_700,
|
||||
})),
|
||||
},
|
||||
os: {
|
||||
userInfo: () => ({ username: 'user' }),
|
||||
tmpdir: () => '/tmp',
|
||||
},
|
||||
path: path,
|
||||
processEnv: {},
|
||||
crypto: {
|
||||
createHash: vi.fn(() => {
|
||||
const hash = {
|
||||
update: vi.fn().mockReturnThis(),
|
||||
digest: vi.fn(() => 'h1'),
|
||||
};
|
||||
return hash;
|
||||
}),
|
||||
},
|
||||
processUid: 1000,
|
||||
processPid: 123,
|
||||
};
|
||||
|
||||
deps.fs.existsSync
|
||||
.mockReturnValueOnce(false)
|
||||
.mockReturnValueOnce(false)
|
||||
.mockReturnValue(true);
|
||||
|
||||
mockGetAsset.mockReturnValue(Buffer.from('asset_content'));
|
||||
|
||||
// Mock process.exit and console.error
|
||||
const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {});
|
||||
const consoleSpy = vi
|
||||
.spyOn(globalThis.console, 'error')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
prepareRuntime(mockManifest, mockGetAsset, deps);
|
||||
|
||||
expect(exitSpy).toHaveBeenCalledWith(1);
|
||||
|
||||
exitSpy.mockRestore();
|
||||
consoleSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('uses LOCALAPPDATA on Windows if available', () => {
|
||||
const originalPlatform = process.platform;
|
||||
Object.defineProperty(process, 'platform', {
|
||||
value: 'win32',
|
||||
configurable: true,
|
||||
});
|
||||
|
||||
const deps = {
|
||||
fs: {
|
||||
existsSync: vi.fn().mockReturnValue(false),
|
||||
mkdirSync: vi.fn(),
|
||||
rmSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
renameSync: vi.fn(),
|
||||
readFileSync: vi.fn().mockReturnValue('content'),
|
||||
openSync: vi.fn(() => 1),
|
||||
readSync: vi.fn((fd, buffer) => {
|
||||
if (!buffer._readDone) {
|
||||
buffer._readDone = true;
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}),
|
||||
closeSync: vi.fn(),
|
||||
lstatSync: vi.fn(() => ({
|
||||
isDirectory: () => true,
|
||||
uid: 0,
|
||||
mode: S_IFDIR | MODE_700,
|
||||
})),
|
||||
},
|
||||
os: {
|
||||
userInfo: () => ({ username: 'user' }),
|
||||
tmpdir: () => 'C:\\Temp',
|
||||
},
|
||||
path: {
|
||||
join: (...args) => args.join('\\'),
|
||||
dirname: (p) => p.split('\\').slice(0, -1).join('\\'),
|
||||
resolve: (p) => p,
|
||||
},
|
||||
processEnv: {
|
||||
LOCALAPPDATA: 'C:\\Users\\User\\AppData\\Local',
|
||||
},
|
||||
crypto: {
|
||||
createHash: vi.fn(() => {
|
||||
const hash = {
|
||||
update: vi.fn().mockReturnThis(),
|
||||
digest: vi.fn(() => 'h1'),
|
||||
};
|
||||
return hash;
|
||||
}),
|
||||
},
|
||||
processUid: 'unknown',
|
||||
};
|
||||
|
||||
prepareRuntime(mockManifest, mockGetAsset, deps);
|
||||
|
||||
expect(deps.fs.mkdirSync).toHaveBeenCalledWith(
|
||||
'C:\\Users\\User\\AppData\\Local\\Google\\GeminiCLI',
|
||||
expect.objectContaining({ recursive: true }),
|
||||
);
|
||||
|
||||
Object.defineProperty(process, 'platform', {
|
||||
value: originalPlatform,
|
||||
configurable: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('falls back to tmpdir on Windows if LOCALAPPDATA is missing', () => {
  // Force the Windows code path in prepareRuntime.
  const originalPlatform = process.platform;
  Object.defineProperty(process, 'platform', {
    value: 'win32',
    configurable: true,
  });

  try {
    const deps = {
      fs: {
        existsSync: vi.fn().mockReturnValue(false),
        mkdirSync: vi.fn(),
        rmSync: vi.fn(),
        writeFileSync: vi.fn(),
        renameSync: vi.fn(),
        readFileSync: vi.fn().mockReturnValue('content'),
        openSync: vi.fn(() => 1),
        // Simulate one successful read followed by EOF; the flag is
        // stashed on the buffer so the mock is stateless across calls.
        readSync: vi.fn((fd, buffer) => {
          if (!buffer._readDone) {
            buffer._readDone = true;
            return 1;
          }
          return 0;
        }),
        closeSync: vi.fn(),
        lstatSync: vi.fn(() => ({
          isDirectory: () => true,
          uid: 0,
          mode: S_IFDIR | MODE_700,
        })),
      },
      os: {
        userInfo: () => ({ username: 'user' }),
        tmpdir: () => 'C:\\Temp',
      },
      // Windows-style path shims so assertions can use literal '\\' paths.
      path: {
        join: (...args) => args.join('\\'),
        dirname: (p) => p.split('\\').slice(0, -1).join('\\'),
        resolve: (p) => p,
      },
      processEnv: {}, // Missing LOCALAPPDATA
      crypto: {
        createHash: vi.fn(() => {
          const hash = {
            update: vi.fn().mockReturnThis(),
            digest: vi.fn(() => 'h1'),
          };
          return hash;
        }),
      },
      processUid: 'unknown',
    };

    const runtime = prepareRuntime(mockManifest, mockGetAsset, deps);

    // Without LOCALAPPDATA the runtime should land in tmpdir, not the
    // per-user application-data directory.
    expect(runtime).toContain('C:\\Temp');
    expect(runtime).not.toContain('Google\\GeminiCLI');
  } finally {
    // Restore even if an assertion throws, so the faked platform value
    // cannot leak into other tests.
    Object.defineProperty(process, 'platform', {
      value: originalPlatform,
      configurable: true,
    });
  }
});
|
||||
|
||||
it('falls back to tmpdir on Windows if mkdir fails', () => {
  // Force the Windows code path in prepareRuntime.
  const originalPlatform = process.platform;
  Object.defineProperty(process, 'platform', {
    value: 'win32',
    configurable: true,
  });

  try {
    const deps = {
      fs: {
        existsSync: vi.fn().mockReturnValue(false),
        // Fail only for the LOCALAPPDATA-rooted directory; creation of the
        // tmpdir fallback must still succeed.
        mkdirSync: vi.fn((p) => {
          if (typeof p === 'string' && p.includes('Google\\GeminiCLI')) {
            throw new Error('Permission denied');
          }
        }),
        rmSync: vi.fn(),
        writeFileSync: vi.fn(),
        renameSync: vi.fn(),
        readFileSync: vi.fn().mockReturnValue('content'),
        openSync: vi.fn(() => 1),
        // Simulate one successful read followed by EOF; the flag is
        // stashed on the buffer so the mock is stateless across calls.
        readSync: vi.fn((fd, buffer) => {
          if (!buffer._readDone) {
            buffer._readDone = true;
            return 1;
          }
          return 0;
        }),
        closeSync: vi.fn(),
        lstatSync: vi.fn(() => ({
          isDirectory: () => true,
          uid: 0,
          mode: S_IFDIR | MODE_700,
        })),
      },
      os: {
        userInfo: () => ({ username: 'user' }),
        tmpdir: () => 'C:\\Temp',
      },
      // Windows-style path shims so assertions can use literal '\\' paths.
      path: {
        join: (...args) => args.join('\\'),
        dirname: (p) => p.split('\\').slice(0, -1).join('\\'),
        resolve: (p) => p,
      },
      processEnv: {
        LOCALAPPDATA: 'C:\\Users\\User\\AppData\\Local',
      },
      crypto: {
        createHash: vi.fn(() => {
          const hash = {
            update: vi.fn().mockReturnThis(),
            digest: vi.fn(() => 'h1'),
          };
          return hash;
        }),
      },
      processUid: 'unknown',
    };

    const runtime = prepareRuntime(mockManifest, mockGetAsset, deps);

    // The LOCALAPPDATA directory was attempted, failed, and the runtime
    // fell back to tmpdir.
    expect(runtime).toContain('C:\\Temp');
    expect(deps.fs.mkdirSync).toHaveBeenCalledWith(
      expect.stringContaining('Google\\GeminiCLI'),
      expect.anything(),
    );
  } finally {
    // Restore even if an assertion throws, so the faked platform value
    // cannot leak into other tests.
    Object.defineProperty(process, 'platform', {
      value: originalPlatform,
      configurable: true,
    });
  }
});
|
||||
});
|
||||
});
|
||||
Loading…
Reference in a new issue