mirror of
https://github.com/lobehub/lobehub
synced 2026-04-21 09:37:28 +00:00
✨ feat: refactor desktop implement with brand new 2.0
This commit is contained in:
parent
b5720434e4
commit
10e048c9c5
80 changed files with 4327 additions and 1636 deletions
30
.github/actions/setup-node-bun/action.yml
vendored
Normal file
30
.github/actions/setup-node-bun/action.yml
vendored
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
name: Setup Node and Bun
|
||||
description: Setup Node.js and Bun for workflows
|
||||
|
||||
inputs:
|
||||
node-version:
|
||||
description: Node.js version
|
||||
required: true
|
||||
bun-version:
|
||||
description: Bun version
|
||||
required: true
|
||||
package-manager-cache:
|
||||
description: Pass-through to actions/setup-node package-manager-cache
|
||||
required: false
|
||||
default: 'false'
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
package-manager-cache: ${{ inputs.package-manager-cache }}
|
||||
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: ${{ inputs.bun-version }}
|
||||
|
||||
|
||||
27
.github/actions/setup-node-pnpm/action.yml
vendored
Normal file
27
.github/actions/setup-node-pnpm/action.yml
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
name: Setup Node and pnpm
|
||||
description: Setup Node.js and pnpm for workflows
|
||||
|
||||
inputs:
|
||||
node-version:
|
||||
description: Node.js version
|
||||
required: true
|
||||
package-manager-cache:
|
||||
description: Pass-through to actions/setup-node package-manager-cache
|
||||
required: false
|
||||
default: 'false'
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ inputs.node-version }}
|
||||
package-manager-cache: ${{ inputs.package-manager-cache }}
|
||||
|
||||
|
||||
85
.github/workflows/desktop-build-electron.yml
vendored
Normal file
85
.github/workflows/desktop-build-electron.yml
vendored
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
name: Desktop Next Build
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- next
|
||||
pull_request:
|
||||
paths:
|
||||
- 'apps/desktop/**'
|
||||
- 'scripts/electronWorkflow/**'
|
||||
- 'package.json'
|
||||
- 'pnpm-lock.yaml'
|
||||
- 'bun.lockb'
|
||||
- 'src/**'
|
||||
- 'packages/**'
|
||||
- '.github/workflows/desktop-build-electron.yml'
|
||||
|
||||
concurrency:
|
||||
group: desktop-electron-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
NODE_VERSION: 24.11.1
|
||||
BUN_VERSION: 1.2.23
|
||||
|
||||
jobs:
|
||||
build-next:
|
||||
name: Build desktop Next bundle
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
NODE_OPTIONS: --max-old-space-size=6144
|
||||
UPDATE_CHANNEL: nightly
|
||||
NEXT_PUBLIC_DESKTOP_PROJECT_ID: ${{ secrets.UMAMI_NIGHTLY_DESKTOP_PROJECT_ID || 'dummy-desktop-project' }}
|
||||
NEXT_PUBLIC_DESKTOP_UMAMI_BASE_URL: ${{ secrets.UMAMI_NIGHTLY_DESKTOP_BASE_URL || 'https://analytics.example.com' }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
|
||||
- name: Enable Corepack
|
||||
run: corepack enable
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-store
|
||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Cache pnpm store
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ steps.pnpm-store.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ env.NODE_VERSION }}-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-${{ env.NODE_VERSION }}-
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Setup bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --node-linker=hoisted
|
||||
|
||||
- name: Install desktop dependencies
|
||||
run: |
|
||||
cd apps/desktop
|
||||
bun run install-isolated
|
||||
|
||||
- name: Build desktop Next.js bundle
|
||||
run: bun run desktop:build-electron
|
||||
341
.github/workflows/manual-build-desktop.yml
vendored
Normal file
341
.github/workflows/manual-build-desktop.yml
vendored
Normal file
|
|
@ -0,0 +1,341 @@
|
|||
name: Desktop Manual Build
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
channel:
|
||||
description: 'Release channel for desktop build (affects version suffix and workflow:set-desktop-version)'
|
||||
required: true
|
||||
default: nightly
|
||||
type: choice
|
||||
options:
|
||||
- nightly
|
||||
- beta
|
||||
- stable
|
||||
build_macos:
|
||||
description: 'Build macOS artifacts'
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
build_windows:
|
||||
description: 'Build Windows artifacts'
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
build_linux:
|
||||
description: 'Build Linux artifacts'
|
||||
required: true
|
||||
default: true
|
||||
type: boolean
|
||||
version:
|
||||
description: 'Override desktop version (e.g. 1.2.3). Leave empty to auto-generate.'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
concurrency:
|
||||
group: manual-${{ github.ref }}-${{ github.workflow }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
NODE_VERSION: 24.11.1
|
||||
BUN_VERSION: 1.2.23
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Code quality check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node & Bun
|
||||
uses: ./.github/actions/setup-node-bun
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
package-manager-cache: 'false'
|
||||
|
||||
- name: Install deps
|
||||
run: bun i
|
||||
env:
|
||||
NODE_OPTIONS: --max-old-space-size=6144
|
||||
|
||||
- name: Lint
|
||||
run: bun run lint
|
||||
env:
|
||||
NODE_OPTIONS: --max-old-space-size=6144
|
||||
|
||||
version:
|
||||
name: Determine version
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
version: ${{ steps.set_version.outputs.version }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
package-manager-cache: false
|
||||
|
||||
- name: Set version
|
||||
id: set_version
|
||||
env:
|
||||
INPUT_VERSION: ${{ inputs.version }}
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
run: |
|
||||
base_version=$(node -p "require('./apps/desktop/package.json').version")
|
||||
|
||||
if [ -n "$INPUT_VERSION" ]; then
|
||||
version="$INPUT_VERSION"
|
||||
echo "📦 Using provided version: ${version} (base: ${base_version})"
|
||||
else
|
||||
ci_build_number="${{ github.run_number }}"
|
||||
version="0.0.0-${CHANNEL}.manual.${ci_build_number}"
|
||||
echo "📦 Generated version: ${version} (base: ${base_version})"
|
||||
fi
|
||||
|
||||
echo "version=${version}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Version Summary
|
||||
run: |
|
||||
echo "🚦 Release Version: ${{ steps.set_version.outputs.version }}"
|
||||
|
||||
build-macos:
|
||||
needs: [version, test]
|
||||
name: Build Desktop App (macOS)
|
||||
if: inputs.build_macos
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [macos-latest, macos-15-intel]
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node & pnpm
|
||||
uses: ./.github/actions/setup-node-pnpm
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
package-manager-cache: 'false'
|
||||
|
||||
# node-linker=hoisted 模式将可以确保 asar 压缩可用
|
||||
- name: Install dependencies
|
||||
run: pnpm install --node-linker=hoisted
|
||||
|
||||
- name: Install deps on Desktop
|
||||
run: npm run install-isolated --prefix=./apps/desktop
|
||||
|
||||
- name: Set package version
|
||||
run: npm run workflow:set-desktop-version ${{ needs.version.outputs.version }} ${{ inputs.channel }}
|
||||
|
||||
- name: Build artifact on macOS
|
||||
run: npm run desktop:build
|
||||
env:
|
||||
UPDATE_CHANNEL: ${{ inputs.channel }}
|
||||
APP_URL: http://localhost:3015
|
||||
DATABASE_URL: 'postgresql://postgres@localhost:5432/postgres'
|
||||
KEY_VAULTS_SECRET: 'oLXWIiR/AKF+rWaqy9lHkrYgzpATbW3CtJp3UfkVgpE='
|
||||
CSC_LINK: ${{ secrets.APPLE_CERTIFICATE_BASE64 }}
|
||||
CSC_KEY_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
NEXT_PUBLIC_DESKTOP_PROJECT_ID: ${{ inputs.channel == 'beta' && secrets.UMAMI_BETA_DESKTOP_PROJECT_ID || secrets.UMAMI_NIGHTLY_DESKTOP_PROJECT_ID }}
|
||||
NEXT_PUBLIC_DESKTOP_UMAMI_BASE_URL: ${{ inputs.channel == 'beta' && secrets.UMAMI_BETA_DESKTOP_BASE_URL || secrets.UMAMI_NIGHTLY_DESKTOP_BASE_URL }}
|
||||
CSC_FOR_PULL_REQUEST: true
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
|
||||
- name: Rename macOS latest-mac.yml for multi-architecture support
|
||||
if: runner.os == 'macOS'
|
||||
run: |
|
||||
cd apps/desktop/release
|
||||
if [ -f "latest-mac.yml" ]; then
|
||||
SYSTEM_ARCH=$(uname -m)
|
||||
if [[ "$SYSTEM_ARCH" == "arm64" ]]; then
|
||||
ARCH_SUFFIX="arm64"
|
||||
else
|
||||
ARCH_SUFFIX="x64"
|
||||
fi
|
||||
|
||||
mv latest-mac.yml "latest-mac-${ARCH_SUFFIX}.yml"
|
||||
echo "✅ Renamed latest-mac.yml to latest-mac-${ARCH_SUFFIX}.yml (detected: $SYSTEM_ARCH)"
|
||||
ls -la latest-mac-*.yml
|
||||
else
|
||||
echo "⚠️ latest-mac.yml not found, skipping rename"
|
||||
ls -la latest*.yml || echo "No latest*.yml files found"
|
||||
fi
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: release-${{ matrix.os }}
|
||||
path: |
|
||||
apps/desktop/release/latest*
|
||||
apps/desktop/release/*.dmg*
|
||||
apps/desktop/release/*.zip*
|
||||
apps/desktop/release/*.exe*
|
||||
apps/desktop/release/*.AppImage
|
||||
apps/desktop/release/*.deb*
|
||||
apps/desktop/release/*.snap*
|
||||
apps/desktop/release/*.rpm*
|
||||
apps/desktop/release/*.tar.gz*
|
||||
retention-days: 5
|
||||
|
||||
build-windows:
|
||||
needs: [version, test]
|
||||
name: Build Desktop App (Windows)
|
||||
if: inputs.build_windows
|
||||
runs-on: windows-2025
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node & pnpm
|
||||
uses: ./.github/actions/setup-node-pnpm
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
package-manager-cache: 'false'
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --node-linker=hoisted
|
||||
|
||||
- name: Install deps on Desktop
|
||||
run: npm run install-isolated --prefix=./apps/desktop
|
||||
|
||||
- name: Set package version
|
||||
run: npm run workflow:set-desktop-version ${{ needs.version.outputs.version }} ${{ inputs.channel }}
|
||||
|
||||
- name: Build artifact on Windows
|
||||
run: npm run desktop:build
|
||||
env:
|
||||
UPDATE_CHANNEL: ${{ inputs.channel }}
|
||||
APP_URL: http://localhost:3015
|
||||
DATABASE_URL: 'postgresql://postgres@localhost:5432/postgres'
|
||||
KEY_VAULTS_SECRET: 'oLXWIiR/AKF+rWaqy9lHkrYgzpATbW3CtJp3UfkVgpE='
|
||||
NEXT_PUBLIC_DESKTOP_PROJECT_ID: ${{ inputs.channel == 'beta' && secrets.UMAMI_BETA_DESKTOP_PROJECT_ID || secrets.UMAMI_NIGHTLY_DESKTOP_PROJECT_ID }}
|
||||
NEXT_PUBLIC_DESKTOP_UMAMI_BASE_URL: ${{ inputs.channel == 'beta' && secrets.UMAMI_BETA_DESKTOP_BASE_URL || secrets.UMAMI_NIGHTLY_DESKTOP_BASE_URL }}
|
||||
TEMP: C:\temp
|
||||
TMP: C:\temp
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: release-windows-2025
|
||||
path: |
|
||||
apps/desktop/release/latest*
|
||||
apps/desktop/release/*.dmg*
|
||||
apps/desktop/release/*.zip*
|
||||
apps/desktop/release/*.exe*
|
||||
apps/desktop/release/*.AppImage
|
||||
apps/desktop/release/*.deb*
|
||||
apps/desktop/release/*.snap*
|
||||
apps/desktop/release/*.rpm*
|
||||
apps/desktop/release/*.tar.gz*
|
||||
retention-days: 5
|
||||
|
||||
build-linux:
|
||||
needs: [version, test]
|
||||
name: Build Desktop App (Linux)
|
||||
if: inputs.build_linux
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node & pnpm
|
||||
uses: ./.github/actions/setup-node-pnpm
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
package-manager-cache: 'false'
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --node-linker=hoisted
|
||||
|
||||
- name: Install deps on Desktop
|
||||
run: npm run install-isolated --prefix=./apps/desktop
|
||||
|
||||
- name: Set package version
|
||||
run: npm run workflow:set-desktop-version ${{ needs.version.outputs.version }} ${{ inputs.channel }}
|
||||
|
||||
- name: Build artifact on Linux
|
||||
run: npm run desktop:build
|
||||
env:
|
||||
UPDATE_CHANNEL: ${{ inputs.channel }}
|
||||
APP_URL: http://localhost:3015
|
||||
DATABASE_URL: 'postgresql://postgres@localhost:5432/postgres'
|
||||
KEY_VAULTS_SECRET: 'oLXWIiR/AKF+rWaqy9lHkrYgzpATbW3CtJp3UfkVgpE='
|
||||
NEXT_PUBLIC_DESKTOP_PROJECT_ID: ${{ inputs.channel == 'beta' && secrets.UMAMI_BETA_DESKTOP_PROJECT_ID || secrets.UMAMI_NIGHTLY_DESKTOP_PROJECT_ID }}
|
||||
NEXT_PUBLIC_DESKTOP_UMAMI_BASE_URL: ${{ inputs.channel == 'beta' && secrets.UMAMI_BETA_DESKTOP_BASE_URL || secrets.UMAMI_NIGHTLY_DESKTOP_BASE_URL }}
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: release-ubuntu-latest
|
||||
path: |
|
||||
apps/desktop/release/latest*
|
||||
apps/desktop/release/*.dmg*
|
||||
apps/desktop/release/*.zip*
|
||||
apps/desktop/release/*.exe*
|
||||
apps/desktop/release/*.AppImage
|
||||
apps/desktop/release/*.deb*
|
||||
apps/desktop/release/*.snap*
|
||||
apps/desktop/release/*.rpm*
|
||||
apps/desktop/release/*.tar.gz*
|
||||
retention-days: 5
|
||||
|
||||
merge-mac-files:
|
||||
needs: [build-macos, version]
|
||||
name: Merge macOS Release Files
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
if: inputs.build_macos
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Setup Node & Bun
|
||||
uses: ./.github/actions/setup-node-bun
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
package-manager-cache: 'false'
|
||||
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
path: release
|
||||
pattern: release-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: List downloaded artifacts
|
||||
run: ls -R release
|
||||
|
||||
- name: Install yaml only for merge step
|
||||
run: |
|
||||
cd scripts/electronWorkflow
|
||||
if [ ! -f package.json ]; then
|
||||
echo '{"name":"merge-mac-release","private":true}' > package.json
|
||||
fi
|
||||
bun add --no-save yaml@2.8.1
|
||||
|
||||
- name: Merge latest-mac.yml files
|
||||
run: bun run scripts/electronWorkflow/mergeMacReleaseFiles.js
|
||||
|
||||
- name: Upload artifacts with merged macOS files
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: merged-release-manual
|
||||
path: release/
|
||||
retention-days: 1
|
||||
49
.github/workflows/pr-build-desktop.yml
vendored
49
.github/workflows/pr-build-desktop.yml
vendored
|
|
@ -29,16 +29,12 @@ jobs:
|
|||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
- name: Setup Node & Bun
|
||||
uses: ./.github/actions/setup-node-bun
|
||||
with:
|
||||
node-version: 24.11.1
|
||||
package-manager-cache: false
|
||||
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.2.23
|
||||
package-manager-cache: 'false'
|
||||
|
||||
- name: Install deps
|
||||
run: bun i
|
||||
|
|
@ -103,16 +99,11 @@ jobs:
|
|||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
- name: Setup Node & pnpm
|
||||
uses: ./.github/actions/setup-node-pnpm
|
||||
with:
|
||||
node-version: 24.11.1
|
||||
package-manager-cache: false
|
||||
package-manager-cache: 'false'
|
||||
|
||||
# node-linker=hoisted 模式将可以确保 asar 压缩可用
|
||||
- name: Install dependencies
|
||||
|
|
@ -132,11 +123,11 @@ jobs:
|
|||
run: npm run desktop:build
|
||||
env:
|
||||
# 设置更新通道,PR构建为nightly,否则为stable
|
||||
UPDATE_CHANNEL: "nightly"
|
||||
UPDATE_CHANNEL: 'nightly'
|
||||
APP_URL: http://localhost:3015
|
||||
DATABASE_URL: "postgresql://postgres@localhost:5432/postgres"
|
||||
DATABASE_URL: 'postgresql://postgres@localhost:5432/postgres'
|
||||
# 默认添加一个加密 SECRET
|
||||
KEY_VAULTS_SECRET: "oLXWIiR/AKF+rWaqy9lHkrYgzpATbW3CtJp3UfkVgpE="
|
||||
KEY_VAULTS_SECRET: 'oLXWIiR/AKF+rWaqy9lHkrYgzpATbW3CtJp3UfkVgpE='
|
||||
# macOS 签名和公证配置(fork 的 PR 访问不到 secrets,会跳过签名)
|
||||
CSC_LINK: ${{ secrets.APPLE_CERTIFICATE_BASE64 }}
|
||||
CSC_KEY_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
|
|
@ -156,10 +147,10 @@ jobs:
|
|||
run: npm run desktop:build
|
||||
env:
|
||||
# 设置更新通道,PR构建为nightly,否则为stable
|
||||
UPDATE_CHANNEL: "nightly"
|
||||
UPDATE_CHANNEL: 'nightly'
|
||||
APP_URL: http://localhost:3015
|
||||
DATABASE_URL: "postgresql://postgres@localhost:5432/postgres"
|
||||
KEY_VAULTS_SECRET: "oLXWIiR/AKF+rWaqy9lHkrYgzpATbW3CtJp3UfkVgpE="
|
||||
DATABASE_URL: 'postgresql://postgres@localhost:5432/postgres'
|
||||
KEY_VAULTS_SECRET: 'oLXWIiR/AKF+rWaqy9lHkrYgzpATbW3CtJp3UfkVgpE='
|
||||
NEXT_PUBLIC_DESKTOP_PROJECT_ID: ${{ secrets.UMAMI_NIGHTLY_DESKTOP_PROJECT_ID }}
|
||||
NEXT_PUBLIC_DESKTOP_UMAMI_BASE_URL: ${{ secrets.UMAMI_NIGHTLY_DESKTOP_BASE_URL }}
|
||||
# 将 TEMP 和 TMP 目录设置到 C 盘
|
||||
|
|
@ -172,10 +163,10 @@ jobs:
|
|||
run: npm run desktop:build
|
||||
env:
|
||||
# 设置更新通道,PR构建为nightly,否则为stable
|
||||
UPDATE_CHANNEL: "nightly"
|
||||
UPDATE_CHANNEL: 'nightly'
|
||||
APP_URL: http://localhost:3015
|
||||
DATABASE_URL: "postgresql://postgres@localhost:5432/postgres"
|
||||
KEY_VAULTS_SECRET: "oLXWIiR/AKF+rWaqy9lHkrYgzpATbW3CtJp3UfkVgpE="
|
||||
DATABASE_URL: 'postgresql://postgres@localhost:5432/postgres'
|
||||
KEY_VAULTS_SECRET: 'oLXWIiR/AKF+rWaqy9lHkrYgzpATbW3CtJp3UfkVgpE='
|
||||
NEXT_PUBLIC_DESKTOP_PROJECT_ID: ${{ secrets.UMAMI_NIGHTLY_DESKTOP_PROJECT_ID }}
|
||||
NEXT_PUBLIC_DESKTOP_UMAMI_BASE_URL: ${{ secrets.UMAMI_NIGHTLY_DESKTOP_BASE_URL }}
|
||||
|
||||
|
|
@ -229,16 +220,12 @@ jobs:
|
|||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
- name: Setup Node & Bun
|
||||
uses: ./.github/actions/setup-node-bun
|
||||
with:
|
||||
node-version: 24.11.1
|
||||
package-manager-cache: false
|
||||
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.2.23
|
||||
package-manager-cache: 'false'
|
||||
|
||||
# 下载所有平台的构建产物
|
||||
- name: Download artifacts
|
||||
|
|
|
|||
2
.github/workflows/test.yml
vendored
2
.github/workflows/test.yml
vendored
|
|
@ -146,7 +146,7 @@ jobs:
|
|||
NODE_OPTIONS: --max-old-space-size=6144
|
||||
|
||||
- name: Typecheck Desktop
|
||||
run: pnpm typecheck
|
||||
run: pnpm type-check
|
||||
working-directory: apps/desktop
|
||||
|
||||
- name: Test Desktop Client
|
||||
|
|
|
|||
15
Dockerfile
15
Dockerfile
|
|
@ -107,6 +107,19 @@ COPY . .
|
|||
# run build standalone for docker version
|
||||
RUN npm run build:docker
|
||||
|
||||
# Prepare desktop export assets for Electron packaging (if generated)
|
||||
RUN <<'EOF'
|
||||
set -e
|
||||
if [ -d "/app/out" ]; then
|
||||
mkdir -p /app/apps/desktop/dist/next
|
||||
cp -a /app/out/. /app/apps/desktop/dist/next/
|
||||
echo "✅ Copied Next export output into /app/apps/desktop/dist/next"
|
||||
else
|
||||
echo "ℹ️ No Next export output found at /app/out, creating empty directory"
|
||||
mkdir -p /app/apps/desktop/dist/next
|
||||
fi
|
||||
EOF
|
||||
|
||||
## Application image, copy all the files for production
|
||||
FROM busybox:latest AS app
|
||||
|
||||
|
|
@ -115,6 +128,8 @@ COPY --from=base /distroless/ /
|
|||
# Automatically leverage output traces to reduce image size
|
||||
# https://nextjs.org/docs/advanced-features/output-file-tracing
|
||||
COPY --from=builder /app/.next/standalone /app/
|
||||
# Copy Next export output for desktop renderer
|
||||
COPY --from=builder /app/apps/desktop/dist/next /app/apps/desktop/dist/next
|
||||
|
||||
# Copy database migrations
|
||||
COPY --from=builder /app/packages/database/migrations /app/migrations
|
||||
|
|
|
|||
|
|
@ -4,3 +4,19 @@ ignore-workspace-root-check=true
|
|||
|
||||
electron_mirror=https://npmmirror.com/mirrors/electron/
|
||||
electron_builder_binaries_mirror=https://npmmirror.com/mirrors/electron-builder-binaries/
|
||||
|
||||
public-hoist-pattern[]=*@umijs/lint*
|
||||
public-hoist-pattern[]=*unicorn*
|
||||
public-hoist-pattern[]=*changelog*
|
||||
public-hoist-pattern[]=*commitlint*
|
||||
public-hoist-pattern[]=*eslint*
|
||||
public-hoist-pattern[]=*postcss*
|
||||
public-hoist-pattern[]=*prettier*
|
||||
public-hoist-pattern[]=*remark*
|
||||
public-hoist-pattern[]=*semantic-release*
|
||||
public-hoist-pattern[]=*stylelint*
|
||||
|
||||
public-hoist-pattern[]=@auth/core
|
||||
public-hoist-pattern[]=@clerk/backend
|
||||
public-hoist-pattern[]=@clerk/types
|
||||
public-hoist-pattern[]=pdfjs-dist
|
||||
|
|
|
|||
62
apps/desktop/.prettierignore
Normal file
62
apps/desktop/.prettierignore
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
# Prettierignore for LobeHub
|
||||
################################################################
|
||||
|
||||
# general
|
||||
.DS_Store
|
||||
.editorconfig
|
||||
.idea
|
||||
.history
|
||||
.temp
|
||||
.env.local
|
||||
.husky
|
||||
.npmrc
|
||||
.gitkeep
|
||||
venv
|
||||
temp
|
||||
tmp
|
||||
LICENSE
|
||||
|
||||
# dependencies
|
||||
node_modules
|
||||
*.log
|
||||
*.lock
|
||||
package-lock.json
|
||||
|
||||
# ci
|
||||
coverage
|
||||
.coverage
|
||||
.eslintcache
|
||||
.stylelintcache
|
||||
test-output
|
||||
__snapshots__
|
||||
*.snap
|
||||
|
||||
# production
|
||||
dist
|
||||
es
|
||||
lib
|
||||
logs
|
||||
|
||||
# umi
|
||||
.umi
|
||||
.umi-production
|
||||
.umi-test
|
||||
.dumi/tmp*
|
||||
|
||||
# ignore files
|
||||
.*ignore
|
||||
|
||||
# docker
|
||||
docker
|
||||
Dockerfile*
|
||||
|
||||
# image
|
||||
*.webp
|
||||
*.gif
|
||||
*.png
|
||||
*.jpg
|
||||
*.svg
|
||||
|
||||
# misc
|
||||
# add other ignore file below
|
||||
.next
|
||||
1
apps/desktop/.prettierrc.cjs
Normal file
1
apps/desktop/.prettierrc.cjs
Normal file
|
|
@ -0,0 +1 @@
|
|||
module.exports = require('@lobehub/lint').prettier;
|
||||
1
apps/desktop/.remarkrc.cjs
Normal file
1
apps/desktop/.remarkrc.cjs
Normal file
|
|
@ -0,0 +1 @@
|
|||
module.exports = require('@lobehub/lint').remarklint;
|
||||
39
apps/desktop/.stylelintignore
Normal file
39
apps/desktop/.stylelintignore
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
# Stylelintignore for LobeHub
|
||||
################################################################
|
||||
|
||||
# dependencies
|
||||
node_modules
|
||||
|
||||
# ci
|
||||
coverage
|
||||
.coverage
|
||||
|
||||
# production
|
||||
dist
|
||||
es
|
||||
lib
|
||||
logs
|
||||
|
||||
# framework specific
|
||||
.next
|
||||
.umi
|
||||
.umi-production
|
||||
.umi-test
|
||||
.dumi/tmp*
|
||||
|
||||
# temporary directories
|
||||
tmp
|
||||
temp
|
||||
.temp
|
||||
.local
|
||||
docs/.local
|
||||
|
||||
# cache directories
|
||||
.cache
|
||||
|
||||
# AI coding tools directories
|
||||
.claude
|
||||
.serena
|
||||
|
||||
# MCP tools
|
||||
/.serena/**
|
||||
9
apps/desktop/.stylelintrc.cjs
Normal file
9
apps/desktop/.stylelintrc.cjs
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
const config = require('@lobehub/lint').stylelint;
|
||||
|
||||
module.exports = {
|
||||
...config,
|
||||
rules: {
|
||||
'selector-id-pattern': null,
|
||||
...config.rules,
|
||||
},
|
||||
};
|
||||
|
|
@ -32,7 +32,7 @@ pnpm install-isolated
|
|||
pnpm electron:dev
|
||||
|
||||
# Type checking
|
||||
pnpm typecheck
|
||||
pnpm type-check
|
||||
|
||||
# Run tests
|
||||
pnpm test
|
||||
|
|
@ -66,9 +66,9 @@ cp .env.desktop .env
|
|||
pnpm electron:dev # Start with hot reload
|
||||
|
||||
# 2. Code Quality
|
||||
pnpm lint # ESLint checking
|
||||
pnpm format # Prettier formatting
|
||||
pnpm typecheck # TypeScript validation
|
||||
pnpm lint # ESLint checking
|
||||
pnpm format # Prettier formatting
|
||||
pnpm type-check # TypeScript validation
|
||||
|
||||
# 3. Testing
|
||||
pnpm test # Run Vitest tests
|
||||
|
|
@ -313,7 +313,7 @@ tests/ # Integration tests
|
|||
```bash
|
||||
pnpm test # Run all tests
|
||||
pnpm test:watch # Watch mode
|
||||
pnpm typecheck # Type validation
|
||||
pnpm type-check # Type validation
|
||||
```
|
||||
|
||||
### Test Coverage
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ pnpm install-isolated
|
|||
pnpm electron:dev
|
||||
|
||||
# 类型检查
|
||||
pnpm typecheck
|
||||
pnpm type-check
|
||||
|
||||
# 运行测试
|
||||
pnpm test
|
||||
|
|
@ -66,9 +66,9 @@ cp .env.desktop .env
|
|||
pnpm electron:dev # 启动热重载开发服务器
|
||||
|
||||
# 2. 代码质量
|
||||
pnpm lint # ESLint 检查
|
||||
pnpm format # Prettier 格式化
|
||||
pnpm typecheck # TypeScript 验证
|
||||
pnpm lint # ESLint 检查
|
||||
pnpm format # Prettier 格式化
|
||||
pnpm type-check # TypeScript 验证
|
||||
|
||||
# 3. 测试
|
||||
pnpm test # 运行 Vitest 测试
|
||||
|
|
@ -302,7 +302,7 @@ tests/ # 集成测试
|
|||
```bash
|
||||
pnpm test # 运行所有测试
|
||||
pnpm test:watch # 监视模式
|
||||
pnpm typecheck # 类型验证
|
||||
pnpm type-check # 类型验证
|
||||
```
|
||||
|
||||
### 测试覆盖
|
||||
|
|
|
|||
|
|
@ -17,6 +17,10 @@ console.log(`🏗️ Building for architecture: ${arch}`);
|
|||
const isNightly = channel === 'nightly';
|
||||
const isBeta = packageJSON.name.includes('beta');
|
||||
|
||||
// Keep only these Electron Framework localization folders (*.lproj)
|
||||
// (aligned with previous Electron Forge build config)
|
||||
const keepLanguages = new Set(['en', 'en_GB', 'en-US', 'en_US']);
|
||||
|
||||
// https://www.electron.build/code-signing-mac#how-to-disable-code-signing-during-the-build-process-on-macos
|
||||
if (!hasAppleCertificate) {
|
||||
// Disable auto discovery to keep electron-builder from searching unavailable signing identities
|
||||
|
|
@ -54,7 +58,7 @@ const config = {
|
|||
*/
|
||||
afterPack: async (context) => {
|
||||
// Only process macOS builds
|
||||
if (context.electronPlatformName !== 'darwin') {
|
||||
if (!['darwin', 'mas'].includes(context.electronPlatformName)) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
@ -68,6 +72,36 @@ const config = {
|
|||
);
|
||||
const assetsCarDest = path.join(resourcesPath, 'Assets.car');
|
||||
|
||||
// Remove unused Electron Framework localizations to reduce app size
|
||||
// Equivalent to:
|
||||
// ../../Frameworks/Electron Framework.framework/Versions/A/Resources/*.lproj
|
||||
const frameworkResourcePath = path.join(
|
||||
context.appOutDir,
|
||||
`${context.packager.appInfo.productFilename}.app`,
|
||||
'Contents',
|
||||
'Frameworks',
|
||||
'Electron Framework.framework',
|
||||
'Versions',
|
||||
'A',
|
||||
'Resources',
|
||||
);
|
||||
|
||||
try {
|
||||
const entries = await fs.readdir(frameworkResourcePath);
|
||||
await Promise.all(
|
||||
entries.map(async (file) => {
|
||||
if (!file.endsWith('.lproj')) return;
|
||||
|
||||
const lang = file.split('.')[0];
|
||||
if (keepLanguages.has(lang)) return;
|
||||
|
||||
await fs.rm(path.join(frameworkResourcePath, file), { force: true, recursive: true });
|
||||
}),
|
||||
);
|
||||
} catch {
|
||||
// Non-critical: folder may not exist depending on packaging details
|
||||
}
|
||||
|
||||
try {
|
||||
await fs.access(assetsCarSource);
|
||||
await fs.copyFile(assetsCarSource, assetsCarDest);
|
||||
|
|
@ -106,6 +140,8 @@ const config = {
|
|||
files: [
|
||||
'dist',
|
||||
'resources',
|
||||
// Ensure Next export assets are packaged
|
||||
'dist/next/**/*',
|
||||
'!resources/locales',
|
||||
'!dist/next/docs',
|
||||
'!dist/next/packages',
|
||||
|
|
|
|||
|
|
@ -11,21 +11,30 @@
|
|||
"author": "LobeHub",
|
||||
"main": "./dist/main/index.js",
|
||||
"scripts": {
|
||||
"build": "npm run typecheck && electron-vite build",
|
||||
"build": "electron-vite build",
|
||||
"build-local": "npm run build && electron-builder --dir --config electron-builder.js --c.mac.notarize=false -c.mac.identity=null --c.asar=false",
|
||||
"build:linux": "npm run build && electron-builder --linux --config electron-builder.js --publish never",
|
||||
"build:mac": "npm run build && electron-builder --mac --config electron-builder.js --publish never",
|
||||
"build:mac:local": "npm run build && UPDATE_CHANNEL=nightly electron-builder --mac --config electron-builder.js --publish never",
|
||||
"build:win": "npm run build && electron-builder --win --config electron-builder.js --publish never",
|
||||
"dev": "electron-vite dev",
|
||||
"electron:dev": "electron-vite dev",
|
||||
"electron:run-unpack": "electron .",
|
||||
"format": "prettier --write ",
|
||||
"i18n": "tsx scripts/i18nWorkflow/index.ts && lobe-i18n",
|
||||
"postinstall": "electron-builder install-app-deps",
|
||||
"install-isolated": "pnpm install",
|
||||
"lint": "eslint --cache ",
|
||||
"lint": "npm run lint:ts && npm run lint:style && npm run type-check && npm run lint:circular",
|
||||
"lint:circular": "npm run lint:circular:main && npm run lint:circular:packages",
|
||||
"lint:circular:main": "dpdm src/**/*.ts --no-warning --no-tree --exit-code circular:1 --no-progress -T true --skip-dynamic-imports circular",
|
||||
"lint:circular:packages": "dpdm packages/**/src/**/*.ts --no-warning --no-tree --exit-code circular:1 --no-progress -T true --skip-dynamic-imports circular",
|
||||
"lint:md": "remark . --silent --output",
|
||||
"lint:style": "stylelint \"{src,tests}/**/*.{js,jsx,ts,tsx}\" --fix",
|
||||
"lint:ts": "eslint \"{src,tests}/**/*.{js,jsx,ts,tsx}\" --fix",
|
||||
"start": "electron-vite preview",
|
||||
"stylelint": "stylelint \"src/**/*.{js,jsx,ts,tsx}\" --fix",
|
||||
"test": "vitest --run",
|
||||
"type-check": "tsgo --noEmit -p tsconfig.json",
|
||||
"typecheck": "tsgo --noEmit -p tsconfig.json"
|
||||
},
|
||||
"dependencies": {
|
||||
|
|
@ -33,7 +42,8 @@
|
|||
"electron-window-state": "^5.0.3",
|
||||
"fetch-socks": "^1.3.2",
|
||||
"get-port-please": "^3.2.0",
|
||||
"pdfjs-dist": "4.10.38"
|
||||
"pdfjs-dist": "4.10.38",
|
||||
"superjson": "^2.2.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@electron-toolkit/eslint-config-prettier": "^3.0.0",
|
||||
|
|
@ -41,15 +51,17 @@
|
|||
"@electron-toolkit/preload": "^3.0.2",
|
||||
"@electron-toolkit/tsconfig": "^2.0.0",
|
||||
"@electron-toolkit/utils": "^4.0.0",
|
||||
"@lobechat/desktop-bridge": "workspace:*",
|
||||
"@lobechat/electron-client-ipc": "workspace:*",
|
||||
"@lobechat/electron-server-ipc": "workspace:*",
|
||||
"@lobechat/file-loaders": "workspace:*",
|
||||
"@lobehub/i18n-cli": "^1.25.1",
|
||||
"@modelcontextprotocol/sdk": "^1.24.3",
|
||||
"@types/async-retry": "^1.4.9",
|
||||
"@types/lodash": "^4.17.21",
|
||||
"@types/resolve": "^1.20.6",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/set-cookie-parser": "^2.4.10",
|
||||
"@t3-oss/env-core": "^0.13.8",
|
||||
"@typescript/native-preview": "7.0.0-dev.20251210.1",
|
||||
"async-retry": "^1.3.3",
|
||||
"consola": "^3.4.2",
|
||||
|
|
@ -57,10 +69,13 @@
|
|||
"diff": "^8.0.2",
|
||||
"electron": "^38.7.2",
|
||||
"electron-builder": "^26.0.12",
|
||||
"electron-devtools-installer": "^3.2.0",
|
||||
"electron-is": "^3.0.0",
|
||||
"electron-log": "^5.4.3",
|
||||
"electron-store": "^8.2.0",
|
||||
"electron-vite": "^4.0.1",
|
||||
"es-toolkit": "^1.43.0",
|
||||
"eslint": "^8.57.1",
|
||||
"execa": "^9.6.1",
|
||||
"fast-glob": "^3.3.3",
|
||||
"fix-path": "^5.0.0",
|
||||
|
|
@ -69,17 +84,19 @@
|
|||
"https-proxy-agent": "^7.0.6",
|
||||
"i18next": "^25.7.2",
|
||||
"just-diff": "^6.0.2",
|
||||
"lodash": "^4.17.21",
|
||||
"lodash-es": "^4.17.21",
|
||||
"prettier": "^3.7.4",
|
||||
"remark-cli": "^12.0.1",
|
||||
"resolve": "^1.22.11",
|
||||
"semver": "^7.7.3",
|
||||
"set-cookie-parser": "^2.7.2",
|
||||
"stylelint": "^15.11.0",
|
||||
"tsx": "^4.21.0",
|
||||
"typescript": "^5.9.3",
|
||||
"undici": "^7.16.0",
|
||||
"uuid": "^13.0.0",
|
||||
"vite": "^7.2.7",
|
||||
"vitest": "^3.2.4"
|
||||
"vitest": "^3.2.4",
|
||||
"zod": "^3.25.76"
|
||||
},
|
||||
"pnpm": {
|
||||
"onlyBuiltDependencies": [
|
||||
|
|
@ -87,4 +104,4 @@
|
|||
"electron-builder"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2,4 +2,5 @@ packages:
|
|||
- '../../packages/electron-server-ipc'
|
||||
- '../../packages/electron-client-ipc'
|
||||
- '../../packages/file-loaders'
|
||||
- '../../packages/desktop-bridge'
|
||||
- '.'
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import { consola } from 'consola';
|
||||
import { colors } from 'consola/utils';
|
||||
import { unset } from 'es-toolkit/compat';
|
||||
import { diff } from 'just-diff';
|
||||
import { unset } from 'lodash';
|
||||
import { existsSync } from 'node:fs';
|
||||
|
||||
import {
|
||||
|
|
@ -34,7 +34,7 @@ export const genDiff = () => {
|
|||
continue;
|
||||
}
|
||||
|
||||
const clearLocals = [];
|
||||
const clearLocals: string[] = [];
|
||||
|
||||
for (const locale of [i18nConfig.entryLocale, ...i18nConfig.outputLocales]) {
|
||||
const localeFilepath = outputLocaleJsonFilepath(locale, `${ns}.json`);
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ export const appBrowsers = {
|
|||
identifier: 'chat',
|
||||
keepAlive: true,
|
||||
minWidth: 400,
|
||||
path: '/chat',
|
||||
path: '/agent',
|
||||
showOnInit: true,
|
||||
titleBarStyle: 'hidden',
|
||||
vibrancy: 'under-window',
|
||||
|
|
@ -72,7 +72,7 @@ export const windowTemplates = {
|
|||
allowMultipleInstances: true,
|
||||
autoHideMenuBar: true,
|
||||
baseIdentifier: 'chatSingle',
|
||||
basePath: '/chat',
|
||||
basePath: '/agent',
|
||||
height: 600,
|
||||
keepAlive: false, // Multi-instance windows don't need to stay alive
|
||||
minWidth: 400,
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { app } from 'electron';
|
||||
import { pathExistsSync } from 'fs-extra';
|
||||
import { join } from 'node:path';
|
||||
|
||||
export const mainDir = join(__dirname);
|
||||
|
|
@ -11,7 +12,12 @@ export const buildDir = join(mainDir, '../../build');
|
|||
|
||||
const appPath = app.getAppPath();
|
||||
|
||||
export const nextStandaloneDir = join(appPath, 'dist', 'next');
|
||||
const nextExportOutDir = join(appPath, 'dist', 'next', 'out');
|
||||
const nextExportDefaultDir = join(appPath, 'dist', 'next');
|
||||
|
||||
export const nextExportDir = pathExistsSync(nextExportOutDir)
|
||||
? nextExportOutDir
|
||||
: nextExportDefaultDir;
|
||||
|
||||
export const userDataDir = app.getPath('userData');
|
||||
|
||||
|
|
@ -19,10 +25,6 @@ export const appStorageDir = join(userDataDir, 'lobehub-storage');
|
|||
|
||||
// ------ Application storage directory ---- //
|
||||
|
||||
// db schema hash
|
||||
export const DB_SCHEMA_HASH_FILENAME = 'lobehub-local-db-schema-hash';
|
||||
// pglite database dir
|
||||
export const LOCAL_DATABASE_DIR = 'lobehub-local-db';
|
||||
// 本地存储文件(模拟 S3)
|
||||
export const FILE_STORAGE_DIR = 'file-storage';
|
||||
// Plugin 安装目录
|
||||
|
|
|
|||
|
|
@ -1,9 +1,11 @@
|
|||
import { dev, linux, macOS, windows } from 'electron-is';
|
||||
import os from 'node:os';
|
||||
|
||||
import { getDesktopEnv } from '@/env';
|
||||
|
||||
export const isDev = dev();
|
||||
|
||||
export const OFFICIAL_CLOUD_SERVER = process.env.OFFICIAL_CLOUD_SERVER || 'https://lobechat.com';
|
||||
export const OFFICIAL_CLOUD_SERVER = getDesktopEnv().OFFICIAL_CLOUD_SERVER;
|
||||
|
||||
export const isMac = macOS();
|
||||
export const isWindows = windows();
|
||||
|
|
|
|||
1
apps/desktop/src/main/const/protocol.ts
Normal file
1
apps/desktop/src/main/const/protocol.ts
Normal file
|
|
@ -0,0 +1 @@
|
|||
export const ELECTRON_BE_PROTOCOL_SCHEME = 'lobe-backend';
|
||||
|
|
@ -25,7 +25,7 @@ export const defaultProxySettings: NetworkProxySettings = {
|
|||
* 存储默认值
|
||||
*/
|
||||
export const STORE_DEFAULTS: ElectronMainStore = {
|
||||
dataSyncConfig: { storageMode: 'local' },
|
||||
dataSyncConfig: { storageMode: 'cloud' },
|
||||
encryptedTokens: {},
|
||||
locale: 'auto',
|
||||
networkProxy: defaultProxySettings,
|
||||
|
|
|
|||
|
|
@ -563,7 +563,7 @@ export default class AuthCtr extends ControllerModule {
|
|||
// Hash codeVerifier using SHA-256
|
||||
const encoder = new TextEncoder();
|
||||
const data = encoder.encode(codeVerifier);
|
||||
const digest = await crypto.subtle.digest('SHA-256', data);
|
||||
const digest = await crypto.subtle.digest('SHA-256', data as unknown as NodeJS.BufferSource);
|
||||
|
||||
// Convert hash result to base64url encoding
|
||||
const challenge = Buffer.from(digest)
|
||||
|
|
|
|||
|
|
@ -25,25 +25,20 @@ export default class BrowserWindowsCtr extends ControllerModule {
|
|||
console.log('[BrowserWindowsCtr] Received request to open settings', normalizedOptions);
|
||||
|
||||
try {
|
||||
const query = new URLSearchParams();
|
||||
if (normalizedOptions.searchParams) {
|
||||
Object.entries(normalizedOptions.searchParams).forEach(([key, value]) => {
|
||||
if (value !== undefined) query.set(key, value);
|
||||
});
|
||||
}
|
||||
let fullPath: string;
|
||||
|
||||
const tab = normalizedOptions.tab;
|
||||
if (tab && tab !== 'common' && !query.has('active')) {
|
||||
query.set('active', tab);
|
||||
// If direct path is provided, use it directly
|
||||
if (normalizedOptions.path) {
|
||||
fullPath = normalizedOptions.path;
|
||||
} else {
|
||||
// Legacy support for tab and searchParams
|
||||
const tab = normalizedOptions.tab;
|
||||
fullPath = tab ? `/settings/${tab}` : '/settings/common';
|
||||
}
|
||||
|
||||
const queryString = query.toString();
|
||||
const subPath = tab && !queryString ? `/${tab}` : '';
|
||||
const fullPath = `/settings${subPath}${queryString ? `?${queryString}` : ''}`;
|
||||
|
||||
const mainWindow = this.app.browserManager.getMainWindow();
|
||||
await mainWindow.loadUrl(fullPath);
|
||||
mainWindow.show();
|
||||
mainWindow.broadcast('navigate', { path: fullPath });
|
||||
|
||||
return { success: true };
|
||||
} catch (error) {
|
||||
|
|
|
|||
579
apps/desktop/src/main/controllers/McpCtr.ts
Normal file
579
apps/desktop/src/main/controllers/McpCtr.ts
Normal file
|
|
@ -0,0 +1,579 @@
|
|||
import { exec } from 'node:child_process';
|
||||
import { createHash, randomUUID } from 'node:crypto';
|
||||
import path from 'node:path';
|
||||
import { promisify } from 'node:util';
|
||||
import superjson from 'superjson';
|
||||
|
||||
import FileService from '@/services/fileSrv';
|
||||
import { createLogger } from '@/utils/logger';
|
||||
|
||||
import { MCPClient } from '../libs/mcp/client';
|
||||
import type { MCPClientParams, ToolCallContent, ToolCallResult } from '../libs/mcp/types';
|
||||
import { ControllerModule, IpcMethod } from './index';
|
||||
|
||||
const execPromise = promisify(exec);
|
||||
const logger = createLogger('controllers:McpCtr');
|
||||
|
||||
/**
|
||||
* Desktop-only copy of `@lobechat/types`'s `CheckMcpInstallResult`.
|
||||
*
|
||||
* We intentionally keep it local to avoid pulling the web app's path-alias
|
||||
* expectations (e.g. `@/config/*`) into the desktop `tsgo` typecheck.
|
||||
*/
|
||||
interface CheckMcpInstallResult {
|
||||
allDependenciesMet?: boolean;
|
||||
allOptions?: Array<{
|
||||
allDependenciesMet?: boolean;
|
||||
connection?: {
|
||||
args?: string[];
|
||||
command?: string;
|
||||
installationMethod: string;
|
||||
packageName?: string;
|
||||
repositoryUrl?: string;
|
||||
};
|
||||
isRecommended?: boolean;
|
||||
packageInstalled?: boolean;
|
||||
systemDependencies?: Array<{
|
||||
error?: string;
|
||||
installed: boolean;
|
||||
meetRequirement: boolean;
|
||||
name: string;
|
||||
version?: string;
|
||||
}>;
|
||||
}>;
|
||||
configSchema?: any;
|
||||
connection?: {
|
||||
args?: string[];
|
||||
command?: string;
|
||||
type: 'stdio' | 'http';
|
||||
url?: string;
|
||||
};
|
||||
error?: string;
|
||||
isRecommended?: boolean;
|
||||
needsConfig?: boolean;
|
||||
packageInstalled?: boolean;
|
||||
platform: string;
|
||||
success: boolean;
|
||||
systemDependencies?: Array<{
|
||||
error?: string;
|
||||
installed: boolean;
|
||||
meetRequirement: boolean;
|
||||
name: string;
|
||||
version?: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface CustomPluginMetadata {
|
||||
avatar?: string;
|
||||
description?: string;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
interface GetStdioMcpServerManifestInput {
|
||||
args?: string[];
|
||||
command: string;
|
||||
env?: Record<string, string>;
|
||||
metadata?: CustomPluginMetadata;
|
||||
name: string;
|
||||
type?: 'stdio';
|
||||
}
|
||||
|
||||
interface GetStreamableMcpServerManifestInput {
|
||||
auth?: {
|
||||
accessToken?: string;
|
||||
token?: string;
|
||||
type: 'none' | 'bearer' | 'oauth2';
|
||||
};
|
||||
headers?: Record<string, string>;
|
||||
identifier: string;
|
||||
metadata?: CustomPluginMetadata;
|
||||
url: string;
|
||||
}
|
||||
|
||||
interface CallToolInput {
|
||||
args: any;
|
||||
env: any;
|
||||
params: GetStdioMcpServerManifestInput;
|
||||
toolName: string;
|
||||
}
|
||||
|
||||
interface SuperJSONSerialized<T = unknown> {
|
||||
json: T;
|
||||
meta?: any;
|
||||
}
|
||||
|
||||
const isSuperJSONSerialized = (value: unknown): value is SuperJSONSerialized => {
|
||||
if (!value || typeof value !== 'object') return false;
|
||||
return 'json' in value;
|
||||
};
|
||||
|
||||
const deserializePayload = <T>(payload: unknown): T => {
|
||||
// Keep backward compatibility for older renderer builds that might not serialize yet
|
||||
if (isSuperJSONSerialized(payload)) return superjson.deserialize(payload as any) as T;
|
||||
return payload as T;
|
||||
};
|
||||
|
||||
const serializePayload = <T>(payload: T): SuperJSONSerialized =>
|
||||
superjson.serialize(payload) as any;
|
||||
|
||||
const safeParseToRecord = (value: unknown): Record<string, unknown> => {
|
||||
if (value && typeof value === 'object' && !Array.isArray(value))
|
||||
return value as Record<string, unknown>;
|
||||
if (typeof value === 'string') {
|
||||
try {
|
||||
const parsed = JSON.parse(value) as unknown;
|
||||
if (parsed && typeof parsed === 'object' && !Array.isArray(parsed))
|
||||
return parsed as Record<string, unknown>;
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
return {};
|
||||
};
|
||||
|
||||
const getFileExtensionFromMimeType = (mimeType: string, fallback: string) => {
|
||||
const [, ext] = mimeType.split('/');
|
||||
return ext || fallback;
|
||||
};
|
||||
|
||||
/** Current UTC date as "YYYY-MM-DD", used to shard uploaded files by day. */
const todayShard = () => {
  const iso = new Date().toISOString();
  return iso.slice(0, iso.indexOf('T'));
};
|
||||
|
||||
const toMarkdown = async (
|
||||
blocks: ToolCallContent[] | null | undefined,
|
||||
getHTTPURL: (key: string) => Promise<string>,
|
||||
) => {
|
||||
if (!blocks) return '';
|
||||
|
||||
const parts = await Promise.all(
|
||||
blocks.map(async (item) => {
|
||||
switch (item.type) {
|
||||
case 'text': {
|
||||
return item.text;
|
||||
}
|
||||
case 'image': {
|
||||
const url = await getHTTPURL(item.data);
|
||||
return ``;
|
||||
}
|
||||
case 'audio': {
|
||||
const url = await getHTTPURL(item.data);
|
||||
return `<resource type="${item.type}" url="${url}" />`;
|
||||
}
|
||||
case 'resource': {
|
||||
return `<resource type="${item.type}">${JSON.stringify(item.resource)}</resource>}`;
|
||||
}
|
||||
default: {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
return parts.filter(Boolean).join('\n\n');
|
||||
};
|
||||
|
||||
/**
|
||||
* MCP Controller (Desktop Main Process)
|
||||
* Implements the same routes as `src/server/routers/desktop/mcp.ts`, but via IPC.
|
||||
*/
|
||||
export default class McpCtr extends ControllerModule {
|
||||
static override readonly groupName = 'mcp';
|
||||
|
||||
  /** Lazily resolve the shared FileService instance from the app container. */
  private get fileService() {
    return this.app.getService(FileService);
  }
|
||||
|
||||
private async createClient(params: MCPClientParams) {
|
||||
const client = new MCPClient(params);
|
||||
await client.initialize();
|
||||
return client;
|
||||
}
|
||||
|
||||
  /**
   * Persist binary tool output (image/audio blocks) into local file storage,
   * replacing each block's inline base64 payload with the stored file path.
   * Text/resource blocks pass through unchanged. All blocks are processed in
   * parallel.
   */
  private async processContentBlocks(blocks: ToolCallContent[]): Promise<ToolCallContent[]> {
    return Promise.all(
      blocks.map(async (block) => {
        if (block.type !== 'image' && block.type !== 'audio') return block;

        // Derive a file extension from the mime type, with per-type fallbacks.
        const ext = getFileExtensionFromMimeType(
          block.mimeType,
          block.type === 'image' ? 'png' : 'mp3',
        );

        const base64 = block.data;
        const buffer = Buffer.from(base64, 'base64');
        // Content hash of the decoded bytes, passed along to the file service.
        const hash = createHash('sha256').update(buffer).digest('hex');
        const id = randomUUID();
        // Shard uploads by day: mcp/<images|audios>/<YYYY-MM-DD>/<uuid>.<ext>
        const filePath = path.posix.join('mcp', `${block.type}s`, todayShard(), `${id}.${ext}`);

        const { metadata } = await this.fileService.uploadFile({
          content: base64,
          filename: `${id}.${ext}`,
          hash,
          path: filePath,
          type: block.mimeType,
        });

        // Swap the inline base64 data for the stored file key; toMarkdown later
        // resolves this key to an HTTP URL.
        return { ...block, data: metadata.path };
      }),
    );
  }
|
||||
|
||||
  /**
   * Connect to a stdio MCP server, list its manifest, and return a
   * plugin-manifest-shaped object (SuperJSON-serialized) for the renderer.
   * The connection is short-lived: the client is always disconnected.
   */
  @IpcMethod()
  async getStdioMcpServerManifest(payload: SuperJSONSerialized<GetStdioMcpServerManifestInput>) {
    // Payload arrives SuperJSON-encoded (with a plain-object legacy fallback).
    const input = deserializePayload<GetStdioMcpServerManifestInput>(payload);
    const params: MCPClientParams = {
      args: input.args || [],
      command: input.command,
      env: input.env,
      name: input.name,
      type: 'stdio',
    };

    const client = await this.createClient(params);
    try {
      const manifest = await client.listManifests();
      const identifier = input.name;

      const tools = manifest.tools || [];

      return serializePayload({
        // Expose each MCP tool as a plugin API entry.
        api: tools.map((item) => ({
          description: item.description,
          name: item.name,
          parameters: item.inputSchema as any,
        })),
        identifier,
        meta: {
          avatar: input.metadata?.avatar || 'MCP_AVATAR',
          // Fallback description summarizes counts of tools/prompts/resources,
          // e.g. "<id> MCP server has 3 tools,1 prompts".
          description:
            input.metadata?.description ||
            `${identifier} MCP server has ` +
              Object.entries(manifest)
                .filter(([key]) => ['tools', 'prompts', 'resources'].includes(key))
                .map(([key, item]) => `${(item as Array<any>)?.length} ${key}`)
                .join(','),
          title: input.metadata?.name || identifier,
        },
        // Spread after `meta` so manifest-provided fields win on key collisions.
        ...manifest,
        // Echo the connection params so the renderer can reconnect for calls.
        mcpParams: params,
        type: 'mcp' as any,
      });
    } finally {
      await client.disconnect();
    }
  }
|
||||
|
||||
  /**
   * Connect to a streamable HTTP MCP server, list its tools, and return a
   * plugin-manifest-shaped object (SuperJSON-serialized) for the renderer.
   * The connection is short-lived: the client is always disconnected.
   */
  @IpcMethod()
  async getStreamableMcpServerManifest(
    payload: SuperJSONSerialized<GetStreamableMcpServerManifestInput>,
  ) {
    // Payload arrives SuperJSON-encoded (with a plain-object legacy fallback).
    const input = deserializePayload<GetStreamableMcpServerManifestInput>(payload);
    const params: MCPClientParams = {
      auth: input.auth,
      headers: input.headers,
      name: input.identifier,
      type: 'http',
      url: input.url,
    };

    const client = await this.createClient(params);
    try {
      const tools = await client.listTools();
      const identifier = input.identifier;

      return serializePayload({
        // Expose each MCP tool as a plugin API entry.
        api: tools.map((item) => ({
          description: item.description,
          name: item.name,
          parameters: item.inputSchema as any,
        })),
        identifier,
        // Echo the connection params so the renderer can reconnect for calls.
        mcpParams: params,
        meta: {
          avatar: input.metadata?.avatar || 'MCP_AVATAR',
          description:
            input.metadata?.description ||
            `${identifier} MCP server has ${tools.length} tools, like "${tools[0]?.name}"`,
          title: identifier,
        },
        type: 'mcp' as any,
      });
    } finally {
      await client.disconnect();
    }
  }
|
||||
|
||||
  /**
   * Execute a tool on a stdio MCP server and return both a markdown rendering
   * and the raw (processed) tool-call state, SuperJSON-serialized.
   * Errors are logged and rethrown; the client is always disconnected.
   */
  @IpcMethod()
  async callTool(payload: SuperJSONSerialized<CallToolInput>) {
    // Payload arrives SuperJSON-encoded (with a plain-object legacy fallback).
    const input = deserializePayload<CallToolInput>(payload);
    // Only stdio servers are handled here; rebuild the connection params.
    const params: MCPClientParams = {
      args: input.params.args || [],
      command: input.params.command,
      env: input.env,
      name: input.params.name,
      type: 'stdio',
    };

    const client = await this.createClient(params);
    try {
      // Tool arguments may arrive as a JSON string or an object; normalize.
      const args = safeParseToRecord(input.args);

      const raw = (await client.callTool(input.toolName, args)) as ToolCallResult;
      // On success, persist binary blocks (image/audio) to file storage;
      // on tool error, pass the content through untouched.
      const processed = raw.isError ? raw.content : await this.processContentBlocks(raw.content);

      // Render blocks to markdown, resolving stored file keys to HTTP URLs.
      const content = await toMarkdown(processed, (key) => this.fileService.getFileHTTPURL(key));

      return serializePayload({
        content,
        state: { ...raw, content: processed },
        success: true,
      });
    } catch (error) {
      logger.error('callTool failed:', error);
      throw error;
    } finally {
      // Short-lived client: always tear down, even on failure.
      await client.disconnect();
    }
  }
|
||||
|
||||
// ---------- MCP Install Check (local system) ----------
|
||||
|
||||
private getInstallInstructions(installInstructions: any) {
|
||||
if (!installInstructions) return undefined;
|
||||
|
||||
let current: string | undefined;
|
||||
|
||||
switch (process.platform) {
|
||||
case 'darwin': {
|
||||
current = installInstructions.macos;
|
||||
break;
|
||||
}
|
||||
case 'linux': {
|
||||
current = installInstructions.linux_debian || installInstructions.linux;
|
||||
break;
|
||||
}
|
||||
case 'win32': {
|
||||
current = installInstructions.windows;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return { current, manual: installInstructions.manual };
|
||||
}
|
||||
|
||||
  /**
   * Check whether one declared system dependency (e.g. node, python, uvx) is
   * installed and satisfies its declared version requirement.
   *
   * Runs the dependency's `checkCommand` (default: `<name> --version`) in a
   * shell and parses the version from stdout. Never throws: failures are
   * reported via `installed: false` plus an `error` message.
   */
  private async checkSystemDependency(dependency: any) {
    try {
      const checkCommand = dependency.checkCommand || `${dependency.name} --version`;
      const { stdout, stderr } = await execPromise(checkCommand);

      // stderr output with empty stdout is treated as "not installed".
      if (stderr && !stdout) {
        return {
          error: stderr,
          installInstructions: this.getInstallInstructions(dependency.installInstructions),
          installed: false,
          meetRequirement: false,
          name: dependency.name,
          requiredVersion: dependency.requiredVersion,
        };
      }

      const output = String(stdout || '').trim();
      let version = output;

      // Some tools print banners around the version; extract the numeric part.
      if (dependency.versionParsingRequired) {
        const versionMatch = output.match(/[Vv]?(\d+(\.\d+)*)/);
        if (versionMatch) version = versionMatch[0];
      }

      let meetRequirement = true;

      if (dependency.requiredVersion) {
        // NOTE(review): comparison uses parseFloat, so only major.minor
        // precision is honored (e.g. "1.10" compares as 1.1) — confirm this
        // coarse comparison is intended.
        const currentVersion = String(version).replace(/^[Vv]/, '');
        const currentNum = Number.parseFloat(currentVersion);

        // Requirement format: optional operator (>=, >, <=, <, =) + version.
        const requirementMatch = String(dependency.requiredVersion).match(/([<=>]+)?(\d+(\.\d+)*)/);
        if (requirementMatch) {
          const [, operator = '=', requiredVersion] = requirementMatch;
          const requiredNum = Number.parseFloat(requiredVersion);
          switch (operator) {
            case '>=': {
              meetRequirement = currentNum >= requiredNum;
              break;
            }
            case '>': {
              meetRequirement = currentNum > requiredNum;
              break;
            }
            case '<=': {
              meetRequirement = currentNum <= requiredNum;
              break;
            }
            case '<': {
              meetRequirement = currentNum < requiredNum;
              break;
            }
            default: {
              // Unknown or missing operator: require exact (float) equality.
              meetRequirement = currentNum === requiredNum;
              break;
            }
          }
        }
      }

      return {
        installInstructions: this.getInstallInstructions(dependency.installInstructions),
        installed: true,
        meetRequirement,
        name: dependency.name,
        requiredVersion: dependency.requiredVersion,
        version,
      };
    } catch (error) {
      // Command not found / non-zero exit → dependency missing.
      return {
        error: error instanceof Error ? error.message : 'Unknown error',
        installInstructions: this.getInstallInstructions(dependency.installInstructions),
        installed: false,
        meetRequirement: false,
        name: dependency.name,
        requiredVersion: dependency.requiredVersion,
      };
    }
  }
|
||||
|
||||
  /**
   * Best-effort check of whether the MCP server package is already installed
   * for the given installation method ('npm' | 'python' | anything else).
   * Never throws; returns `{ installed, error? }`.
   */
  private async checkPackageInstalled(installationMethod: string, details: any) {
    if (installationMethod === 'npm') {
      const packageName = details?.packageName;
      if (!packageName) return { installed: false };

      // 1) Check the global npm install list first (cheap).
      try {
        const { stdout } = await execPromise(`npm list -g ${packageName} --depth=0`);
        if (!stdout.includes('(empty)') && stdout.includes(packageName)) return { installed: true };
      } catch {
        // ignore — fall through to the npx probe
      }

      // 2) Fall back to resolving via npx (`-y` may fetch the package on
      //    first run, so "installed" here really means "resolvable").
      try {
        await execPromise(`npx -y ${packageName} --version`);
        return { installed: true };
      } catch (error) {
        return {
          error: error instanceof Error ? error.message : 'Unknown error',
          installed: false,
        };
      }
    }

    if (installationMethod === 'python') {
      const packageName = details?.packageName;
      if (!packageName) return { installed: false };

      const pythonCommand = details?.pythonCommand || 'python';

      // 1) Look for the package in pip's list output.
      // NOTE(review): pipes through `grep`, which stock Windows shells lack —
      // the catch below turns that into a silent miss there; confirm intended.
      try {
        const command = `${pythonCommand} -m pip list | grep -i "${packageName}"`;
        const { stdout } = await execPromise(command);
        if (stdout.trim() && stdout.toLowerCase().includes(String(packageName).toLowerCase())) {
          return { installed: true };
        }
      } catch {
        // ignore — fall through to the import probe
      }

      // 2) Try importing the module directly (first dash becomes underscore).
      try {
        const importCommand = `${pythonCommand} -c "import ${String(packageName).replace('-', '_')}; print('Package installed')"`;
        const { stdout } = await execPromise(importCommand);
        if (stdout.includes('Package installed')) return { installed: true };
      } catch {
        // ignore
      }

      return { installed: false };
    }

    // manual or unknown installation methods cannot be verified automatically
    return { installed: false };
  }
|
||||
|
||||
  /**
   * Evaluate a single deployment option: verify its declared system
   * dependencies, probe whether its package is installed, and derive whether
   * user-supplied config is required before the server can run.
   */
  private async checkDeployOption(option: any) {
    const systemDependenciesResults = [];

    // Check dependencies sequentially to avoid spawning many shells at once.
    if (Array.isArray(option.systemDependencies) && option.systemDependencies.length > 0) {
      for (const dep of option.systemDependencies) {
        systemDependenciesResults.push(await this.checkSystemDependency(dep));
      }
    }

    const packageResult = await this.checkPackageInstalled(
      option.installationMethod,
      option.installationDetails,
    );
    const packageInstalled = Boolean((packageResult as any).installed);

    // `every` on an empty array is true, so options without declared
    // dependencies count as met.
    const allDependenciesMet = systemDependenciesResults.every((dep: any) => dep.meetRequirement);

    // Config is needed when the schema declares required top-level keys, or
    // any property is individually marked `required: true`.
    const configSchema = option.connection?.configSchema;
    const needsConfig = Boolean(
      configSchema &&
        ((Array.isArray(configSchema.required) && configSchema.required.length > 0) ||
          (configSchema.properties &&
            Object.values(configSchema.properties).some((prop: any) => prop.required === true))),
    );

    // A URL implies a remote (http) server; otherwise assume local stdio.
    const connection = option.connection?.url
      ? { ...option.connection, type: 'http' }
      : { ...option.connection, type: 'stdio' };

    return {
      allDependenciesMet,
      configSchema,
      connection,
      isRecommended: option.isRecommended,
      needsConfig,
      packageInstalled,
      systemDependencies: systemDependenciesResults,
    };
  }
|
||||
|
||||
  /**
   * Check all deployment options of an MCP plugin against the local system and
   * return the best installable option plus the full per-option breakdown,
   * SuperJSON-serialized. Never throws: failures are reported in-band with
   * `success: false`.
   */
  @IpcMethod()
  async validMcpServerInstallable(
    payload: SuperJSONSerialized<{
      deploymentOptions: any[];
    }>,
  ) {
    const input = deserializePayload<{ deploymentOptions: any[] }>(payload);
    try {
      const options = input.deploymentOptions || [];
      const results = [];

      // Evaluate options one at a time (each may spawn shell commands).
      for (const option of options) {
        results.push(await this.checkDeployOption(option));
      }

      // Prefer the recommended-and-installable option, then any installable
      // one, then fall back to the first result for error reporting.
      const recommendedResult = results.find((r: any) => r.isRecommended && r.allDependenciesMet);
      const firstInstallableResult = results.find((r: any) => r.allDependenciesMet);
      const bestResult = recommendedResult || firstInstallableResult || results[0];

      const checkResult: CheckMcpInstallResult = {
        ...(bestResult || {}),
        allOptions: results as any,
        platform: process.platform,
        success: true,
      };

      // Surface the config requirement of the selected option at the top level.
      if (bestResult?.needsConfig) {
        checkResult.needsConfig = true;
        checkResult.configSchema = bestResult.configSchema;
      }

      return serializePayload(checkResult);
    } catch (error) {
      // Report failure in-band; the renderer surfaces `error` to the user.
      return serializePayload({
        error:
          error instanceof Error
            ? error.message
            : 'Unknown error when checking MCP plugin installation status',
        platform: process.platform,
        success: false,
      });
    }
  }
|
||||
}
|
||||
|
|
@ -1,6 +1,5 @@
|
|||
import { NetworkProxySettings } from '@lobechat/electron-client-ipc';
|
||||
import { merge } from 'lodash';
|
||||
import { isEqual } from 'lodash-es';
|
||||
import { isEqual, merge } from 'es-toolkit/compat';
|
||||
|
||||
import { defaultProxySettings } from '@/const/store';
|
||||
import { createLogger } from '@/utils/logger';
|
||||
|
|
|
|||
|
|
@ -2,9 +2,10 @@ import {
|
|||
DesktopNotificationResult,
|
||||
ShowDesktopNotificationParams,
|
||||
} from '@lobechat/electron-client-ipc';
|
||||
import { Notification, app } from 'electron';
|
||||
import { Notification, app, systemPreferences } from 'electron';
|
||||
import { macOS, windows } from 'electron-is';
|
||||
|
||||
import { getIpcContext } from '@/utils/ipc';
|
||||
import { createLogger } from '@/utils/logger';
|
||||
|
||||
import { ControllerModule, IpcMethod } from './index';
|
||||
|
|
@ -13,6 +14,54 @@ const logger = createLogger('controllers:NotificationCtr');
|
|||
|
||||
export default class NotificationCtr extends ControllerModule {
|
||||
static override readonly groupName = 'notification';
|
||||
|
||||
@IpcMethod()
|
||||
async getNotificationPermissionStatus(): Promise<string> {
|
||||
if (!Notification.isSupported()) return 'denied';
|
||||
// Keep a stable status string for renderer-side UI mapping.
|
||||
// Screen3 expects macOS to return 'authorized' when granted.
|
||||
if (!macOS()) return 'authorized';
|
||||
|
||||
// Electron 38 no longer exposes `systemPreferences.getNotificationSettings()` in types,
|
||||
// and some runtimes don't provide it at all. Use the renderer's Notification.permission
|
||||
// as a reliable fallback.
|
||||
const context = getIpcContext();
|
||||
const sender = context?.sender;
|
||||
if (!sender) return 'notDetermined';
|
||||
const permission = await sender.executeJavaScript('Notification.permission', true);
|
||||
return permission === 'granted' ? 'authorized' : 'denied';
|
||||
}
|
||||
|
||||
@IpcMethod()
|
||||
async requestNotificationPermission(): Promise<void> {
|
||||
logger.debug('Requesting notification permission by sending a test notification');
|
||||
|
||||
if (!Notification.isSupported()) {
|
||||
logger.warn('System does not support desktop notifications');
|
||||
return;
|
||||
}
|
||||
|
||||
// On macOS, ask permission via Web Notification API first when possible.
|
||||
// This helps keep `Notification.permission` in sync for subsequent status checks.
|
||||
if (macOS()) {
|
||||
try {
|
||||
const mainWindow = this.app.browserManager.getMainWindow().browserWindow;
|
||||
await mainWindow.webContents.executeJavaScript('Notification.requestPermission()', true);
|
||||
} catch (error) {
|
||||
logger.debug(
|
||||
'Notification.requestPermission() failed or is unavailable, continuing with test notification',
|
||||
error,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const notification = new Notification({
|
||||
body: 'LobeHub can now send you notifications.',
|
||||
title: 'Notification Permission',
|
||||
});
|
||||
|
||||
notification.show();
|
||||
}
|
||||
/**
|
||||
* Set up desktop notifications after the application is ready
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -45,6 +45,24 @@ export default class RemoteServerConfigCtr extends ControllerModule {
|
|||
*/
|
||||
private readonly encryptedTokensKey = 'encryptedTokens';
|
||||
|
||||
/**
|
||||
* Normalize legacy config that used local storageMode.
|
||||
* Local mode has been removed; fall back to cloud.
|
||||
*/
|
||||
private normalizeConfig = (config: DataSyncConfig): DataSyncConfig => {
|
||||
if (config.storageMode !== 'local') return config;
|
||||
|
||||
const nextConfig: DataSyncConfig = {
|
||||
...config,
|
||||
remoteServerUrl: config.remoteServerUrl || OFFICIAL_CLOUD_SERVER,
|
||||
storageMode: 'cloud',
|
||||
};
|
||||
|
||||
this.app.storeManager.set('dataSyncConfig', nextConfig);
|
||||
|
||||
return nextConfig;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get remote server configuration
|
||||
*/
|
||||
|
|
@ -54,12 +72,13 @@ export default class RemoteServerConfigCtr extends ControllerModule {
|
|||
const { storeManager } = this.app;
|
||||
|
||||
const config: DataSyncConfig = storeManager.get('dataSyncConfig');
|
||||
const normalized = this.normalizeConfig(config);
|
||||
|
||||
logger.debug(
|
||||
`Remote server config: active=${config.active}, storageMode=${config.storageMode}, url=${config.remoteServerUrl}`,
|
||||
`Remote server config: active=${normalized.active}, storageMode=${normalized.storageMode}, url=${normalized.remoteServerUrl}`,
|
||||
);
|
||||
|
||||
return config;
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -73,8 +92,9 @@ export default class RemoteServerConfigCtr extends ControllerModule {
|
|||
const { storeManager } = this.app;
|
||||
const prev: DataSyncConfig = storeManager.get('dataSyncConfig');
|
||||
|
||||
// Save configuration
|
||||
storeManager.set('dataSyncConfig', { ...prev, ...config });
|
||||
// Save configuration with legacy local storage fallback
|
||||
const merged = this.normalizeConfig({ ...prev, ...config });
|
||||
storeManager.set('dataSyncConfig', merged);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
|
@ -88,7 +108,7 @@ export default class RemoteServerConfigCtr extends ControllerModule {
|
|||
const { storeManager } = this.app;
|
||||
|
||||
// Clear instance configuration
|
||||
storeManager.set('dataSyncConfig', { storageMode: 'local' });
|
||||
storeManager.set('dataSyncConfig', { active: false, storageMode: 'cloud' });
|
||||
|
||||
// Clear tokens (if any)
|
||||
await this.clearTokens();
|
||||
|
|
@ -468,7 +488,7 @@ export default class RemoteServerConfigCtr extends ControllerModule {
|
|||
}
|
||||
|
||||
async getRemoteServerUrl(config?: DataSyncConfig) {
|
||||
const dataConfig = config ? config : await this.getRemoteServerConfig();
|
||||
const dataConfig = this.normalizeConfig(config ? config : await this.getRemoteServerConfig());
|
||||
|
||||
return dataConfig.storageMode === 'cloud' ? OFFICIAL_CLOUD_SERVER : dataConfig.remoteServerUrl;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,8 +1,4 @@
|
|||
import {
|
||||
ProxyTRPCRequestParams,
|
||||
ProxyTRPCRequestResult,
|
||||
ProxyTRPCStreamRequestParams,
|
||||
} from '@lobechat/electron-client-ipc';
|
||||
import { ProxyTRPCStreamRequestParams } from '@lobechat/electron-client-ipc';
|
||||
import { IpcMainEvent, WebContents, ipcMain } from 'electron';
|
||||
import { HttpProxyAgent } from 'http-proxy-agent';
|
||||
import { HttpsProxyAgent } from 'https-proxy-agent';
|
||||
|
|
@ -15,7 +11,7 @@ import { defaultProxySettings } from '@/const/store';
|
|||
import { createLogger } from '@/utils/logger';
|
||||
|
||||
import RemoteServerConfigCtr from './RemoteServerConfigCtr';
|
||||
import { ControllerModule, IpcMethod } from './index';
|
||||
import { ControllerModule } from './index';
|
||||
|
||||
// Create logger
|
||||
const logger = createLogger('controllers:RemoteServerSyncCtr');
|
||||
|
|
@ -174,129 +170,12 @@ export default class RemoteServerSyncCtr extends ControllerModule {
|
|||
});
|
||||
|
||||
if (requestBody) {
|
||||
clientReq.write(Buffer.from(requestBody));
|
||||
clientReq.write(Buffer.from(requestBody as string));
|
||||
}
|
||||
|
||||
clientReq.end();
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to perform the actual request forwarding to the remote server.
|
||||
* Accepts arguments from IPC and returns response details.
|
||||
*/
|
||||
private async forwardRequest(args: {
|
||||
accessToken: string | null;
|
||||
body?: string | ArrayBuffer;
|
||||
headers: Record<string, string>;
|
||||
method: string;
|
||||
remoteServerUrl: string;
|
||||
urlPath: string; // Pass the base URL
|
||||
}): Promise<{
|
||||
// Node headers type
|
||||
body: Buffer;
|
||||
headers: Record<string, string | string[] | undefined>;
|
||||
status: number;
|
||||
statusText: string; // Return body as Buffer
|
||||
}> {
|
||||
const {
|
||||
urlPath,
|
||||
method,
|
||||
headers: originalHeaders,
|
||||
body: requestBody,
|
||||
accessToken,
|
||||
remoteServerUrl,
|
||||
} = args;
|
||||
|
||||
const pathname = new URL(urlPath, remoteServerUrl).pathname; // Extract pathname from URL
|
||||
const logPrefix = `[ForwardRequest ${method} ${pathname}]`; // Add prefix for easier correlation
|
||||
|
||||
if (!accessToken) {
|
||||
logger.error(`${logPrefix} No access token provided`); // Enhanced log
|
||||
return {
|
||||
body: Buffer.from(''),
|
||||
headers: {},
|
||||
status: 401,
|
||||
statusText: 'Authentication required, missing token',
|
||||
};
|
||||
}
|
||||
|
||||
// 1. Determine target URL and prepare request options
|
||||
const targetUrl = new URL(urlPath, remoteServerUrl); // Combine base URL and path
|
||||
const { requestOptions, requester } = this.createRequester({
|
||||
accessToken,
|
||||
headers: originalHeaders,
|
||||
method,
|
||||
url: targetUrl,
|
||||
});
|
||||
|
||||
// 2. Make the request and capture response
|
||||
return new Promise((resolve) => {
|
||||
const clientReq = requester.request(requestOptions, (clientRes: IncomingMessage) => {
|
||||
const chunks: Buffer[] = [];
|
||||
clientRes.on('data', (chunk) => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
|
||||
clientRes.on('end', () => {
|
||||
const responseBody = Buffer.concat(chunks);
|
||||
resolve({
|
||||
// These are IncomingHttpHeaders
|
||||
body: responseBody,
|
||||
|
||||
headers: clientRes.headers,
|
||||
|
||||
status: clientRes.statusCode || 500,
|
||||
statusText: clientRes.statusMessage || 'Unknown Status',
|
||||
});
|
||||
});
|
||||
|
||||
clientRes.on('error', (error) => {
|
||||
// Error during response streaming
|
||||
logger.error(
|
||||
`${logPrefix} Error reading response stream from ${targetUrl.toString()}:`,
|
||||
error,
|
||||
); // Enhanced log
|
||||
// Rejecting might be better, but we need to resolve the outer promise for proxyTRPCRequest
|
||||
resolve({
|
||||
body: Buffer.from(`Error reading response stream: ${error.message}`),
|
||||
headers: {},
|
||||
|
||||
status: 502,
|
||||
// Bad Gateway
|
||||
statusText: 'Error reading response stream',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
clientReq.on('error', (error) => {
|
||||
logger.error(`${logPrefix} Error forwarding request to ${targetUrl.toString()}:`, error); // Enhanced log
|
||||
// Reject or resolve with error status for the outer promise
|
||||
resolve({
|
||||
body: Buffer.from(`Error forwarding request: ${error.message}`),
|
||||
headers: {},
|
||||
|
||||
status: 502,
|
||||
// Bad Gateway
|
||||
statusText: 'Error forwarding request',
|
||||
});
|
||||
});
|
||||
|
||||
// 3. Send request body if present
|
||||
if (requestBody) {
|
||||
if (typeof requestBody === 'string') {
|
||||
clientReq.write(requestBody, 'utf8'); // Specify encoding for strings
|
||||
} else if (requestBody instanceof ArrayBuffer) {
|
||||
clientReq.write(Buffer.from(requestBody)); // Convert ArrayBuffer to Buffer
|
||||
} else {
|
||||
// Should not happen based on type, but handle defensively
|
||||
logger.warn(`${logPrefix} Unsupported request body type received:`, typeof requestBody); // Enhanced log
|
||||
}
|
||||
}
|
||||
|
||||
clientReq.end(); // Finalize the request
|
||||
});
|
||||
}
|
||||
|
||||
private createRequester({
|
||||
headers,
|
||||
accessToken,
|
||||
|
|
@ -341,144 +220,4 @@ export default class RemoteServerSyncCtr extends ControllerModule {
|
|||
const requester = url.protocol === 'https:' ? https : http;
|
||||
return { requestOptions, requester };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles the 'proxy-trpc-request' IPC call from the renderer process.
|
||||
* This method should be invoked by the ipcMain.handle setup in your main process entry point.
|
||||
*/
|
||||
@IpcMethod()
|
||||
public async proxyTRPCRequest(args: ProxyTRPCRequestParams): Promise<ProxyTRPCRequestResult> {
|
||||
logger.debug('Received proxyTRPCRequest IPC call:', {
|
||||
headers: args.headers,
|
||||
method: args.method,
|
||||
urlPath: args.urlPath, // Log headers too for context
|
||||
});
|
||||
|
||||
const url = new URL(args.urlPath, 'http://a.b');
|
||||
const logPrefix = `[ProxyTRPC ${args.method} ${url.pathname}]`; // Prefix for this specific request
|
||||
|
||||
try {
|
||||
const config = await this.remoteServerConfigCtr.getRemoteServerConfig();
|
||||
if (!config.active || (config.storageMode === 'selfHost' && !config.remoteServerUrl)) {
|
||||
logger.warn(
|
||||
`${logPrefix} Remote server sync not active or configured. Rejecting proxy request.`,
|
||||
); // Enhanced log
|
||||
return {
|
||||
body: Buffer.from('Remote server sync not active or configured').buffer,
|
||||
headers: {},
|
||||
|
||||
status: 503,
|
||||
// Service Unavailable
|
||||
statusText: 'Remote server sync not active or configured', // Return ArrayBuffer
|
||||
};
|
||||
}
|
||||
const remoteServerUrl = await this.remoteServerConfigCtr.getRemoteServerUrl();
|
||||
|
||||
// Get initial token
|
||||
let token = await this.remoteServerConfigCtr.getAccessToken();
|
||||
logger.debug(
|
||||
`${logPrefix} Initial token check: ${token ? 'Token exists' : 'No token found'}`,
|
||||
); // Added log
|
||||
|
||||
logger.info(`${logPrefix} Attempting to forward request...`); // Added log
|
||||
let response = await this.forwardRequest({ ...args, accessToken: token, remoteServerUrl });
|
||||
|
||||
// Handle 401: Refresh token and retry if necessary
|
||||
if (response.status === 401) {
|
||||
logger.info(`${logPrefix} Received 401 from forwarded request. Attempting token refresh.`); // Enhanced log
|
||||
const refreshed = await this.refreshTokenIfNeeded(logPrefix); // Pass prefix for context
|
||||
|
||||
if (refreshed) {
|
||||
const newToken = await this.remoteServerConfigCtr.getAccessToken();
|
||||
if (newToken) {
|
||||
logger.info(`${logPrefix} Token refreshed successfully, retrying the request.`); // Enhanced log
|
||||
response = await this.forwardRequest({
|
||||
...args,
|
||||
accessToken: newToken,
|
||||
remoteServerUrl,
|
||||
});
|
||||
} else {
|
||||
logger.error(
|
||||
`${logPrefix} Token refresh reported success, but failed to retrieve new token. Keeping original 401 response.`,
|
||||
); // Enhanced log
|
||||
// Keep the original 401 response
|
||||
}
|
||||
} else {
|
||||
logger.error(`${logPrefix} Token refresh failed. Keeping original 401 response.`); // Enhanced log
|
||||
// Keep the original 401 response
|
||||
}
|
||||
}
|
||||
|
||||
// Convert headers and body to format defined in IPC event
|
||||
const responseHeaders: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(response.headers)) {
|
||||
if (value !== undefined) {
|
||||
responseHeaders[key.toLowerCase()] = Array.isArray(value) ? value.join(', ') : value;
|
||||
}
|
||||
}
|
||||
|
||||
// Return the final response, ensuring body is serializable (string or ArrayBuffer)
|
||||
const responseBody = response.body; // Buffer
|
||||
|
||||
// IMPORTANT: Check IPC limits. Large bodies might fail. Consider chunking if needed.
|
||||
// Convert Buffer to ArrayBuffer for IPC
|
||||
const finalBody = responseBody.buffer.slice(
|
||||
responseBody.byteOffset,
|
||||
responseBody.byteOffset + responseBody.byteLength,
|
||||
);
|
||||
|
||||
logger.debug(`${logPrefix} Forwarding successful. Status: ${response.status}`); // Added log
|
||||
return {
|
||||
body: finalBody as ArrayBuffer,
|
||||
headers: responseHeaders,
|
||||
status: response.status,
|
||||
statusText: response.statusText, // Return ArrayBuffer
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`${logPrefix} Unhandled error processing proxyTRPCRequest:`, error); // Enhanced log
|
||||
// Ensure a serializable error response is returned
|
||||
return {
|
||||
body: Buffer.from(
|
||||
`Internal Server Error: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
).buffer,
|
||||
headers: {},
|
||||
status: 500,
|
||||
statusText: 'Internal Server Error during proxy', // Return ArrayBuffer
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to refresh the access token by calling the RemoteServerConfigCtr.
|
||||
* @returns Whether token refresh was successful
|
||||
*/
|
||||
private async refreshTokenIfNeeded(callerLogPrefix: string = '[RefreshToken]'): Promise<boolean> {
|
||||
// Added prefix parameter
|
||||
const logPrefix = `${callerLogPrefix} [RefreshTrigger]`; // Updated prefix
|
||||
logger.debug(`${logPrefix} Entered refreshTokenIfNeeded.`);
|
||||
|
||||
try {
|
||||
logger.info(`${logPrefix} Triggering refreshAccessToken in RemoteServerConfigCtr.`);
|
||||
const result = await this.remoteServerConfigCtr.refreshAccessToken();
|
||||
|
||||
if (result.success) {
|
||||
logger.info(`${logPrefix} refreshAccessToken call completed successfully.`);
|
||||
return true;
|
||||
} else {
|
||||
logger.error(`${logPrefix} refreshAccessToken call failed: ${result.error}`);
|
||||
return false;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`${logPrefix} Exception occurred while calling refreshAccessToken:`, error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up resources - No protocol handler to unregister anymore
|
||||
*/
|
||||
destroy() {
|
||||
logger.info('Destroying RemoteServerSyncCtr');
|
||||
// Nothing specific to clean up here regarding request handling now
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -51,15 +51,44 @@ export default class SystemController extends ControllerModule {
|
|||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查可用性
|
||||
*/
|
||||
@IpcMethod()
|
||||
checkAccessibilityForMacOS() {
|
||||
if (!macOS()) return;
|
||||
requestAccessibilityAccess() {
|
||||
if (!macOS()) return true;
|
||||
return systemPreferences.isTrustedAccessibilityClient(true);
|
||||
}
|
||||
|
||||
@IpcMethod()
|
||||
getAccessibilityStatus() {
|
||||
if (!macOS()) return true;
|
||||
return systemPreferences.isTrustedAccessibilityClient(false);
|
||||
}
|
||||
|
||||
@IpcMethod()
|
||||
async getMediaAccessStatus(mediaType: 'microphone' | 'screen'): Promise<string> {
|
||||
if (!macOS()) return 'granted';
|
||||
return systemPreferences.getMediaAccessStatus(mediaType);
|
||||
}
|
||||
|
||||
@IpcMethod()
|
||||
async requestMicrophoneAccess(): Promise<boolean> {
|
||||
if (!macOS()) return true;
|
||||
return systemPreferences.askForMediaAccess('microphone');
|
||||
}
|
||||
|
||||
@IpcMethod()
|
||||
async requestScreenAccess(): Promise<void> {
|
||||
if (!macOS()) return;
|
||||
shell.openExternal(
|
||||
'x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture',
|
||||
);
|
||||
}
|
||||
|
||||
@IpcMethod()
|
||||
openFullDiskAccessSettings() {
|
||||
if (!macOS()) return;
|
||||
shell.openExternal('x-apple.systempreferences:com.apple.preference.security?Privacy_AllFiles');
|
||||
}
|
||||
|
||||
@IpcMethod()
|
||||
openExternalLink(url: string) {
|
||||
return shell.openExternal(url);
|
||||
|
|
@ -87,6 +116,19 @@ export default class SystemController extends ControllerModule {
|
|||
|
||||
// Apply visual effects to all browser windows when theme mode changes
|
||||
this.app.browserManager.handleAppThemeChange();
|
||||
// Set app theme mode to the system theme mode
|
||||
|
||||
this.setSystemThemeMode(themeMode);
|
||||
}
|
||||
|
||||
@IpcMethod()
|
||||
async getSystemThemeMode() {
|
||||
return nativeTheme.themeSource;
|
||||
}
|
||||
|
||||
@IpcMethod()
|
||||
async setSystemThemeMode(themeMode: ThemeMode) {
|
||||
nativeTheme.themeSource = themeMode === 'auto' ? 'system' : themeMode;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -1,38 +0,0 @@
|
|||
import { readFileSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { DB_SCHEMA_HASH_FILENAME, LOCAL_DATABASE_DIR, userDataDir } from '@/const/dir';
|
||||
|
||||
import { ControllerModule, IpcServerMethod } from './index';
|
||||
|
||||
export default class SystemServerCtr extends ControllerModule {
|
||||
static override readonly groupName = 'system';
|
||||
|
||||
@IpcServerMethod()
|
||||
async getDatabasePath() {
|
||||
return join(this.app.appStoragePath, LOCAL_DATABASE_DIR);
|
||||
}
|
||||
|
||||
@IpcServerMethod()
|
||||
async getDatabaseSchemaHash() {
|
||||
try {
|
||||
return readFileSync(this.DB_SCHEMA_HASH_PATH, 'utf8');
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
@IpcServerMethod()
|
||||
async getUserDataPath() {
|
||||
return userDataDir;
|
||||
}
|
||||
|
||||
@IpcServerMethod()
|
||||
async setDatabaseSchemaHash(hash: string) {
|
||||
writeFileSync(this.DB_SCHEMA_HASH_PATH, hash, 'utf8');
|
||||
}
|
||||
|
||||
private get DB_SCHEMA_HASH_PATH() {
|
||||
return join(this.app.appStoragePath, DB_SCHEMA_HASH_FILENAME);
|
||||
}
|
||||
}
|
||||
|
|
@ -22,6 +22,7 @@ vi.mock('electron', () => ({
|
|||
const mockToggleVisible = vi.fn();
|
||||
const mockLoadUrl = vi.fn();
|
||||
const mockShow = vi.fn();
|
||||
const mockBroadcast = vi.fn();
|
||||
const mockRedirectToPage = vi.fn();
|
||||
const mockCloseWindow = vi.fn();
|
||||
const mockMinimizeWindow = vi.fn();
|
||||
|
|
@ -34,6 +35,7 @@ const mockGetMainWindow = vi.fn(() => ({
|
|||
toggleVisible: mockToggleVisible,
|
||||
loadUrl: mockLoadUrl,
|
||||
show: mockShow,
|
||||
broadcast: mockBroadcast,
|
||||
}));
|
||||
const mockShowOther = vi.fn();
|
||||
|
||||
|
|
@ -81,19 +83,23 @@ describe('BrowserWindowsCtr', () => {
|
|||
});
|
||||
|
||||
describe('openSettingsWindow', () => {
|
||||
it('should navigate to settings in main window with the specified tab', async () => {
|
||||
const tab = 'appearance';
|
||||
const result = await browserWindowsCtr.openSettingsWindow(tab);
|
||||
it('should navigate to settings in main window with the specified path', async () => {
|
||||
const path = '/settings/common';
|
||||
const result = await browserWindowsCtr.openSettingsWindow({ path });
|
||||
expect(mockGetMainWindow).toHaveBeenCalled();
|
||||
expect(mockLoadUrl).toHaveBeenCalledWith('/settings?active=appearance');
|
||||
expect(mockShow).toHaveBeenCalled();
|
||||
expect(mockBroadcast).toHaveBeenCalledWith('navigate', {
|
||||
path: '/settings/common',
|
||||
});
|
||||
expect(result).toEqual({ success: true });
|
||||
});
|
||||
|
||||
it('should return error if navigation fails', async () => {
|
||||
const errorMessage = 'Failed to navigate';
|
||||
mockLoadUrl.mockRejectedValueOnce(new Error(errorMessage));
|
||||
const result = await browserWindowsCtr.openSettingsWindow('display');
|
||||
mockBroadcast.mockImplementationOnce(() => {
|
||||
throw new Error(errorMessage);
|
||||
});
|
||||
const result = await browserWindowsCtr.openSettingsWindow({ path: '/settings/common' });
|
||||
expect(result).toEqual({ error: errorMessage, success: false });
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -105,7 +105,10 @@ describe('RemoteServerConfigCtr', () => {
|
|||
const result = await controller.clearRemoteServerConfig();
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockStoreManager.set).toHaveBeenCalledWith('dataSyncConfig', { storageMode: 'local' });
|
||||
expect(mockStoreManager.set).toHaveBeenCalledWith('dataSyncConfig', {
|
||||
active: false,
|
||||
storageMode: 'cloud',
|
||||
});
|
||||
expect(mockStoreManager.delete).toHaveBeenCalledWith('encryptedTokens');
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,373 +0,0 @@
|
|||
import { ProxyTRPCRequestParams } from '@lobechat/electron-client-ipc';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import type { App } from '@/core/App';
|
||||
|
||||
import RemoteServerSyncCtr from '../RemoteServerSyncCtr';
|
||||
|
||||
// Mock logger
|
||||
vi.mock('@/utils/logger', () => ({
|
||||
createLogger: () => ({
|
||||
debug: vi.fn(),
|
||||
error: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock electron
|
||||
vi.mock('electron', () => ({
|
||||
app: {
|
||||
getAppPath: vi.fn(() => '/mock/app/path'),
|
||||
getPath: vi.fn(() => '/mock/user/data'),
|
||||
},
|
||||
ipcMain: {
|
||||
handle: vi.fn(),
|
||||
on: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock electron-is
|
||||
vi.mock('electron-is', () => ({
|
||||
dev: vi.fn(() => false),
|
||||
linux: vi.fn(() => false),
|
||||
macOS: vi.fn(() => false),
|
||||
windows: vi.fn(() => false),
|
||||
}));
|
||||
|
||||
// Mock http and https modules
|
||||
vi.mock('node:http', () => ({
|
||||
default: {
|
||||
request: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('node:https', () => ({
|
||||
default: {
|
||||
request: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock proxy agents
|
||||
vi.mock('http-proxy-agent', () => ({
|
||||
HttpProxyAgent: vi.fn().mockImplementation(() => ({})),
|
||||
}));
|
||||
|
||||
vi.mock('https-proxy-agent', () => ({
|
||||
HttpsProxyAgent: vi.fn().mockImplementation(() => ({})),
|
||||
}));
|
||||
|
||||
// Mock RemoteServerConfigCtr
|
||||
const mockRemoteServerConfigCtr = {
|
||||
getRemoteServerConfig: vi.fn(),
|
||||
getRemoteServerUrl: vi.fn(),
|
||||
getAccessToken: vi.fn(),
|
||||
refreshAccessToken: vi.fn(),
|
||||
};
|
||||
|
||||
const mockStoreManager = {
|
||||
get: vi.fn().mockReturnValue({
|
||||
enableProxy: false,
|
||||
proxyServer: '',
|
||||
proxyPort: '',
|
||||
proxyType: 'http',
|
||||
}),
|
||||
};
|
||||
|
||||
const mockApp = {
|
||||
getController: vi.fn(() => mockRemoteServerConfigCtr),
|
||||
storeManager: mockStoreManager,
|
||||
} as unknown as App;
|
||||
|
||||
describe('RemoteServerSyncCtr', () => {
|
||||
let controller: RemoteServerSyncCtr;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
controller = new RemoteServerSyncCtr(mockApp);
|
||||
});
|
||||
|
||||
describe('proxyTRPCRequest', () => {
|
||||
const baseParams: ProxyTRPCRequestParams = {
|
||||
urlPath: '/trpc/test.query',
|
||||
method: 'GET',
|
||||
headers: { 'content-type': 'application/json' },
|
||||
};
|
||||
|
||||
it('should return 503 when remote server sync is not active', async () => {
|
||||
mockRemoteServerConfigCtr.getRemoteServerConfig.mockResolvedValue({
|
||||
active: false,
|
||||
storageMode: 'cloud',
|
||||
});
|
||||
|
||||
const result = await controller.proxyTRPCRequest(baseParams);
|
||||
|
||||
expect(result.status).toBe(503);
|
||||
expect(result.statusText).toBe('Remote server sync not active or configured');
|
||||
});
|
||||
|
||||
it('should return 503 when selfHost mode without remoteServerUrl', async () => {
|
||||
mockRemoteServerConfigCtr.getRemoteServerConfig.mockResolvedValue({
|
||||
active: true,
|
||||
storageMode: 'selfHost',
|
||||
remoteServerUrl: '',
|
||||
});
|
||||
|
||||
const result = await controller.proxyTRPCRequest(baseParams);
|
||||
|
||||
expect(result.status).toBe(503);
|
||||
expect(result.statusText).toBe('Remote server sync not active or configured');
|
||||
});
|
||||
|
||||
it('should return 401 when no access token is available', async () => {
|
||||
mockRemoteServerConfigCtr.getRemoteServerConfig.mockResolvedValue({
|
||||
active: true,
|
||||
storageMode: 'cloud',
|
||||
});
|
||||
mockRemoteServerConfigCtr.getRemoteServerUrl.mockResolvedValue('https://api.example.com');
|
||||
mockRemoteServerConfigCtr.getAccessToken.mockResolvedValue(null);
|
||||
|
||||
// Mock https.request to simulate the forwardRequest behavior
|
||||
const https = await import('node:https');
|
||||
const mockRequest = vi.fn().mockImplementation((options, callback) => {
|
||||
// Simulate response
|
||||
const mockResponse = {
|
||||
statusCode: 401,
|
||||
statusMessage: 'Authentication required, missing token',
|
||||
headers: {},
|
||||
on: vi.fn((event, handler) => {
|
||||
if (event === 'data') {
|
||||
handler(Buffer.from(''));
|
||||
}
|
||||
if (event === 'end') {
|
||||
handler();
|
||||
}
|
||||
}),
|
||||
};
|
||||
callback(mockResponse);
|
||||
return {
|
||||
on: vi.fn(),
|
||||
write: vi.fn(),
|
||||
end: vi.fn(),
|
||||
};
|
||||
});
|
||||
vi.mocked(https.default.request).mockImplementation(mockRequest);
|
||||
|
||||
const result = await controller.proxyTRPCRequest(baseParams);
|
||||
|
||||
expect(result.status).toBe(401);
|
||||
});
|
||||
|
||||
it('should forward request successfully when configured properly', async () => {
|
||||
mockRemoteServerConfigCtr.getRemoteServerConfig.mockResolvedValue({
|
||||
active: true,
|
||||
storageMode: 'cloud',
|
||||
});
|
||||
mockRemoteServerConfigCtr.getRemoteServerUrl.mockResolvedValue('https://api.example.com');
|
||||
mockRemoteServerConfigCtr.getAccessToken.mockResolvedValue('valid-token');
|
||||
|
||||
const https = await import('node:https');
|
||||
const mockRequest = vi.fn().mockImplementation((options, callback) => {
|
||||
const mockResponse = {
|
||||
statusCode: 200,
|
||||
statusMessage: 'OK',
|
||||
headers: { 'content-type': 'application/json' },
|
||||
on: vi.fn((event, handler) => {
|
||||
if (event === 'data') {
|
||||
handler(Buffer.from('{"success":true}'));
|
||||
}
|
||||
if (event === 'end') {
|
||||
handler();
|
||||
}
|
||||
}),
|
||||
};
|
||||
callback(mockResponse);
|
||||
return {
|
||||
on: vi.fn(),
|
||||
write: vi.fn(),
|
||||
end: vi.fn(),
|
||||
};
|
||||
});
|
||||
vi.mocked(https.default.request).mockImplementation(mockRequest);
|
||||
|
||||
const result = await controller.proxyTRPCRequest(baseParams);
|
||||
|
||||
expect(result.status).toBe(200);
|
||||
expect(result.statusText).toBe('OK');
|
||||
});
|
||||
|
||||
it('should retry request after token refresh on 401', async () => {
|
||||
mockRemoteServerConfigCtr.getRemoteServerConfig.mockResolvedValue({
|
||||
active: true,
|
||||
storageMode: 'cloud',
|
||||
});
|
||||
mockRemoteServerConfigCtr.getRemoteServerUrl.mockResolvedValue('https://api.example.com');
|
||||
mockRemoteServerConfigCtr.getAccessToken
|
||||
.mockResolvedValueOnce('expired-token')
|
||||
.mockResolvedValueOnce('new-valid-token');
|
||||
mockRemoteServerConfigCtr.refreshAccessToken.mockResolvedValue({ success: true });
|
||||
|
||||
const https = await import('node:https');
|
||||
let callCount = 0;
|
||||
const mockRequest = vi.fn().mockImplementation((options, callback) => {
|
||||
callCount++;
|
||||
const mockResponse = {
|
||||
statusCode: callCount === 1 ? 401 : 200,
|
||||
statusMessage: callCount === 1 ? 'Unauthorized' : 'OK',
|
||||
headers: { 'content-type': 'application/json' },
|
||||
on: vi.fn((event, handler) => {
|
||||
if (event === 'data') {
|
||||
handler(Buffer.from(callCount === 1 ? '' : '{"success":true}'));
|
||||
}
|
||||
if (event === 'end') {
|
||||
handler();
|
||||
}
|
||||
}),
|
||||
};
|
||||
callback(mockResponse);
|
||||
return {
|
||||
on: vi.fn(),
|
||||
write: vi.fn(),
|
||||
end: vi.fn(),
|
||||
};
|
||||
});
|
||||
vi.mocked(https.default.request).mockImplementation(mockRequest);
|
||||
|
||||
const result = await controller.proxyTRPCRequest(baseParams);
|
||||
|
||||
expect(mockRemoteServerConfigCtr.refreshAccessToken).toHaveBeenCalled();
|
||||
expect(result.status).toBe(200);
|
||||
});
|
||||
|
||||
it('should keep 401 response when token refresh fails', async () => {
|
||||
mockRemoteServerConfigCtr.getRemoteServerConfig.mockResolvedValue({
|
||||
active: true,
|
||||
storageMode: 'cloud',
|
||||
});
|
||||
mockRemoteServerConfigCtr.getRemoteServerUrl.mockResolvedValue('https://api.example.com');
|
||||
mockRemoteServerConfigCtr.getAccessToken.mockResolvedValue('expired-token');
|
||||
mockRemoteServerConfigCtr.refreshAccessToken.mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Refresh failed',
|
||||
});
|
||||
|
||||
const https = await import('node:https');
|
||||
const mockRequest = vi.fn().mockImplementation((options, callback) => {
|
||||
const mockResponse = {
|
||||
statusCode: 401,
|
||||
statusMessage: 'Unauthorized',
|
||||
headers: {},
|
||||
on: vi.fn((event, handler) => {
|
||||
if (event === 'data') {
|
||||
handler(Buffer.from(''));
|
||||
}
|
||||
if (event === 'end') {
|
||||
handler();
|
||||
}
|
||||
}),
|
||||
};
|
||||
callback(mockResponse);
|
||||
return {
|
||||
on: vi.fn(),
|
||||
write: vi.fn(),
|
||||
end: vi.fn(),
|
||||
};
|
||||
});
|
||||
vi.mocked(https.default.request).mockImplementation(mockRequest);
|
||||
|
||||
const result = await controller.proxyTRPCRequest(baseParams);
|
||||
|
||||
expect(mockRemoteServerConfigCtr.refreshAccessToken).toHaveBeenCalled();
|
||||
expect(result.status).toBe(401);
|
||||
});
|
||||
|
||||
it('should handle request error gracefully', async () => {
|
||||
mockRemoteServerConfigCtr.getRemoteServerConfig.mockResolvedValue({
|
||||
active: true,
|
||||
storageMode: 'cloud',
|
||||
});
|
||||
mockRemoteServerConfigCtr.getRemoteServerUrl.mockResolvedValue('https://api.example.com');
|
||||
mockRemoteServerConfigCtr.getAccessToken.mockResolvedValue('valid-token');
|
||||
|
||||
const https = await import('node:https');
|
||||
const mockRequest = vi.fn().mockImplementation((options, callback) => {
|
||||
return {
|
||||
on: vi.fn((event, handler) => {
|
||||
if (event === 'error') {
|
||||
handler(new Error('Network error'));
|
||||
}
|
||||
}),
|
||||
write: vi.fn(),
|
||||
end: vi.fn(),
|
||||
};
|
||||
});
|
||||
vi.mocked(https.default.request).mockImplementation(mockRequest);
|
||||
|
||||
const result = await controller.proxyTRPCRequest(baseParams);
|
||||
|
||||
expect(result.status).toBe(502);
|
||||
expect(result.statusText).toBe('Error forwarding request');
|
||||
});
|
||||
|
||||
it('should include request body when provided', async () => {
|
||||
mockRemoteServerConfigCtr.getRemoteServerConfig.mockResolvedValue({
|
||||
active: true,
|
||||
storageMode: 'cloud',
|
||||
});
|
||||
mockRemoteServerConfigCtr.getRemoteServerUrl.mockResolvedValue('https://api.example.com');
|
||||
mockRemoteServerConfigCtr.getAccessToken.mockResolvedValue('valid-token');
|
||||
|
||||
const https = await import('node:https');
|
||||
const mockWrite = vi.fn();
|
||||
const mockRequest = vi.fn().mockImplementation((options, callback) => {
|
||||
const mockResponse = {
|
||||
statusCode: 200,
|
||||
statusMessage: 'OK',
|
||||
headers: {},
|
||||
on: vi.fn((event, handler) => {
|
||||
if (event === 'data') {
|
||||
handler(Buffer.from('{"success":true}'));
|
||||
}
|
||||
if (event === 'end') {
|
||||
handler();
|
||||
}
|
||||
}),
|
||||
};
|
||||
callback(mockResponse);
|
||||
return {
|
||||
on: vi.fn(),
|
||||
write: mockWrite,
|
||||
end: vi.fn(),
|
||||
};
|
||||
});
|
||||
vi.mocked(https.default.request).mockImplementation(mockRequest);
|
||||
|
||||
const paramsWithBody: ProxyTRPCRequestParams = {
|
||||
...baseParams,
|
||||
method: 'POST',
|
||||
body: '{"data":"test"}',
|
||||
};
|
||||
|
||||
await controller.proxyTRPCRequest(paramsWithBody);
|
||||
|
||||
expect(mockWrite).toHaveBeenCalledWith('{"data":"test"}', 'utf8');
|
||||
});
|
||||
});
|
||||
|
||||
describe('afterAppReady', () => {
|
||||
it('should register stream:start IPC handler', async () => {
|
||||
const { ipcMain } = await import('electron');
|
||||
|
||||
controller.afterAppReady();
|
||||
|
||||
expect(ipcMain.on).toHaveBeenCalledWith('stream:start', expect.any(Function));
|
||||
});
|
||||
});
|
||||
|
||||
describe('destroy', () => {
|
||||
it('should clean up resources', () => {
|
||||
// destroy method doesn't throw
|
||||
expect(() => controller.destroy()).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -139,22 +139,24 @@ describe('SystemController', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('checkAccessibilityForMacOS', () => {
|
||||
it('should check accessibility on macOS', async () => {
|
||||
describe('accessibility', () => {
|
||||
it('should request accessibility access on macOS', async () => {
|
||||
const { systemPreferences } = await import('electron');
|
||||
|
||||
await invokeIpc('system.checkAccessibilityForMacOS');
|
||||
await invokeIpc('system.requestAccessibilityAccess');
|
||||
|
||||
expect(systemPreferences.isTrustedAccessibilityClient).toHaveBeenCalledWith(true);
|
||||
});
|
||||
|
||||
it('should return undefined on non-macOS', async () => {
|
||||
it('should return true on non-macOS when requesting accessibility access', async () => {
|
||||
const { macOS } = await import('electron-is');
|
||||
const { systemPreferences } = await import('electron');
|
||||
vi.mocked(macOS).mockReturnValue(false);
|
||||
|
||||
const result = await invokeIpc('system.checkAccessibilityForMacOS');
|
||||
const result = await invokeIpc('system.requestAccessibilityAccess');
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(result).toBe(true);
|
||||
expect(systemPreferences.isTrustedAccessibilityClient).not.toHaveBeenCalled();
|
||||
|
||||
// Reset
|
||||
vi.mocked(macOS).mockReturnValue(true);
|
||||
|
|
|
|||
|
|
@ -1,75 +0,0 @@
|
|||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import type { App } from '@/core/App';
|
||||
|
||||
import SystemServerCtr from '../SystemServerCtr';
|
||||
|
||||
vi.mock('@/utils/logger', () => ({
|
||||
createLogger: () => ({
|
||||
debug: vi.fn(),
|
||||
error: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock('node:fs', () => ({
|
||||
readFileSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('@/const/dir', () => ({
|
||||
DB_SCHEMA_HASH_FILENAME: 'db-schema-hash.txt',
|
||||
LOCAL_DATABASE_DIR: 'database',
|
||||
userDataDir: '/mock/user/data',
|
||||
}));
|
||||
|
||||
const mockApp = {
|
||||
appStoragePath: '/mock/storage',
|
||||
} as unknown as App;
|
||||
|
||||
describe('SystemServerCtr', () => {
|
||||
let controller: SystemServerCtr;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
controller = new SystemServerCtr(mockApp);
|
||||
});
|
||||
|
||||
it('returns database path', async () => {
|
||||
await expect(controller.getDatabasePath()).resolves.toBe('/mock/storage/database');
|
||||
});
|
||||
|
||||
it('reads schema hash when file exists', async () => {
|
||||
const { readFileSync } = await import('node:fs');
|
||||
vi.mocked(readFileSync).mockReturnValue('hash123');
|
||||
|
||||
await expect(controller.getDatabaseSchemaHash()).resolves.toBe('hash123');
|
||||
expect(readFileSync).toHaveBeenCalledWith('/mock/storage/db-schema-hash.txt', 'utf8');
|
||||
});
|
||||
|
||||
it('returns undefined when schema hash file missing', async () => {
|
||||
const { readFileSync } = await import('node:fs');
|
||||
vi.mocked(readFileSync).mockImplementation(() => {
|
||||
throw new Error('missing');
|
||||
});
|
||||
|
||||
await expect(controller.getDatabaseSchemaHash()).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
it('returns user data path', async () => {
|
||||
await expect(controller.getUserDataPath()).resolves.toBe('/mock/user/data');
|
||||
});
|
||||
|
||||
it('writes schema hash to disk', async () => {
|
||||
const { writeFileSync } = await import('node:fs');
|
||||
|
||||
await controller.setDatabaseSchemaHash('newhash');
|
||||
|
||||
expect(writeFileSync).toHaveBeenCalledWith(
|
||||
'/mock/storage/db-schema-hash.txt',
|
||||
'newhash',
|
||||
'utf8',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
@ -4,6 +4,7 @@ import AuthCtr from './AuthCtr';
|
|||
import BrowserWindowsCtr from './BrowserWindowsCtr';
|
||||
import DevtoolsCtr from './DevtoolsCtr';
|
||||
import LocalFileCtr from './LocalFileCtr';
|
||||
import McpCtr from './McpCtr';
|
||||
import McpInstallCtr from './McpInstallCtr';
|
||||
import MenuController from './MenuCtr';
|
||||
import NetworkProxyCtr from './NetworkProxyCtr';
|
||||
|
|
@ -13,7 +14,6 @@ import RemoteServerSyncCtr from './RemoteServerSyncCtr';
|
|||
import ShellCommandCtr from './ShellCommandCtr';
|
||||
import ShortcutController from './ShortcutCtr';
|
||||
import SystemController from './SystemCtr';
|
||||
import SystemServerCtr from './SystemServerCtr';
|
||||
import TrayMenuCtr from './TrayMenuCtr';
|
||||
import UpdaterCtr from './UpdaterCtr';
|
||||
import UploadFileCtr from './UploadFileCtr';
|
||||
|
|
@ -24,6 +24,7 @@ export const controllerIpcConstructors = [
|
|||
BrowserWindowsCtr,
|
||||
DevtoolsCtr,
|
||||
LocalFileCtr,
|
||||
McpCtr,
|
||||
McpInstallCtr,
|
||||
MenuController,
|
||||
NetworkProxyCtr,
|
||||
|
|
@ -43,7 +44,6 @@ type DesktopControllerServices = CreateServicesResult<DesktopControllerIpcConstr
|
|||
export type DesktopIpcServices = MergeIpcService<DesktopControllerServices>;
|
||||
|
||||
export const controllerServerIpcConstructors = [
|
||||
SystemServerCtr,
|
||||
UploadFileServerCtr,
|
||||
] as const satisfies readonly IpcServiceConstructor[];
|
||||
|
||||
|
|
|
|||
|
|
@ -1,23 +1,33 @@
|
|||
import {
|
||||
DEFAULT_VARIANTS,
|
||||
LOBE_LOCALE_COOKIE,
|
||||
LOBE_THEME_APPEARANCE,
|
||||
Locales,
|
||||
RouteVariants,
|
||||
} from '@lobechat/desktop-bridge';
|
||||
import { ElectronIPCEventHandler, ElectronIPCServer } from '@lobechat/electron-server-ipc';
|
||||
import { Session, app, protocol } from 'electron';
|
||||
import { app, protocol, session } from 'electron';
|
||||
import installExtension, { REACT_DEVELOPER_TOOLS } from 'electron-devtools-installer';
|
||||
import { macOS, windows } from 'electron-is';
|
||||
import { pathExistsSync, remove } from 'fs-extra';
|
||||
import { pathExistsSync } from 'fs-extra';
|
||||
import os from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { extname, join } from 'node:path';
|
||||
|
||||
import { name } from '@/../../package.json';
|
||||
import { LOCAL_DATABASE_DIR, buildDir, nextStandaloneDir } from '@/const/dir';
|
||||
import { buildDir, nextExportDir } from '@/const/dir';
|
||||
import { isDev } from '@/const/env';
|
||||
import { ELECTRON_BE_PROTOCOL_SCHEME } from '@/const/protocol';
|
||||
import { IControlModule } from '@/controllers';
|
||||
import { getDesktopEnv } from '@/env';
|
||||
import { IServiceModule } from '@/services';
|
||||
import { getServerMethodMetadata } from '@/utils/ipc';
|
||||
import { createLogger } from '@/utils/logger';
|
||||
import { CustomRequestHandler, createHandler } from '@/utils/next-electron-rsc';
|
||||
|
||||
import { BrowserManager } from './browser/BrowserManager';
|
||||
import { I18nManager } from './infrastructure/I18nManager';
|
||||
import { IoCContainer } from './infrastructure/IoCContainer';
|
||||
import { ProtocolManager } from './infrastructure/ProtocolManager';
|
||||
import { RendererProtocolManager } from './infrastructure/RendererProtocolManager';
|
||||
import { StaticFileServerManager } from './infrastructure/StaticFileServerManager';
|
||||
import { StoreManager } from './infrastructure/StoreManager';
|
||||
import { UpdaterManager } from './infrastructure/UpdaterManager';
|
||||
|
|
@ -35,8 +45,10 @@ type Class<T> = new (...args: any[]) => T;
|
|||
|
||||
const importAll = (r: any) => Object.values(r).map((v: any) => v.default);
|
||||
|
||||
const devDefaultRendererUrl = 'http://localhost:3015';
|
||||
|
||||
export class App {
|
||||
nextServerUrl = 'http://localhost:3015';
|
||||
rendererLoadedUrl: string;
|
||||
|
||||
browserManager: BrowserManager;
|
||||
menuManager: MenuManager;
|
||||
|
|
@ -47,7 +59,12 @@ export class App {
|
|||
trayManager: TrayManager;
|
||||
staticFileServerManager: StaticFileServerManager;
|
||||
protocolManager: ProtocolManager;
|
||||
rendererProtocolManager: RendererProtocolManager;
|
||||
chromeFlags: string[] = ['OverlayScrollbar', 'FluentOverlayScrollbar', 'FluentScrollbar'];
|
||||
/**
|
||||
* Escape hatch: allow testing static renderer in dev via env
|
||||
*/
|
||||
private readonly rendererStaticOverride = getDesktopEnv().DESKTOP_RENDERER_STATIC;
|
||||
|
||||
/**
|
||||
* whether app is in quiting
|
||||
|
|
@ -79,6 +96,27 @@ export class App {
|
|||
// Initialize store manager
|
||||
this.storeManager = new StoreManager(this);
|
||||
|
||||
this.rendererProtocolManager = new RendererProtocolManager({
|
||||
nextExportDir,
|
||||
resolveRendererFilePath: this.resolveRendererFilePath.bind(this),
|
||||
});
|
||||
protocol.registerSchemesAsPrivileged([
|
||||
{
|
||||
privileges: {
|
||||
allowServiceWorkers: true,
|
||||
corsEnabled: true,
|
||||
secure: true,
|
||||
standard: true,
|
||||
supportFetchAPI: true,
|
||||
},
|
||||
scheme: ELECTRON_BE_PROTOCOL_SCHEME,
|
||||
},
|
||||
this.rendererProtocolManager.protocolScheme,
|
||||
]);
|
||||
|
||||
// Initialize rendererLoadedUrl from RendererProtocolManager
|
||||
this.rendererLoadedUrl = this.rendererProtocolManager.getRendererUrl();
|
||||
|
||||
// load controllers
|
||||
const controllers: IControlModule[] = importAll(
|
||||
import.meta.glob('@/controllers/*Ctr.ts', { eager: true }),
|
||||
|
|
@ -106,9 +144,9 @@ export class App {
|
|||
this.staticFileServerManager = new StaticFileServerManager(this);
|
||||
this.protocolManager = new ProtocolManager(this);
|
||||
|
||||
// register the schema to interceptor url
|
||||
// it should register before app ready
|
||||
this.registerNextHandler();
|
||||
// Configure renderer loading strategy (dev server vs static export)
|
||||
// should register before app ready
|
||||
this.configureRendererLoader();
|
||||
|
||||
// initialize protocol handlers
|
||||
this.protocolManager.initialize();
|
||||
|
|
@ -130,9 +168,6 @@ export class App {
|
|||
|
||||
this.initDevBranding();
|
||||
|
||||
// Clean up stale database lock file before starting IPC server
|
||||
await this.cleanupDatabaseLock();
|
||||
|
||||
// ==============
|
||||
await this.ipcServer.start();
|
||||
logger.debug('IPC server started');
|
||||
|
|
@ -243,6 +278,8 @@ export class App {
|
|||
await app.whenReady();
|
||||
logger.debug('Application ready');
|
||||
|
||||
await this.installReactDevtools();
|
||||
|
||||
this.controllers.forEach((controller) => {
|
||||
if (typeof controller.afterAppReady === 'function') {
|
||||
try {
|
||||
|
|
@ -256,6 +293,21 @@ export class App {
|
|||
logger.info('Application ready state completed');
|
||||
};
|
||||
|
||||
/**
|
||||
* Development only: install React DevTools extension into Electron's devtools.
|
||||
*/
|
||||
private installReactDevtools = async () => {
|
||||
if (!isDev) return;
|
||||
|
||||
try {
|
||||
const name = await installExtension(REACT_DEVELOPER_TOOLS);
|
||||
|
||||
logger.info(`Installed DevTools extension: ${name}`);
|
||||
} catch (error) {
|
||||
logger.warn('Failed to install React DevTools extension', error);
|
||||
}
|
||||
};
|
||||
|
||||
// ============= helper ============= //
|
||||
|
||||
/**
|
||||
|
|
@ -272,53 +324,6 @@ export class App {
|
|||
shortcutMethodMap: ShortcutMethodMap = new Map();
|
||||
protocolHandlerMap: ProtocolHandlerMap = new Map();
|
||||
|
||||
/**
|
||||
* use in next router interceptor in prod browser render
|
||||
*/
|
||||
nextInterceptor: (params: { session: Session }) => () => void;
|
||||
|
||||
/**
|
||||
* Collection of unregister functions for custom request handlers
|
||||
*/
|
||||
private customHandlerUnregisterFns: Array<() => void> = [];
|
||||
|
||||
/**
|
||||
* Function to register custom request handler
|
||||
*/
|
||||
private registerCustomHandlerFn?: (handler: CustomRequestHandler) => () => void;
|
||||
|
||||
/**
|
||||
* Register custom request handler
|
||||
* @param handler Custom request handler function
|
||||
* @returns Function to unregister the handler
|
||||
*/
|
||||
registerRequestHandler = (handler: CustomRequestHandler): (() => void) => {
|
||||
if (!this.registerCustomHandlerFn) {
|
||||
logger.warn('Custom request handler registration is not available');
|
||||
return () => {};
|
||||
}
|
||||
|
||||
logger.debug('Registering custom request handler');
|
||||
const unregisterFn = this.registerCustomHandlerFn(handler);
|
||||
this.customHandlerUnregisterFns.push(unregisterFn);
|
||||
|
||||
return () => {
|
||||
unregisterFn();
|
||||
const index = this.customHandlerUnregisterFns.indexOf(unregisterFn);
|
||||
if (index !== -1) {
|
||||
this.customHandlerUnregisterFns.splice(index, 1);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Unregister all custom request handlers
|
||||
*/
|
||||
unregisterAllRequestHandlers = () => {
|
||||
this.customHandlerUnregisterFns.forEach((unregister) => unregister());
|
||||
this.customHandlerUnregisterFns = [];
|
||||
};
|
||||
|
||||
private addController = (ControllerClass: IControlModule) => {
|
||||
const controller = new ControllerClass(this);
|
||||
this.controllers.set(ControllerClass, controller);
|
||||
|
|
@ -362,56 +367,166 @@ export class App {
|
|||
}
|
||||
};
|
||||
|
||||
private resolveExportFilePath(pathname: string) {
|
||||
// Normalize by removing leading/trailing slashes so extname works as expected
|
||||
const normalizedPath = decodeURIComponent(pathname).replace(/^\/+/, '').replace(/\/$/, '');
|
||||
|
||||
if (!normalizedPath) return join(nextExportDir, 'index.html');
|
||||
|
||||
const basePath = join(nextExportDir, normalizedPath);
|
||||
const ext = extname(normalizedPath);
|
||||
|
||||
// If the request explicitly includes an extension (e.g. html, ico, txt),
|
||||
// treat it as a direct asset without variant injection.
|
||||
if (ext) {
|
||||
return pathExistsSync(basePath) ? basePath : null;
|
||||
}
|
||||
|
||||
const candidates = [`${basePath}.html`, join(basePath, 'index.html'), basePath];
|
||||
|
||||
for (const candidate of candidates) {
|
||||
if (pathExistsSync(candidate)) return candidate;
|
||||
}
|
||||
|
||||
const fallback404 = join(nextExportDir, '404.html');
|
||||
if (pathExistsSync(fallback404)) return fallback404;
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up stale database lock file from previous crashes or abnormal exits
|
||||
* Configure renderer loading strategy for dev/prod
|
||||
*/
|
||||
private cleanupDatabaseLock = async () => {
|
||||
try {
|
||||
const dbPath = join(this.appStoragePath, LOCAL_DATABASE_DIR);
|
||||
const lockPath = `${dbPath}.lock`;
|
||||
private configureRendererLoader() {
|
||||
if (isDev && !this.rendererStaticOverride) {
|
||||
this.rendererLoadedUrl = devDefaultRendererUrl;
|
||||
this.setupDevRenderer();
|
||||
return;
|
||||
}
|
||||
|
||||
if (pathExistsSync(lockPath)) {
|
||||
logger.info(`Cleaning up stale database lock file: ${lockPath}`);
|
||||
await remove(lockPath);
|
||||
logger.info('Database lock file removed successfully');
|
||||
} else {
|
||||
logger.debug('No database lock file found, skipping cleanup');
|
||||
if (isDev && this.rendererStaticOverride) {
|
||||
logger.warn('Dev mode: DESKTOP_RENDERER_STATIC enabled, using static renderer handler');
|
||||
}
|
||||
|
||||
this.setupProdRenderer();
|
||||
}
|
||||
|
||||
/**
|
||||
* Development: use Next dev server directly
|
||||
*/
|
||||
private setupDevRenderer() {
|
||||
logger.info('Development mode: renderer served from Next dev server, no protocol hook');
|
||||
}
|
||||
|
||||
/**
|
||||
* Production: serve static Next export assets
|
||||
*/
|
||||
private setupProdRenderer() {
|
||||
// Use the URL from RendererProtocolManager
|
||||
this.rendererLoadedUrl = this.rendererProtocolManager.getRendererUrl();
|
||||
this.rendererProtocolManager.registerHandler();
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve renderer file path in production by combining variant prefix and pathname.
|
||||
* Falls back to default variant when cookies are missing or invalid.
|
||||
*/
|
||||
private async resolveRendererFilePath(url: URL) {
|
||||
const pathname = url.pathname;
|
||||
const normalizedPathname = pathname.endsWith('/') ? pathname.slice(0, -1) : pathname;
|
||||
|
||||
// Static assets should be resolved from root (no variant prefix)
|
||||
if (
|
||||
pathname.startsWith('/_next/') ||
|
||||
pathname.startsWith('/static/') ||
|
||||
pathname === '/favicon.ico' ||
|
||||
pathname === '/manifest.json'
|
||||
) {
|
||||
return this.resolveExportFilePath(pathname);
|
||||
}
|
||||
|
||||
// If the incoming path already contains an extension (like .html or .ico),
|
||||
// treat it as a direct asset lookup to avoid double variant prefixes.
|
||||
const extension = extname(normalizedPathname);
|
||||
if (extension) {
|
||||
const directPath = this.resolveExportFilePath(pathname);
|
||||
if (directPath) return directPath;
|
||||
|
||||
// Next.js RSC payloads are emitted under variant folders (e.g. /en-US__0__light/__next._tree.txt),
|
||||
// but the runtime may request them without the variant prefix. For missing .txt requests,
|
||||
// retry resolution with variant injection.
|
||||
if (extension === '.txt' && normalizedPathname.includes('__next.')) {
|
||||
const variant = await this.getRouteVariantFromCookies();
|
||||
|
||||
return (
|
||||
this.resolveExportFilePath(`/${variant}${pathname}`) ||
|
||||
this.resolveExportFilePath(`/${this.defaultRouteVariant}${pathname}`) ||
|
||||
null
|
||||
);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
const variant = await this.getRouteVariantFromCookies();
|
||||
const variantPrefixedPath = `/${variant}${pathname}`;
|
||||
|
||||
// Try variant-specific path first, then default variant as fallback
|
||||
return (
|
||||
this.resolveExportFilePath(variantPrefixedPath) ||
|
||||
this.resolveExportFilePath(`/${this.defaultRouteVariant}${pathname}`) ||
|
||||
null
|
||||
);
|
||||
}
|
||||
|
||||
private readonly defaultRouteVariant = RouteVariants.serializeVariants(DEFAULT_VARIANTS);
|
||||
private readonly localeCookieName = LOBE_LOCALE_COOKIE;
|
||||
private readonly themeCookieName = LOBE_THEME_APPEARANCE;
|
||||
|
||||
/**
|
||||
* Build variant string from Electron session cookies to match Next export structure.
|
||||
* Desktop is always treated as non-mobile (0).
|
||||
*/
|
||||
private async getRouteVariantFromCookies(): Promise<string> {
|
||||
try {
|
||||
const cookies = await session.defaultSession.cookies.get({
|
||||
url: `${this.rendererLoadedUrl}/`,
|
||||
});
|
||||
const locale = cookies.find((c) => c.name === this.localeCookieName)?.value;
|
||||
const themeCookie = cookies.find((c) => c.name === this.themeCookieName)?.value;
|
||||
|
||||
const serialized = RouteVariants.serializeVariants(
|
||||
RouteVariants.createVariants({
|
||||
isMobile: false,
|
||||
locale: locale as Locales | undefined,
|
||||
theme: themeCookie === 'dark' || themeCookie === 'light' ? themeCookie : undefined,
|
||||
}),
|
||||
);
|
||||
|
||||
return RouteVariants.serializeVariants(RouteVariants.deserializeVariants(serialized));
|
||||
} catch (error) {
|
||||
logger.error('Failed to cleanup database lock file:', error);
|
||||
// Non-fatal error, allow application to continue
|
||||
logger.warn('Failed to read route variant cookies, using default', error);
|
||||
return this.defaultRouteVariant;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private registerNextHandler() {
|
||||
logger.debug('Registering Next.js handler');
|
||||
const handler = createHandler({
|
||||
debug: true,
|
||||
localhostUrl: this.nextServerUrl,
|
||||
protocol,
|
||||
standaloneDir: nextStandaloneDir,
|
||||
});
|
||||
/**
|
||||
* Build renderer URL with variant prefix injected into the path.
|
||||
* In dev mode (without static override), Next.js dev server handles routing automatically.
|
||||
* In prod or dev with static override, we need to inject variant to match export structure: /[variants]/path
|
||||
*/
|
||||
async buildRendererUrl(path: string): Promise<string> {
|
||||
// Ensure path starts with /
|
||||
const cleanPath = path.startsWith('/') ? path : `/${path}`;
|
||||
|
||||
// Log output based on development or production mode
|
||||
if (isDev) {
|
||||
logger.info(
|
||||
`Development mode: Custom request handler enabled, but Next.js interception disabled`,
|
||||
);
|
||||
} else {
|
||||
logger.info(
|
||||
`Production mode: ${this.nextServerUrl} will be intercepted to ${nextStandaloneDir}`,
|
||||
);
|
||||
// In dev mode without static override, use dev server directly (no variant needed)
|
||||
if (isDev && !this.rendererStaticOverride) {
|
||||
return `${this.rendererLoadedUrl}${cleanPath}`;
|
||||
}
|
||||
|
||||
this.nextInterceptor = handler.createInterceptor;
|
||||
|
||||
// Save custom handler registration function
|
||||
if (handler.registerCustomHandler) {
|
||||
this.registerCustomHandlerFn = handler.registerCustomHandler;
|
||||
logger.debug('Custom request handler registration is available');
|
||||
} else {
|
||||
logger.warn('Custom request handler registration is not available');
|
||||
}
|
||||
// In prod or dev with static override, inject variant for static export structure
|
||||
const variant = await this.getRouteVariantFromCookies();
|
||||
return `${this.rendererLoadedUrl}/${variant}.html${cleanPath}`;
|
||||
}
|
||||
|
||||
private initializeServerIpcEvents() {
|
||||
|
|
@ -445,6 +560,5 @@ export class App {
|
|||
|
||||
// 执行清理操作
|
||||
this.staticFileServerManager.destroy();
|
||||
this.unregisterAllRequestHandlers();
|
||||
};
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,13 +1,10 @@
|
|||
import { app } from 'electron';
|
||||
import { pathExistsSync, remove } from 'fs-extra';
|
||||
import { join } from 'node:path';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { LOCAL_DATABASE_DIR } from '@/const/dir';
|
||||
|
||||
// Import after mocks are set up
|
||||
import { App } from '../App';
|
||||
|
||||
const mockPathExistsSync = vi.fn();
|
||||
|
||||
// Mock electron modules
|
||||
vi.mock('electron', () => ({
|
||||
app: {
|
||||
|
|
@ -36,6 +33,24 @@ vi.mock('electron', () => ({
|
|||
protocol: {
|
||||
registerSchemesAsPrivileged: vi.fn(),
|
||||
},
|
||||
session: {
|
||||
defaultSession: {
|
||||
cookies: {
|
||||
get: vi.fn(async () => []),
|
||||
},
|
||||
},
|
||||
},
|
||||
}));
|
||||
|
||||
// electron-devtools-installer accesses electron.app.getPath at import-time in node env;
|
||||
// mock it to avoid side effects in unit tests
|
||||
vi.mock('electron-devtools-installer', () => ({
|
||||
REACT_DEVELOPER_TOOLS: 'REACT_DEVELOPER_TOOLS',
|
||||
default: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('fs-extra', () => ({
|
||||
pathExistsSync: (...args: any[]) => mockPathExistsSync(...args),
|
||||
}));
|
||||
|
||||
// Mock logger
|
||||
|
|
@ -48,16 +63,6 @@ vi.mock('@/utils/logger', () => ({
|
|||
}),
|
||||
}));
|
||||
|
||||
// Mock fs-extra module
|
||||
vi.mock('fs-extra', async () => {
|
||||
const actual = await vi.importActual('fs-extra');
|
||||
return {
|
||||
...actual,
|
||||
pathExistsSync: vi.fn(),
|
||||
remove: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
// Mock common/routes
|
||||
vi.mock('~common/routes', () => ({
|
||||
findMatchingRoute: vi.fn(),
|
||||
|
|
@ -80,11 +85,9 @@ vi.mock('@/const/env', () => ({
|
|||
|
||||
vi.mock('@/const/dir', () => ({
|
||||
buildDir: '/mock/build',
|
||||
nextStandaloneDir: '/mock/standalone',
|
||||
LOCAL_DATABASE_DIR: 'lobehub-local-db',
|
||||
nextExportDir: '/mock/export/out',
|
||||
appStorageDir: '/mock/storage/path',
|
||||
userDataDir: '/mock/user/data',
|
||||
DB_SCHEMA_HASH_FILENAME: 'lobehub-local-db-schema-hash',
|
||||
FILE_STORAGE_DIR: 'file-storage',
|
||||
INSTALL_PLUGINS_DIR: 'plugins',
|
||||
LOCAL_STORAGE_URL_PREFIX: '/lobe-desktop-file',
|
||||
|
|
@ -159,118 +162,25 @@ vi.mock('../ui/TrayManager', () => ({
|
|||
})),
|
||||
}));
|
||||
|
||||
vi.mock('@/utils/next-electron-rsc', () => ({
|
||||
createHandler: vi.fn(() => ({
|
||||
createInterceptor: vi.fn(),
|
||||
registerCustomHandler: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
// Mock controllers and services
|
||||
vi.mock('../../controllers/*Ctr.ts', () => ({}));
|
||||
vi.mock('../../services/*Srv.ts', () => ({}));
|
||||
|
||||
describe('App - Database Lock Cleanup', () => {
|
||||
describe('App', () => {
|
||||
let appInstance: App;
|
||||
let mockLockPath: string;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockPathExistsSync.mockReset();
|
||||
|
||||
// Mock glob imports to return empty arrays
|
||||
import.meta.glob = vi.fn(() => ({}));
|
||||
|
||||
mockLockPath = join('/mock/storage/path', LOCAL_DATABASE_DIR) + '.lock';
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('bootstrap - database lock cleanup', () => {
|
||||
it('should remove stale lock file if it exists during bootstrap', async () => {
|
||||
// Setup: simulate existing lock file
|
||||
vi.mocked(pathExistsSync).mockReturnValue(true);
|
||||
vi.mocked(remove).mockResolvedValue(undefined);
|
||||
|
||||
// Create app instance
|
||||
appInstance = new App();
|
||||
|
||||
// Call bootstrap which should trigger cleanup
|
||||
await appInstance.bootstrap();
|
||||
|
||||
// Verify: lock file check was called
|
||||
expect(pathExistsSync).toHaveBeenCalledWith(mockLockPath);
|
||||
|
||||
// Verify: lock file was removed
|
||||
expect(remove).toHaveBeenCalledWith(mockLockPath);
|
||||
});
|
||||
|
||||
it('should not attempt to remove lock file if it does not exist', async () => {
|
||||
// Setup: no lock file exists
|
||||
vi.mocked(pathExistsSync).mockReturnValue(false);
|
||||
|
||||
// Create app instance
|
||||
appInstance = new App();
|
||||
|
||||
// Call bootstrap
|
||||
await appInstance.bootstrap();
|
||||
|
||||
// Verify: lock file check was called
|
||||
expect(pathExistsSync).toHaveBeenCalledWith(mockLockPath);
|
||||
|
||||
// Verify: remove was NOT called since file doesn't exist
|
||||
expect(remove).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should continue bootstrap even if lock cleanup fails', async () => {
|
||||
// Setup: simulate lock file exists but cleanup fails
|
||||
vi.mocked(pathExistsSync).mockReturnValue(true);
|
||||
vi.mocked(remove).mockRejectedValue(new Error('Permission denied'));
|
||||
|
||||
// Create app instance
|
||||
appInstance = new App();
|
||||
|
||||
// Bootstrap should not throw even if cleanup fails
|
||||
await expect(appInstance.bootstrap()).resolves.not.toThrow();
|
||||
|
||||
// Verify: cleanup was attempted
|
||||
expect(pathExistsSync).toHaveBeenCalledWith(mockLockPath);
|
||||
expect(remove).toHaveBeenCalledWith(mockLockPath);
|
||||
});
|
||||
|
||||
it('should clean up lock file before starting IPC server', async () => {
|
||||
// Setup
|
||||
vi.mocked(pathExistsSync).mockReturnValue(true);
|
||||
const callOrder: string[] = [];
|
||||
|
||||
vi.mocked(remove).mockImplementation(async () => {
|
||||
callOrder.push('remove');
|
||||
});
|
||||
|
||||
// Mock IPC server start to track call order
|
||||
const { ElectronIPCServer } = await import('@lobechat/electron-server-ipc');
|
||||
const mockStart = vi.fn().mockImplementation(() => {
|
||||
callOrder.push('ipcServer.start');
|
||||
return Promise.resolve();
|
||||
});
|
||||
|
||||
vi.mocked(ElectronIPCServer).mockImplementation(
|
||||
() =>
|
||||
({
|
||||
start: mockStart,
|
||||
}) as any,
|
||||
);
|
||||
|
||||
// Create app instance and bootstrap
|
||||
appInstance = new App();
|
||||
await appInstance.bootstrap();
|
||||
|
||||
// Verify: cleanup happens before IPC server starts
|
||||
expect(callOrder).toEqual(['remove', 'ipcServer.start']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('appStoragePath', () => {
|
||||
it('should return storage path from store manager', () => {
|
||||
appInstance = new App();
|
||||
|
|
@ -280,4 +190,46 @@ describe('App - Database Lock Cleanup', () => {
|
|||
expect(storagePath).toBe('/mock/storage/path');
|
||||
});
|
||||
});
|
||||
|
||||
describe('resolveRendererFilePath', () => {
|
||||
it('should retry missing .txt requests with variant-prefixed lookup', async () => {
|
||||
appInstance = new App();
|
||||
|
||||
// Avoid touching the electron session cookie code path in this unit test
|
||||
(appInstance as any).getRouteVariantFromCookies = vi.fn(async () => 'en-US__0__light');
|
||||
|
||||
mockPathExistsSync.mockImplementation((p: string) => {
|
||||
// root miss
|
||||
if (p === '/mock/export/out/__next._tree.txt') return false;
|
||||
// variant hit
|
||||
if (p === '/mock/export/out/en-US__0__light/__next._tree.txt') return true;
|
||||
return false;
|
||||
});
|
||||
|
||||
const resolved = await (appInstance as any).resolveRendererFilePath(
|
||||
new URL('app://next/__next._tree.txt'),
|
||||
);
|
||||
|
||||
expect(resolved).toBe('/mock/export/out/en-US__0__light/__next._tree.txt');
|
||||
});
|
||||
|
||||
it('should keep direct lookup for existing root .txt assets (no variant retry)', async () => {
|
||||
appInstance = new App();
|
||||
|
||||
(appInstance as any).getRouteVariantFromCookies = vi.fn(async () => {
|
||||
throw new Error('should not be called');
|
||||
});
|
||||
|
||||
mockPathExistsSync.mockImplementation((p: string) => {
|
||||
if (p === '/mock/export/out/en-US__0__light.txt') return true;
|
||||
return false;
|
||||
});
|
||||
|
||||
const resolved = await (appInstance as any).resolveRendererFilePath(
|
||||
new URL('app://next/en-US__0__light.txt'),
|
||||
);
|
||||
|
||||
expect(resolved).toBe('/mock/export/out/en-US__0__light.txt');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -2,14 +2,17 @@ import { MainBroadcastEventKey, MainBroadcastParams } from '@lobechat/electron-c
|
|||
import {
|
||||
BrowserWindow,
|
||||
BrowserWindowConstructorOptions,
|
||||
session as electronSession,
|
||||
ipcMain,
|
||||
nativeTheme,
|
||||
screen,
|
||||
} from 'electron';
|
||||
import console from 'node:console';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import { buildDir, preloadDir, resourcesDir } from '@/const/dir';
|
||||
import { isDev, isMac, isWindows } from '@/const/env';
|
||||
import { ELECTRON_BE_PROTOCOL_SCHEME } from '@/const/protocol';
|
||||
import {
|
||||
BACKGROUND_DARK,
|
||||
BACKGROUND_LIGHT,
|
||||
|
|
@ -18,6 +21,8 @@ import {
|
|||
THEME_CHANGE_DELAY,
|
||||
TITLE_BAR_HEIGHT,
|
||||
} from '@/const/theme';
|
||||
import RemoteServerConfigCtr from '@/controllers/RemoteServerConfigCtr';
|
||||
import { backendProxyProtocolManager } from '@/core/infrastructure/BackendProxyProtocolManager';
|
||||
import { createLogger } from '@/utils/logger';
|
||||
|
||||
import type { App } from '../App';
|
||||
|
|
@ -41,7 +46,6 @@ export default class Browser {
|
|||
private app: App;
|
||||
private _browserWindow?: BrowserWindow;
|
||||
private themeListenerSetup = false;
|
||||
private stopInterceptHandler;
|
||||
identifier: string;
|
||||
options: BrowserWindowOpts;
|
||||
private readonly windowStateKey: string;
|
||||
|
|
@ -167,11 +171,14 @@ export default class Browser {
|
|||
}
|
||||
|
||||
loadUrl = async (path: string) => {
|
||||
const initUrl = this.app.nextServerUrl + path;
|
||||
const initUrl = await this.app.buildRendererUrl(path);
|
||||
|
||||
console.log('[Browser] initUrl', initUrl);
|
||||
|
||||
try {
|
||||
logger.debug(`[${this.identifier}] Attempting to load URL: ${initUrl}`);
|
||||
await this._browserWindow.loadURL(initUrl);
|
||||
|
||||
logger.debug(`[${this.identifier}] Successfully loaded URL: ${initUrl}`);
|
||||
} catch (error) {
|
||||
logger.error(`[${this.identifier}] Failed to load URL (${initUrl}):`, error);
|
||||
|
|
@ -295,7 +302,6 @@ export default class Browser {
|
|||
*/
|
||||
destroy() {
|
||||
logger.debug(`Destroying window instance: ${this.identifier}`);
|
||||
this.stopInterceptHandler?.();
|
||||
this.cleanupThemeListener();
|
||||
this._browserWindow = undefined;
|
||||
}
|
||||
|
|
@ -339,6 +345,7 @@ export default class Browser {
|
|||
backgroundThrottling: false,
|
||||
contextIsolation: true,
|
||||
preload: join(preloadDir, 'index.js'),
|
||||
sandbox: false,
|
||||
},
|
||||
width: savedState?.width || width,
|
||||
...this.getPlatformThemeConfig(isDarkMode),
|
||||
|
|
@ -354,13 +361,10 @@ export default class Browser {
|
|||
// Apply initial visual effects
|
||||
this.applyVisualEffects();
|
||||
|
||||
logger.debug(`[${this.identifier}] Setting up nextInterceptor.`);
|
||||
this.stopInterceptHandler = this.app.nextInterceptor({
|
||||
session: browserWindow.webContents.session,
|
||||
});
|
||||
|
||||
// Setup CORS bypass for local file server
|
||||
this.setupCORSBypass(browserWindow);
|
||||
// Setup request hook for remote server sync (base URL rewrite + OIDC header)
|
||||
this.setupRemoteServerRequestHook(browserWindow);
|
||||
|
||||
logger.debug(`[${this.identifier}] Initiating placeholder and URL loading sequence.`);
|
||||
this.loadPlaceholder().then(() => {
|
||||
|
|
@ -409,8 +413,7 @@ export default class Browser {
|
|||
} catch (error) {
|
||||
logger.error(`[${this.identifier}] Failed to save window state on quit:`, error);
|
||||
}
|
||||
// Need to clean up intercept handler and theme manager
|
||||
this.stopInterceptHandler?.();
|
||||
// Need to clean up theme manager
|
||||
this.cleanupThemeListener();
|
||||
return;
|
||||
}
|
||||
|
|
@ -445,8 +448,7 @@ export default class Browser {
|
|||
} catch (error) {
|
||||
logger.error(`[${this.identifier}] Failed to save window state on close:`, error);
|
||||
}
|
||||
// Need to clean up intercept handler and theme manager
|
||||
this.stopInterceptHandler?.();
|
||||
// Need to clean up theme manager
|
||||
this.cleanupThemeListener();
|
||||
}
|
||||
});
|
||||
|
|
@ -528,4 +530,27 @@ export default class Browser {
|
|||
|
||||
logger.debug(`[${this.identifier}] CORS bypass setup completed`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Rewrite tRPC requests to remote server and inject OIDC token via webRequest hooks.
|
||||
* Replaces the previous proxyTRPCRequest IPC forwarding.
|
||||
*/
|
||||
private setupRemoteServerRequestHook(browserWindow: BrowserWindow) {
|
||||
const session = browserWindow.webContents.session;
|
||||
const remoteServerConfigCtr = this.app.getController(RemoteServerConfigCtr);
|
||||
|
||||
const targetSession = session || electronSession.defaultSession;
|
||||
if (!targetSession) return;
|
||||
|
||||
backendProxyProtocolManager.registerWithRemoteBaseUrl(targetSession, {
|
||||
getAccessToken: () => remoteServerConfigCtr.getAccessToken(),
|
||||
getRemoteBaseUrl: async () => {
|
||||
const config = await remoteServerConfigCtr.getRemoteServerConfig();
|
||||
const remoteServerUrl = await remoteServerConfigCtr.getRemoteServerUrl(config);
|
||||
return remoteServerUrl || null;
|
||||
},
|
||||
scheme: ELECTRON_BE_PROTOCOL_SCHEME,
|
||||
source: this.identifier,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -107,7 +107,11 @@ describe('Browser', () => {
|
|||
let mockApp: AppCore;
|
||||
let mockStoreManagerGet: ReturnType<typeof vi.fn>;
|
||||
let mockStoreManagerSet: ReturnType<typeof vi.fn>;
|
||||
let mockNextInterceptor: ReturnType<typeof vi.fn>;
|
||||
let mockRemoteServerConfigCtr: {
|
||||
getAccessToken: ReturnType<typeof vi.fn>;
|
||||
getRemoteServerConfig: ReturnType<typeof vi.fn>;
|
||||
};
|
||||
let autoLoadUrlSpy: ReturnType<typeof vi.spyOn> | undefined;
|
||||
|
||||
const defaultOptions: BrowserWindowOpts = {
|
||||
height: 600,
|
||||
|
|
@ -133,14 +137,34 @@ describe('Browser', () => {
|
|||
// Create mock App
|
||||
mockStoreManagerGet = vi.fn().mockReturnValue(undefined);
|
||||
mockStoreManagerSet = vi.fn();
|
||||
mockNextInterceptor = vi.fn().mockReturnValue(vi.fn());
|
||||
|
||||
// Browser setup now installs protocol handlers that depend on RemoteServerConfigCtr
|
||||
mockRemoteServerConfigCtr = {
|
||||
getAccessToken: vi.fn().mockResolvedValue(null),
|
||||
getRemoteServerConfig: vi.fn().mockResolvedValue({
|
||||
remoteServerUrl: 'http://localhost:3000',
|
||||
}),
|
||||
};
|
||||
|
||||
// Ensure Browser can register protocol handlers on the session
|
||||
(mockBrowserWindow.webContents.session as any).protocol = {
|
||||
handle: vi.fn(),
|
||||
};
|
||||
|
||||
mockApp = {
|
||||
browserManager: {
|
||||
retrieveByIdentifier: vi.fn(),
|
||||
},
|
||||
buildRendererUrl: vi.fn(async (path: string) => {
|
||||
const cleanPath = path.startsWith('/') ? path : `/${path}`;
|
||||
return `http://localhost:3000${cleanPath}`;
|
||||
}),
|
||||
getController: vi.fn((ctr: any) => {
|
||||
// Only the remote server config controller is required in these unit tests
|
||||
if (ctr?.name === 'RemoteServerConfigCtr') return mockRemoteServerConfigCtr;
|
||||
throw new Error(`Unexpected controller requested in Browser tests: ${ctr?.name ?? ctr}`);
|
||||
}),
|
||||
isQuiting: false,
|
||||
nextInterceptor: mockNextInterceptor,
|
||||
nextServerUrl: 'http://localhost:3000',
|
||||
storeManager: {
|
||||
get: mockStoreManagerGet,
|
||||
|
|
@ -149,6 +173,8 @@ describe('Browser', () => {
|
|||
} as unknown as AppCore;
|
||||
|
||||
browser = new Browser(defaultOptions, mockApp);
|
||||
// The constructor triggers an async placeholder->loadUrl chain; stub it to avoid cross-test flakiness.
|
||||
autoLoadUrlSpy = vi.spyOn(browser, 'loadUrl').mockResolvedValue(undefined as any);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
|
|
@ -164,10 +190,6 @@ describe('Browser', () => {
|
|||
it('should create BrowserWindow on construction', () => {
|
||||
expect(MockBrowserWindow).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should setup next interceptor', () => {
|
||||
expect(mockNextInterceptor).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('browserWindow getter', () => {
|
||||
|
|
@ -344,12 +366,14 @@ describe('Browser', () => {
|
|||
|
||||
describe('loadUrl', () => {
|
||||
it('should load full URL successfully', async () => {
|
||||
autoLoadUrlSpy?.mockRestore();
|
||||
await browser.loadUrl('/test-path');
|
||||
|
||||
expect(mockBrowserWindow.loadURL).toHaveBeenCalledWith('http://localhost:3000/test-path');
|
||||
});
|
||||
|
||||
it('should load error page on failure', async () => {
|
||||
autoLoadUrlSpy?.mockRestore();
|
||||
mockBrowserWindow.loadURL.mockRejectedValueOnce(new Error('Load failed'));
|
||||
|
||||
await browser.loadUrl('/test-path');
|
||||
|
|
@ -358,6 +382,7 @@ describe('Browser', () => {
|
|||
});
|
||||
|
||||
it('should setup retry handler on error', async () => {
|
||||
autoLoadUrlSpy?.mockRestore();
|
||||
mockBrowserWindow.loadURL.mockRejectedValueOnce(new Error('Load failed'));
|
||||
|
||||
await browser.loadUrl('/test-path');
|
||||
|
|
@ -367,9 +392,13 @@ describe('Browser', () => {
|
|||
});
|
||||
|
||||
it('should load fallback HTML when error page fails', async () => {
|
||||
autoLoadUrlSpy?.mockRestore();
|
||||
mockBrowserWindow.loadURL.mockRejectedValueOnce(new Error('Load failed'));
|
||||
mockBrowserWindow.loadFile.mockRejectedValueOnce(new Error('Error page failed'));
|
||||
mockBrowserWindow.loadURL.mockResolvedValueOnce(undefined);
|
||||
mockBrowserWindow.loadFile.mockImplementation(async (filePath: string) => {
|
||||
if (filePath === '/mock/resources/error.html') throw new Error('Error page failed');
|
||||
return undefined;
|
||||
});
|
||||
|
||||
await browser.loadUrl('/test-path');
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,149 @@
|
|||
import type { Session } from 'electron';
|
||||
|
||||
import { createLogger } from '@/utils/logger';
|
||||
|
||||
interface BackendProxyProtocolManagerOptions {
|
||||
getAccessToken: () => Promise<string | undefined | null>;
|
||||
rewriteUrl: (rawUrl: string) => Promise<string | null>;
|
||||
scheme: string;
|
||||
/**
|
||||
* Used for log prefixes. e.g. window identifier
|
||||
*/
|
||||
source?: string;
|
||||
}
|
||||
|
||||
interface BackendProxyProtocolManagerRemoteBaseOptions {
|
||||
getAccessToken: () => Promise<string | undefined | null>;
|
||||
getRemoteBaseUrl: () => Promise<string | undefined | null>;
|
||||
scheme: string;
|
||||
/**
|
||||
* Used for log prefixes. e.g. window identifier
|
||||
*/
|
||||
source?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Manage `lobe-backend://` (or any custom scheme) transparent proxy handler registration.
|
||||
* Keeps a WeakSet per session to avoid duplicate handler registration.
|
||||
*/
|
||||
export class BackendProxyProtocolManager {
|
||||
private readonly handledSessions = new WeakSet<Session>();
|
||||
private readonly logger = createLogger('core:BackendProxyProtocolManager');
|
||||
|
||||
registerWithRemoteBaseUrl(
|
||||
session: Session,
|
||||
options: BackendProxyProtocolManagerRemoteBaseOptions,
|
||||
) {
|
||||
let lastRemoteBaseUrl: string | undefined;
|
||||
|
||||
const rewriteUrl = async (rawUrl: string) => {
|
||||
lastRemoteBaseUrl = undefined;
|
||||
try {
|
||||
const requestUrl = new URL(rawUrl);
|
||||
|
||||
const remoteBaseUrl = await options.getRemoteBaseUrl();
|
||||
if (!remoteBaseUrl) return null;
|
||||
lastRemoteBaseUrl = remoteBaseUrl;
|
||||
|
||||
const remoteBase = new URL(remoteBaseUrl);
|
||||
if (requestUrl.origin === remoteBase.origin) return null;
|
||||
|
||||
const rewrittenUrl = new URL(
|
||||
requestUrl.pathname + requestUrl.search,
|
||||
remoteBase,
|
||||
).toString();
|
||||
this.logger.debug(
|
||||
`${options.source ? `[${options.source}] ` : ''}BackendProxy rewrite ${rawUrl} -> ${rewrittenUrl}`,
|
||||
);
|
||||
return rewrittenUrl;
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
`${options.source ? `[${options.source}] ` : ''}BackendProxy rewriteUrl error (rawUrl=${rawUrl}, remoteBaseUrl=${lastRemoteBaseUrl})`,
|
||||
error,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
this.register(session, {
|
||||
getAccessToken: options.getAccessToken,
|
||||
rewriteUrl,
|
||||
scheme: options.scheme,
|
||||
source: options.source,
|
||||
});
|
||||
}
|
||||
|
||||
register(session: Session, options: BackendProxyProtocolManagerOptions) {
|
||||
if (!session || this.handledSessions.has(session)) return;
|
||||
|
||||
const logPrefix = options.source ? `[${options.source}] BackendProxy` : '[BackendProxy]';
|
||||
|
||||
session.protocol.handle(options.scheme, async (request: Request): Promise<Response | null> => {
|
||||
try {
|
||||
const rewrittenUrl = await options.rewriteUrl(request.url);
|
||||
if (!rewrittenUrl) return null;
|
||||
|
||||
const headers = new Headers(request.headers);
|
||||
const token = await options.getAccessToken();
|
||||
if (token) headers.set('Oidc-Auth', token);
|
||||
|
||||
// eslint-disable-next-line no-undef
|
||||
const requestInit: RequestInit & { duplex?: 'half' } = {
|
||||
headers,
|
||||
method: request.method,
|
||||
};
|
||||
|
||||
// Only forward body for non-GET/HEAD requests
|
||||
if (request.method !== 'GET' && request.method !== 'HEAD') {
|
||||
const body = request.body ?? undefined;
|
||||
if (body) {
|
||||
requestInit.body = body;
|
||||
// Node.js (undici) requires `duplex` when sending a streaming body
|
||||
requestInit.duplex = 'half';
|
||||
}
|
||||
}
|
||||
|
||||
let upstreamResponse: Response;
|
||||
try {
|
||||
upstreamResponse = await fetch(rewrittenUrl, requestInit);
|
||||
} catch (error) {
|
||||
this.logger.error(`${logPrefix} upstream fetch failed: ${rewrittenUrl}`, error);
|
||||
|
||||
return new Response('Upstream fetch failed, target url: ' + rewrittenUrl, {
|
||||
headers: {
|
||||
'Content-Type': 'text/plain; charset=utf-8',
|
||||
},
|
||||
status: 502,
|
||||
statusText: 'Bad Gateway',
|
||||
});
|
||||
}
|
||||
|
||||
const responseHeaders = new Headers(upstreamResponse.headers);
|
||||
const allowOrigin = request.headers.get('Origin') || undefined;
|
||||
|
||||
if (allowOrigin) {
|
||||
responseHeaders.set('Access-Control-Allow-Origin', allowOrigin);
|
||||
responseHeaders.set('Access-Control-Allow-Credentials', 'true');
|
||||
}
|
||||
|
||||
responseHeaders.set('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS');
|
||||
responseHeaders.set('Access-Control-Allow-Headers', '*');
|
||||
responseHeaders.set('X-Src-Url', rewrittenUrl);
|
||||
|
||||
return new Response(upstreamResponse.body, {
|
||||
headers: responseHeaders,
|
||||
status: upstreamResponse.status,
|
||||
statusText: upstreamResponse.statusText,
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error(`${logPrefix} protocol.handle error:`, error);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
this.logger.debug(`${logPrefix} protocol handler registered for ${options.scheme}`);
|
||||
this.handledSessions.add(session);
|
||||
}
|
||||
}
|
||||
|
||||
export const backendProxyProtocolManager = new BackendProxyProtocolManager();
|
||||
|
|
@ -0,0 +1,250 @@
|
|||
import { app, protocol } from 'electron';
|
||||
import { pathExistsSync } from 'fs-extra';
|
||||
import { readFile, stat } from 'node:fs/promises';
|
||||
import { basename, extname } from 'node:path';
|
||||
|
||||
import { createLogger } from '@/utils/logger';
|
||||
|
||||
import { getExportMimeType } from '../../utils/mime';
|
||||
|
||||
type ResolveRendererFilePath = (url: URL) => Promise<string | null>;
|
||||
|
||||
const RENDERER_PROTOCOL_PRIVILEGES = {
|
||||
allowServiceWorkers: true,
|
||||
corsEnabled: true,
|
||||
secure: true,
|
||||
standard: true,
|
||||
supportFetchAPI: true,
|
||||
} as const;
|
||||
|
||||
interface RendererProtocolManagerOptions {
|
||||
host?: string;
|
||||
nextExportDir: string;
|
||||
resolveRendererFilePath: ResolveRendererFilePath;
|
||||
scheme?: string;
|
||||
}
|
||||
|
||||
const RENDERER_DIR = 'next';
|
||||
export class RendererProtocolManager {
|
||||
private readonly scheme: string;
|
||||
private readonly host: string;
|
||||
private readonly nextExportDir: string;
|
||||
private readonly resolveRendererFilePath: ResolveRendererFilePath;
|
||||
private handlerRegistered = false;
|
||||
|
||||
constructor(options: RendererProtocolManagerOptions) {
|
||||
const { nextExportDir, resolveRendererFilePath } = options;
|
||||
|
||||
this.scheme = 'app';
|
||||
this.host = RENDERER_DIR;
|
||||
this.nextExportDir = nextExportDir;
|
||||
this.resolveRendererFilePath = resolveRendererFilePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the full renderer URL with scheme and host
|
||||
*/
|
||||
getRendererUrl(): string {
|
||||
return `${this.scheme}://${this.host}`;
|
||||
}
|
||||
|
||||
get protocolScheme() {
|
||||
return {
|
||||
privileges: RENDERER_PROTOCOL_PRIVILEGES,
|
||||
scheme: this.scheme,
|
||||
};
|
||||
}
|
||||
registerHandler() {
|
||||
if (this.handlerRegistered) return;
|
||||
|
||||
if (!pathExistsSync(this.nextExportDir)) {
|
||||
createLogger('core:RendererProtocolManager').warn(
|
||||
`Next export directory not found, skip static handler: ${this.nextExportDir}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const logger = createLogger('core:RendererProtocolManager');
|
||||
logger.debug(
|
||||
`Registering renderer ${this.scheme}:// handler for production export at host ${this.host}`,
|
||||
);
|
||||
|
||||
const register = () => {
|
||||
if (this.handlerRegistered) return;
|
||||
|
||||
protocol.handle(this.scheme, async (request) => {
|
||||
const url = new URL(request.url);
|
||||
const hostname = url.hostname;
|
||||
const pathname = url.pathname;
|
||||
const isAssetRequest = this.isAssetRequest(pathname);
|
||||
const isExplicit404HtmlRequest = pathname.endsWith('/404.html');
|
||||
|
||||
if (hostname !== this.host) {
|
||||
return new Response('Not Found', { status: 404 });
|
||||
}
|
||||
|
||||
const buildFileResponse = async (targetPath: string) => {
|
||||
const fileStat = await stat(targetPath);
|
||||
const totalSize = fileStat.size;
|
||||
|
||||
const buffer = await readFile(targetPath);
|
||||
const headers = new Headers();
|
||||
const mimeType = getExportMimeType(targetPath);
|
||||
|
||||
if (mimeType) headers.set('Content-Type', mimeType);
|
||||
|
||||
// Chromium media pipeline relies on byte ranges for video/audio.
|
||||
headers.set('Accept-Ranges', 'bytes');
|
||||
|
||||
const method = request.method?.toUpperCase?.() || 'GET';
|
||||
const rangeHeader = request.headers.get('range') || request.headers.get('Range');
|
||||
|
||||
// HEAD (no range): return only headers
|
||||
if (method === 'HEAD' && !rangeHeader) {
|
||||
headers.set('Content-Length', String(totalSize));
|
||||
return new Response(null, { headers, status: 200 });
|
||||
}
|
||||
|
||||
// No Range: return entire file
|
||||
if (!rangeHeader) {
|
||||
headers.set('Content-Length', String(buffer.byteLength));
|
||||
return new Response(buffer, { headers, status: 200 });
|
||||
}
|
||||
|
||||
// Range: bytes=start-end | bytes=-suffixLength
|
||||
const match = /^bytes=(\d*)-(\d*)$/i.exec(rangeHeader.trim());
|
||||
if (!match) {
|
||||
headers.set('Content-Range', `bytes */${totalSize}`);
|
||||
return new Response(null, {
|
||||
headers,
|
||||
status: 416,
|
||||
statusText: 'Range Not Satisfiable',
|
||||
});
|
||||
}
|
||||
|
||||
const [, startRaw, endRaw] = match;
|
||||
let start = startRaw ? Number(startRaw) : NaN;
|
||||
let end = endRaw ? Number(endRaw) : NaN;
|
||||
|
||||
// Suffix range: bytes=-N (last N bytes)
|
||||
if (!startRaw && endRaw) {
|
||||
const suffixLength = Number(endRaw);
|
||||
if (!Number.isFinite(suffixLength) || suffixLength <= 0) {
|
||||
headers.set('Content-Range', `bytes */${totalSize}`);
|
||||
return new Response(null, {
|
||||
headers,
|
||||
status: 416,
|
||||
statusText: 'Range Not Satisfiable',
|
||||
});
|
||||
}
|
||||
start = Math.max(totalSize - suffixLength, 0);
|
||||
end = totalSize - 1;
|
||||
} else {
|
||||
if (!Number.isFinite(start)) start = 0;
|
||||
if (!Number.isFinite(end)) end = totalSize - 1;
|
||||
}
|
||||
|
||||
if (start < 0 || end < 0 || start > end || start >= totalSize) {
|
||||
headers.set('Content-Range', `bytes */${totalSize}`);
|
||||
return new Response(null, {
|
||||
headers,
|
||||
status: 416,
|
||||
statusText: 'Range Not Satisfiable',
|
||||
});
|
||||
}
|
||||
|
||||
end = Math.min(end, totalSize - 1);
|
||||
const sliced = buffer.subarray(start, end + 1);
|
||||
|
||||
headers.set('Content-Range', `bytes ${start}-${end}/${totalSize}`);
|
||||
headers.set('Content-Length', String(sliced.byteLength));
|
||||
|
||||
if (method === 'HEAD') {
|
||||
return new Response(null, { headers, status: 206, statusText: 'Partial Content' });
|
||||
}
|
||||
|
||||
return new Response(sliced, { headers, status: 206, statusText: 'Partial Content' });
|
||||
};
|
||||
|
||||
const resolveEntryFilePath = () =>
|
||||
this.resolveRendererFilePath(new URL(`${this.scheme}://${this.host}/`));
|
||||
|
||||
let filePath = await this.resolveRendererFilePath(url);
|
||||
|
||||
// If the resolved file is the export 404 page, treat it as missing so we can
|
||||
// fall back to the entry HTML for SPA routing (unless explicitly requested).
|
||||
if (filePath && this.is404Html(filePath) && !isExplicit404HtmlRequest) {
|
||||
filePath = null;
|
||||
}
|
||||
|
||||
if (!filePath) {
|
||||
if (isAssetRequest) {
|
||||
return new Response('File Not Found', { status: 404 });
|
||||
}
|
||||
|
||||
// Fallback to entry HTML for unknown routes (SPA-like behavior)
|
||||
filePath = await resolveEntryFilePath();
|
||||
if (!filePath || this.is404Html(filePath)) {
|
||||
return new Response('Render file Not Found', { status: 404 });
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return await buildFileResponse(filePath);
|
||||
} catch (error) {
|
||||
const code = (error as any).code;
|
||||
|
||||
if (code === 'ENOENT') {
|
||||
logger.warn(`Export asset missing on disk ${filePath}, falling back`, error);
|
||||
|
||||
if (isAssetRequest) {
|
||||
return new Response('File Not Found', { status: 404 });
|
||||
}
|
||||
|
||||
const fallbackPath = await resolveEntryFilePath();
|
||||
if (!fallbackPath || this.is404Html(fallbackPath)) {
|
||||
return new Response('Render file Not Found', { status: 404 });
|
||||
}
|
||||
|
||||
try {
|
||||
return await buildFileResponse(fallbackPath);
|
||||
} catch (fallbackError) {
|
||||
logger.error(`Failed to serve fallback entry ${fallbackPath}:`, fallbackError);
|
||||
return new Response('Internal Server Error', { status: 500 });
|
||||
}
|
||||
}
|
||||
|
||||
logger.error(`Failed to serve export asset ${filePath}:`, error);
|
||||
return new Response('Internal Server Error', { status: 500 });
|
||||
}
|
||||
});
|
||||
|
||||
this.handlerRegistered = true;
|
||||
};
|
||||
|
||||
if (app.isReady()) {
|
||||
register();
|
||||
} else {
|
||||
// protocol.handle needs the default session, which is only available after ready
|
||||
|
||||
app.whenReady().then(register);
|
||||
}
|
||||
}
|
||||
|
||||
private isAssetRequest(pathname: string) {
|
||||
const normalizedPathname = pathname.endsWith('/') ? pathname.slice(0, -1) : pathname;
|
||||
const ext = extname(normalizedPathname);
|
||||
|
||||
return (
|
||||
pathname.startsWith('/_next/') ||
|
||||
pathname.startsWith('/static/') ||
|
||||
pathname === '/favicon.ico' ||
|
||||
pathname === '/manifest.json' ||
|
||||
!!ext
|
||||
);
|
||||
}
|
||||
|
||||
private is404Html(filePath: string) {
|
||||
return basename(filePath) === '404.html';
|
||||
}
|
||||
}
|
||||
|
|
@ -25,14 +25,12 @@ const getAllowedOrigin = (rawOrigin?: string) => {
|
|||
};
|
||||
|
||||
export class StaticFileServerManager {
|
||||
private app: App;
|
||||
private fileService: FileService;
|
||||
private httpServer: any = null;
|
||||
private serverPort: number = 0;
|
||||
private isInitialized = false;
|
||||
|
||||
/**
 * Wire up the manager against the app container.
 * Resolves the shared FileService eagerly so later request handling
 * does not need container lookups.
 */
constructor(app: App) {
  this.app = app;
  this.fileService = app.getService(FileService);
  logger.debug('StaticFileServerManager initialized');
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,206 @@
|
|||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { BackendProxyProtocolManager } from '../BackendProxyProtocolManager';
|
||||
|
||||
// Shape of the fetch init the manager builds: standard RequestInit plus the
// undici-specific `duplex` flag required for streaming request bodies.
interface RequestInitWithDuplex extends RequestInit {
  duplex?: 'half';
}

type FetchMock = (input: RequestInfo | URL, init?: RequestInitWithDuplex) => Promise<Response>;

// Hoisted so the mock exists before module imports are evaluated.
// protocolHandlerRef captures the handler passed to protocol.handle so each
// test can invoke it directly with a fake Request.
const { mockProtocol, protocolHandlerRef } = vi.hoisted(() => {
  const protocolHandlerRef = { current: null as any };

  return {
    mockProtocol: {
      handle: vi.fn((_scheme: string, handler: any) => {
        protocolHandlerRef.current = handler;
      }),
    },
    protocolHandlerRef,
  };
});

// Silence logger output during tests.
vi.mock('@/utils/logger', () => ({
  createLogger: () => ({
    debug: vi.fn(),
    error: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
  }),
}));

describe('BackendProxyProtocolManager', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    protocolHandlerRef.current = null;
  });

  it('should rewrite url to remote base and inject Oidc-Auth token', async () => {
    const manager = new BackendProxyProtocolManager();
    const session = { protocol: mockProtocol } as any;

    const fetchMock = vi.fn<FetchMock>(async () => {
      return new Response('ok', {
        headers: { 'Content-Type': 'text/plain' },
        status: 200,
        statusText: 'OK',
      });
    });
    vi.stubGlobal('fetch', fetchMock as any);

    manager.registerWithRemoteBaseUrl(session, {
      getAccessToken: async () => 'token-123',
      getRemoteBaseUrl: async () => 'https://remote.example.com',
      scheme: 'lobe-backend',
      source: 'main',
    });

    const handler = protocolHandlerRef.current;
    expect(mockProtocol.handle).toHaveBeenCalledWith('lobe-backend', expect.any(Function));

    const response = await handler({
      headers: new Headers({ 'Origin': 'app://desktop', 'X-Test': '1' }),
      method: 'GET',
      url: 'lobe-backend://app/trpc/hello?batch=1',
    } as any);

    // Upstream URL keeps path + query, rebased onto the remote origin.
    expect(fetchMock).toHaveBeenCalledTimes(1);
    const [calledUrl, init] = fetchMock.mock.calls[0]!;
    expect(calledUrl).toBe('https://remote.example.com/trpc/hello?batch=1');
    expect(init).toBeDefined();
    if (!init) throw new Error('Expected fetch init to be defined');

    // Auth token is injected and original headers are forwarded.
    expect(init.method).toBe('GET');
    const headers = init.headers as Headers;
    expect(headers.get('Oidc-Auth')).toBe('token-123');
    expect(headers.get('X-Test')).toBe('1');

    // CORS headers mirror the request Origin; X-Src-Url exposes the upstream.
    expect(response.status).toBe(200);
    expect(response.headers.get('X-Src-Url')).toBe('https://remote.example.com/trpc/hello?batch=1');
    expect(response.headers.get('Access-Control-Allow-Origin')).toBe('app://desktop');
    expect(response.headers.get('Access-Control-Allow-Credentials')).toBe('true');
    expect(await response.text()).toBe('ok');
  });

  it('should forward body and set duplex for non-GET requests', async () => {
    const manager = new BackendProxyProtocolManager();
    const session = { protocol: mockProtocol } as any;

    const fetchMock = vi.fn<FetchMock>(async () => new Response('ok', { status: 200 }));
    vi.stubGlobal('fetch', fetchMock as any);

    manager.registerWithRemoteBaseUrl(session, {
      getAccessToken: async () => null,
      getRemoteBaseUrl: async () => 'https://remote.example.com',
      scheme: 'lobe-backend',
    });

    const handler = protocolHandlerRef.current;

    await handler({
      headers: new Headers(),
      method: 'POST',
      // body doesn't have to be a real stream for this unit test; manager only checks truthiness
      body: 'payload' as any,
      url: 'lobe-backend://app/api/upload',
    } as any);

    const [, init] = fetchMock.mock.calls[0]!;
    expect(init).toBeDefined();
    if (!init) throw new Error('Expected fetch init to be defined');

    expect(init.method).toBe('POST');
    expect(init.body).toBe('payload');
    expect(init.duplex).toBe('half');
  });

  it('should return null when remote base url is missing', async () => {
    const manager = new BackendProxyProtocolManager();
    const session = { protocol: mockProtocol } as any;

    const fetchMock = vi.fn();
    vi.stubGlobal('fetch', fetchMock as any);

    manager.registerWithRemoteBaseUrl(session, {
      getAccessToken: async () => 'token',
      getRemoteBaseUrl: async () => null,
      scheme: 'lobe-backend',
    });

    // Without a remote base the handler must decline (null) and never fetch.
    const handler = protocolHandlerRef.current;
    const res = await handler({ method: 'GET', url: 'lobe-backend://app/trpc' } as any);

    expect(res).toBeNull();
    expect(fetchMock).not.toHaveBeenCalled();
  });

  it('should return null when request url is already the remote origin', async () => {
    const manager = new BackendProxyProtocolManager();
    const session = { protocol: mockProtocol } as any;

    const fetchMock = vi.fn();
    vi.stubGlobal('fetch', fetchMock as any);

    manager.registerWithRemoteBaseUrl(session, {
      getAccessToken: async () => null,
      getRemoteBaseUrl: async () => 'https://remote.example.com',
      scheme: 'lobe-backend',
    });

    // Same-origin requests must not be re-proxied (avoids loops).
    const handler = protocolHandlerRef.current;
    const res = await handler({
      method: 'GET',
      url: 'https://remote.example.com/trpc/hello?x=1',
    } as any);

    expect(res).toBeNull();
    expect(fetchMock).not.toHaveBeenCalled();
  });

  it('should return null when rewrite fails (invalid remote base url)', async () => {
    const manager = new BackendProxyProtocolManager();
    const session = { protocol: mockProtocol } as any;

    const fetchMock = vi.fn();
    vi.stubGlobal('fetch', fetchMock as any);

    manager.registerWithRemoteBaseUrl(session, {
      getAccessToken: async () => null,
      getRemoteBaseUrl: async () => 'not-a-url',
      scheme: 'lobe-backend',
    });

    // `new URL('not-a-url')` throws inside rewriteUrl; handler should decline.
    const handler = protocolHandlerRef.current;
    const res = await handler({ method: 'GET', url: 'lobe-backend://app/trpc' } as any);

    expect(res).toBeNull();
    expect(fetchMock).not.toHaveBeenCalled();
  });

  it('should respond with 502 when upstream fetch throws', async () => {
    const manager = new BackendProxyProtocolManager();
    const session = { protocol: mockProtocol } as any;

    const fetchMock = vi.fn(async () => {
      throw new Error('network down');
    });
    vi.stubGlobal('fetch', fetchMock as any);

    manager.registerWithRemoteBaseUrl(session, {
      getAccessToken: async () => null,
      getRemoteBaseUrl: async () => 'https://remote.example.com',
      scheme: 'lobe-backend',
    });

    const handler = protocolHandlerRef.current;
    const response = await handler({
      headers: new Headers(),
      method: 'GET',
      url: 'lobe-backend://app/trpc/hello',
    } as any);

    expect(response.status).toBe(502);
    expect(await response.text()).toContain('Upstream fetch failed');
  });
});
|
||||
|
|
@ -0,0 +1,169 @@
|
|||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { RendererProtocolManager } from '../RendererProtocolManager';
|
||||
|
||||
// Hoisted mocks: must exist before the vi.mock factories below are evaluated.
// protocolHandlerRef captures the handler registered via protocol.handle so
// tests can invoke it directly with a fake Request.
const { mockApp, mockPathExistsSync, mockProtocol, mockReadFile, mockStat, protocolHandlerRef } =
  vi.hoisted(() => {
    const protocolHandlerRef = { current: null as any };

    return {
      mockApp: {
        isReady: vi.fn().mockReturnValue(true),
        whenReady: vi.fn().mockResolvedValue(undefined),
      },
      mockPathExistsSync: vi.fn().mockReturnValue(true),
      mockProtocol: {
        handle: vi.fn((_scheme: string, handler: any) => {
          protocolHandlerRef.current = handler;
        }),
      },
      mockReadFile: vi.fn(),
      mockStat: vi.fn(),
      protocolHandlerRef,
    };
  });

vi.mock('electron', () => ({
  app: mockApp,
  protocol: mockProtocol,
}));

vi.mock('fs-extra', () => ({
  pathExistsSync: mockPathExistsSync,
}));

vi.mock('node:fs/promises', () => ({
  readFile: mockReadFile,
  stat: mockStat,
}));

// Silence logger output during tests.
vi.mock('@/utils/logger', () => ({
  createLogger: () => ({
    debug: vi.fn(),
    error: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
  }),
}));

describe('RendererProtocolManager', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    protocolHandlerRef.current = null;
    // Restore defaults that individual tests may have overridden.
    mockApp.isReady.mockReturnValue(true);
    mockPathExistsSync.mockReturnValue(true);
    mockStat.mockImplementation(async () => ({ size: 1024 }));
  });

  afterEach(() => {
    protocolHandlerRef.current = null;
  });

  it('should fall back to entry HTML when resolve returns 404.html for non-asset routes', async () => {
    const resolveRendererFilePath = vi.fn(async (url: URL) => {
      if (url.pathname === '/missing') return '/export/404.html';
      if (url.pathname === '/') return '/export/index.html';
      return null;
    });
    mockReadFile.mockImplementation(async (path: string) => Buffer.from(`content:${path}`));

    const manager = new RendererProtocolManager({
      nextExportDir: '/export',
      resolveRendererFilePath,
    });

    manager.registerHandler();
    expect(mockProtocol.handle).toHaveBeenCalled();
    const handler = protocolHandlerRef.current;

    const response = await handler({
      headers: new Headers(),
      method: 'GET',
      url: 'app://next/missing',
    } as any);
    const body = await response.text();

    // First resolve hits /missing (404.html is treated as missing), second
    // resolve is the SPA fallback to the entry route '/'.
    expect(resolveRendererFilePath).toHaveBeenCalledTimes(2);
    expect(resolveRendererFilePath.mock.calls[0][0].pathname).toBe('/missing');
    expect(resolveRendererFilePath.mock.calls[1][0].pathname).toBe('/');

    expect(mockReadFile).toHaveBeenCalledWith('/export/index.html');
    expect(body).toContain('/export/index.html');
    expect(response.status).toBe(200);
  });

  it('should serve 404.html when explicitly requested', async () => {
    const resolveRendererFilePath = vi.fn(async (url: URL) => {
      if (url.pathname === '/404.html') return '/export/404.html';
      if (url.pathname === '/') return '/export/index.html';
      return null;
    });
    mockReadFile.mockImplementation(async (path: string) => Buffer.from(`content:${path}`));

    const manager = new RendererProtocolManager({
      nextExportDir: '/export',
      resolveRendererFilePath,
    });

    manager.registerHandler();
    const handler = protocolHandlerRef.current;

    const response = await handler({
      headers: new Headers(),
      method: 'GET',
      url: 'app://next/404.html',
    } as any);

    // Explicit /404.html bypasses the SPA fallback and serves the page itself.
    expect(resolveRendererFilePath).toHaveBeenCalledTimes(1);
    expect(mockReadFile).toHaveBeenCalledWith('/export/404.html');
    expect(response.status).toBe(200);
  });

  it('should return 404 for missing asset requests without fallback', async () => {
    const resolveRendererFilePath = vi.fn(async (_url: URL) => null);

    const manager = new RendererProtocolManager({
      nextExportDir: '/export',
      resolveRendererFilePath,
    });

    manager.registerHandler();
    const handler = protocolHandlerRef.current;

    // Asset paths (has extension) must 404 directly — no entry-HTML fallback.
    const response = await handler({ url: 'app://next/logo.png' } as any);

    expect(resolveRendererFilePath).toHaveBeenCalledTimes(1);
    expect(response.status).toBe(404);
  });

  it('should support Range requests for media assets', async () => {
    const resolveRendererFilePath = vi.fn(async (_url: URL) => '/export/intro-video.mp4');
    const payload = Buffer.from('0123456789');

    mockStat.mockImplementation(async () => ({ size: payload.length }));
    mockReadFile.mockImplementation(async () => payload);

    const manager = new RendererProtocolManager({
      nextExportDir: '/export',
      resolveRendererFilePath,
    });

    manager.registerHandler();
    const handler = protocolHandlerRef.current;

    const response = await handler({
      headers: new Headers({ Range: 'bytes=0-1' }),
      method: 'GET',
      url: 'app://next/_next/static/media/intro-video.mp4',
    } as any);

    // bytes=0-1 of a 10-byte file → 206 with a 2-byte slice.
    expect(response.status).toBe(206);
    expect(response.headers.get('Accept-Ranges')).toBe('bytes');
    expect(response.headers.get('Content-Range')).toBe('bytes 0-1/10');
    expect(response.headers.get('Content-Length')).toBe('2');
    expect(response.headers.get('Content-Type')).toBe('video/mp4');

    const buf = Buffer.from(await response.arrayBuffer());
    expect(buf.toString()).toBe('01');
  });
});
|
||||
87
apps/desktop/src/main/env.ts
Normal file
87
apps/desktop/src/main/env.ts
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
import { createEnv } from '@t3-oss/env-core';
|
||||
import { memoize } from 'es-toolkit';
|
||||
import { z } from 'zod';
|
||||
|
||||
const normalizeEnvString = (input: unknown) => {
|
||||
if (typeof input !== 'string') return undefined;
|
||||
const trimmed = input.trim();
|
||||
if (!trimmed) return undefined;
|
||||
return trimmed;
|
||||
};
|
||||
|
||||
const envBoolean = (defaultValue: boolean) =>
|
||||
z
|
||||
.preprocess((input) => {
|
||||
const str = normalizeEnvString(input);
|
||||
if (!str) return undefined;
|
||||
|
||||
switch (str.toLowerCase()) {
|
||||
case '1':
|
||||
case 'true':
|
||||
case 'yes':
|
||||
case 'y':
|
||||
case 'on': {
|
||||
return true;
|
||||
}
|
||||
|
||||
case '0':
|
||||
case 'false':
|
||||
case 'no':
|
||||
case 'n':
|
||||
case 'off': {
|
||||
return false;
|
||||
}
|
||||
|
||||
default: {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
}, z.boolean().optional())
|
||||
.default(defaultValue);
|
||||
|
||||
const envNumber = (defaultValue: number) =>
|
||||
z
|
||||
.preprocess((input) => {
|
||||
const str = normalizeEnvString(input);
|
||||
if (!str) return undefined;
|
||||
const num = Number(str);
|
||||
if (!Number.isFinite(num)) return undefined;
|
||||
return num;
|
||||
}, z.number().optional())
|
||||
.default(defaultValue);
|
||||
|
||||
/**
|
||||
* Desktop (Electron main process) runtime env access.
|
||||
*
|
||||
* Important:
|
||||
* - Keep schemas tolerant (optional + defaults) to avoid throwing in tests/dev.
|
||||
* - Prefer reading env at call-time (factory) so tests can mutate process.env safely.
|
||||
*/
|
||||
export const getDesktopEnv = memoize(() =>
|
||||
createEnv({
|
||||
server: {
|
||||
DEBUG_VERBOSE: envBoolean(false),
|
||||
|
||||
// keep optional to preserve existing behavior:
|
||||
// - unset NODE_ENV should behave like "not production" in logger runtime paths
|
||||
NODE_ENV: z.enum(['development', 'production', 'test']).optional(),
|
||||
|
||||
// escape hatch: allow testing static renderer in dev via env
|
||||
DESKTOP_RENDERER_STATIC: envBoolean(false),
|
||||
|
||||
// updater
|
||||
UPDATE_CHANNEL: z.string().optional(),
|
||||
|
||||
// mcp client
|
||||
MCP_TOOL_TIMEOUT: envNumber(60_000),
|
||||
|
||||
// cloud server url (can be overridden for selfhost/dev)
|
||||
OFFICIAL_CLOUD_SERVER: z.string().optional().default('https://lobechat.com'),
|
||||
},
|
||||
clientPrefix: 'PUBLIC_',
|
||||
client: {},
|
||||
runtimeEnv: process.env,
|
||||
emptyStringAsUndefined: true,
|
||||
isServer: true,
|
||||
}),
|
||||
);
|
||||
129
apps/desktop/src/main/libs/mcp/client.ts
Normal file
129
apps/desktop/src/main/libs/mcp/client.ts
Normal file
|
|
@ -0,0 +1,129 @@
|
|||
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
||||
import {
|
||||
StdioClientTransport,
|
||||
getDefaultEnvironment,
|
||||
} from '@modelcontextprotocol/sdk/client/stdio.js';
|
||||
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';
|
||||
import type { Transport } from '@modelcontextprotocol/sdk/shared/transport.js';
|
||||
import type { Progress } from '@modelcontextprotocol/sdk/types.js';
|
||||
|
||||
import { getDesktopEnv } from '@/env';
|
||||
|
||||
import type { MCPClientParams, McpPrompt, McpResource, McpTool, ToolCallResult } from './types';
|
||||
|
||||
export class MCPClient {
|
||||
private readonly mcp: Client;
|
||||
|
||||
private transport: Transport;
|
||||
|
||||
constructor(params: MCPClientParams) {
|
||||
this.mcp = new Client({ name: 'lobehub-desktop-mcp-client', version: '1.0.0' });
|
||||
|
||||
switch (params.type) {
|
||||
case 'http': {
|
||||
const headers: Record<string, string> = { ...params.headers };
|
||||
|
||||
if (params.auth) {
|
||||
if (params.auth.type === 'bearer' && params.auth.token) {
|
||||
headers['Authorization'] = `Bearer ${params.auth.token}`;
|
||||
}
|
||||
|
||||
if (params.auth.type === 'oauth2' && params.auth.accessToken) {
|
||||
headers['Authorization'] = `Bearer ${params.auth.accessToken}`;
|
||||
}
|
||||
}
|
||||
|
||||
this.transport = new StreamableHTTPClientTransport(new URL(params.url), {
|
||||
requestInit: { headers },
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case 'stdio': {
|
||||
this.transport = new StdioClientTransport({
|
||||
args: params.args,
|
||||
command: params.command,
|
||||
env: {
|
||||
...getDefaultEnvironment(),
|
||||
...params.env,
|
||||
},
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
// Exhaustive check
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const _never: never = params;
|
||||
throw new Error(`Unsupported MCP connection type: ${(params as any).type}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private isMethodNotFoundError(error: unknown) {
|
||||
const err = error as any;
|
||||
if (!err) return false;
|
||||
if (err.code === -32601) return true;
|
||||
if (typeof err.message === 'string' && err.message.includes('Method not found')) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
async initialize(options: { onProgress?: (progress: Progress) => void } = {}) {
|
||||
await this.mcp.connect(this.transport, { onprogress: options.onProgress });
|
||||
}
|
||||
|
||||
async disconnect() {
|
||||
if (typeof (this.mcp as any).disconnect === 'function') {
|
||||
await (this.mcp as any).disconnect();
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.transport && typeof (this.transport as any).close === 'function') {
|
||||
(this.transport as any).close();
|
||||
}
|
||||
}
|
||||
|
||||
async listTools() {
|
||||
const { tools } = await this.mcp.listTools();
|
||||
return (tools || []) as McpTool[];
|
||||
}
|
||||
|
||||
async listResources() {
|
||||
const { resources } = await this.mcp.listResources();
|
||||
return (resources || []) as McpResource[];
|
||||
}
|
||||
|
||||
async listPrompts() {
|
||||
const { prompts } = await this.mcp.listPrompts();
|
||||
return (prompts || []) as McpPrompt[];
|
||||
}
|
||||
|
||||
async listManifests() {
|
||||
const [tools, prompts, resources] = await Promise.all([
|
||||
this.listTools(),
|
||||
this.listPrompts().catch((error) => {
|
||||
if (this.isMethodNotFoundError(error)) return [] as McpPrompt[];
|
||||
throw error;
|
||||
}),
|
||||
this.listResources().catch((error) => {
|
||||
if (this.isMethodNotFoundError(error)) return [] as McpResource[];
|
||||
throw error;
|
||||
}),
|
||||
]);
|
||||
|
||||
return {
|
||||
prompts: prompts.length === 0 ? undefined : prompts,
|
||||
resources: resources.length === 0 ? undefined : resources,
|
||||
title: this.mcp.getServerVersion()?.title,
|
||||
tools: tools.length === 0 ? undefined : tools,
|
||||
version: this.mcp.getServerVersion()?.version?.replace('v', ''),
|
||||
};
|
||||
}
|
||||
|
||||
async callTool(toolName: string, args: any): Promise<ToolCallResult> {
|
||||
const result = await this.mcp.callTool({ arguments: args, name: toolName }, undefined, {
|
||||
timeout: getDesktopEnv().MCP_TOOL_TIMEOUT,
|
||||
});
|
||||
return result as ToolCallResult;
|
||||
}
|
||||
}
|
||||
120
apps/desktop/src/main/libs/mcp/types.ts
Normal file
120
apps/desktop/src/main/libs/mcp/types.ts
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
export interface McpTool {
|
||||
description: string;
|
||||
inputSchema: {
|
||||
[k: string]: unknown;
|
||||
properties?: unknown | null;
|
||||
type: 'object';
|
||||
};
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface McpResource {
|
||||
description?: string;
|
||||
mimeType?: string;
|
||||
name: string;
|
||||
uri: string;
|
||||
}
|
||||
|
||||
export interface McpPromptArgument {
|
||||
description?: string;
|
||||
name: string;
|
||||
required?: boolean;
|
||||
}
|
||||
|
||||
export interface McpPrompt {
|
||||
arguments?: McpPromptArgument[];
|
||||
description?: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface TextContent {
|
||||
_meta?: any;
|
||||
text: string;
|
||||
type: 'text';
|
||||
}
|
||||
|
||||
export interface ImageContent {
|
||||
_meta?: any;
|
||||
/**
|
||||
* Usually base64 data from MCP server (without data: prefix)
|
||||
*/
|
||||
data: string;
|
||||
mimeType: string;
|
||||
type: 'image';
|
||||
}
|
||||
|
||||
export interface AudioContent {
|
||||
_meta?: any;
|
||||
/**
|
||||
* Usually base64 data from MCP server (without data: prefix)
|
||||
*/
|
||||
data: string;
|
||||
mimeType: string;
|
||||
type: 'audio';
|
||||
}
|
||||
|
||||
export interface ResourceContent {
|
||||
_meta?: any;
|
||||
resource: {
|
||||
_meta?: any;
|
||||
blob?: string;
|
||||
mimeType?: string;
|
||||
text?: string;
|
||||
uri: string;
|
||||
};
|
||||
type: 'resource';
|
||||
}
|
||||
|
||||
export interface ResourceLinkContent {
|
||||
_meta?: any;
|
||||
description?: string;
|
||||
icons?: Array<{
|
||||
mimeType?: string;
|
||||
sizes?: string[];
|
||||
src: string;
|
||||
}>;
|
||||
name: string;
|
||||
title?: string;
|
||||
type: 'resource_link';
|
||||
uri: string;
|
||||
}
|
||||
|
||||
export type ToolCallContent =
|
||||
| TextContent
|
||||
| ImageContent
|
||||
| AudioContent
|
||||
| ResourceContent
|
||||
| ResourceLinkContent;
|
||||
|
||||
export interface ToolCallResult {
|
||||
content: ToolCallContent[];
|
||||
isError?: boolean;
|
||||
structuredContent?: any;
|
||||
}
|
||||
|
||||
export interface AuthConfig {
|
||||
accessToken?: string;
|
||||
token?: string;
|
||||
type: 'none' | 'bearer' | 'oauth2';
|
||||
}
|
||||
|
||||
export interface HttpMCPClientParams {
|
||||
auth?: AuthConfig;
|
||||
headers?: Record<string, string>;
|
||||
name: string;
|
||||
type: 'http';
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface StdioMCPClientParams {
|
||||
args: string[];
|
||||
command: string;
|
||||
env?: Record<string, string>;
|
||||
name: string;
|
||||
type: 'stdio';
|
||||
}
|
||||
|
||||
export type MCPClientParams = HttpMCPClientParams | StdioMCPClientParams;
|
||||
|
||||
|
||||
|
||||
|
|
@ -89,6 +89,7 @@ const createMockApp = () => {
|
|||
},
|
||||
browserManager: {
|
||||
getMainWindow: vi.fn(() => ({
|
||||
broadcast: vi.fn(),
|
||||
loadUrl: vi.fn(),
|
||||
show: vi.fn(),
|
||||
})),
|
||||
|
|
|
|||
|
|
@ -83,8 +83,8 @@ export class MacOSMenu extends BaseMenuPlatform implements IMenuPlatform {
|
|||
accelerator: 'Command+,',
|
||||
click: async () => {
|
||||
const mainWindow = this.app.browserManager.getMainWindow();
|
||||
await mainWindow.loadUrl('/settings');
|
||||
mainWindow.show();
|
||||
mainWindow.broadcast('navigate', { path: '/settings' });
|
||||
},
|
||||
label: t('macOS.preferences'),
|
||||
},
|
||||
|
|
@ -341,8 +341,8 @@ export class MacOSMenu extends BaseMenuPlatform implements IMenuPlatform {
|
|||
{
|
||||
click: async () => {
|
||||
const mainWindow = this.app.browserManager.getMainWindow();
|
||||
await mainWindow.loadUrl('/settings');
|
||||
mainWindow.show();
|
||||
mainWindow.broadcast('navigate', { path: '/settings' });
|
||||
},
|
||||
label: t('file.preferences'),
|
||||
},
|
||||
|
|
|
|||
|
|
@ -24,6 +24,17 @@ describe.skipIf(process.platform !== 'darwin')('MacOSSearchServiceImpl Integrati
|
|||
return false;
|
||||
};
|
||||
|
||||
const ensureResultsOrSkipAssertions = (results: unknown[], hint: string) => {
|
||||
if (results.length > 0) return true;
|
||||
// eslint-disable-next-line no-console
|
||||
console.warn(
|
||||
`⚠️ Spotlight returned 0 results for "${hint}". This usually means indexing is incomplete/disabled. Skipping strict assertions.`,
|
||||
);
|
||||
// Keep a minimal assertion so we still validate the call didn't throw.
|
||||
expect(Array.isArray(results)).toBe(true);
|
||||
return false;
|
||||
};
|
||||
|
||||
describe('checkSearchServiceStatus', () => {
|
||||
it('should verify Spotlight is available on macOS', async () => {
|
||||
const isAvailable = await searchService.checkSearchServiceStatus();
|
||||
|
|
@ -40,7 +51,7 @@ describe.skipIf(process.platform !== 'darwin')('MacOSSearchServiceImpl Integrati
|
|||
onlyIn: repoRoot,
|
||||
});
|
||||
|
||||
if (!ensureResults(results, 'package.json search')) return;
|
||||
if (!ensureResultsOrSkipAssertions(results, 'package.json')) return;
|
||||
|
||||
// Should find at least one package.json
|
||||
const packageJson = results.find((r) => r.name === 'package.json');
|
||||
|
|
@ -55,7 +66,7 @@ describe.skipIf(process.platform !== 'darwin')('MacOSSearchServiceImpl Integrati
|
|||
limit: 10,
|
||||
onlyIn: repoRoot,
|
||||
});
|
||||
if (!ensureResults(results, 'README search')) return;
|
||||
if (!ensureResultsOrSkipAssertions(results, 'README')) return;
|
||||
|
||||
// Should contain markdown files
|
||||
const mdFile = results.find((r) => r.type === 'md');
|
||||
|
|
@ -70,7 +81,7 @@ describe.skipIf(process.platform !== 'darwin')('MacOSSearchServiceImpl Integrati
|
|||
onlyIn: repoRoot,
|
||||
});
|
||||
|
||||
if (!ensureResults(results, 'TypeScript file search')) return;
|
||||
if (!ensureResultsOrSkipAssertions(results, 'macOS')) return;
|
||||
|
||||
// Should find the macOS.ts implementation file
|
||||
const macOSFile = results.find((r) => r.name.includes('macOS') && r.type === 'ts');
|
||||
|
|
@ -112,7 +123,7 @@ describe.skipIf(process.platform !== 'darwin')('MacOSSearchServiceImpl Integrati
|
|||
onlyIn: repoRoot,
|
||||
});
|
||||
|
||||
if (!ensureResults(results, 'test file search')) return;
|
||||
if (!ensureResultsOrSkipAssertions(results, 'test.ts')) return;
|
||||
|
||||
// Should find test files (can be in __tests__ directory or co-located with source files)
|
||||
const testFile = results.find((r) => r.name.endsWith('.test.ts'));
|
||||
|
|
@ -230,7 +241,7 @@ describe.skipIf(process.platform !== 'darwin')('MacOSSearchServiceImpl Integrati
|
|||
onlyIn: repoRoot,
|
||||
});
|
||||
|
||||
if (!ensureResults(results, 'file metadata read')) return;
|
||||
if (!ensureResultsOrSkipAssertions(results, 'package.json (metadata)')) return;
|
||||
|
||||
const file = results[0];
|
||||
|
||||
|
|
@ -288,7 +299,7 @@ describe.skipIf(process.platform !== 'darwin')('MacOSSearchServiceImpl Integrati
|
|||
onlyIn: repoRoot,
|
||||
});
|
||||
|
||||
if (!ensureResults(results, 'fuzzy search accuracy')) return;
|
||||
if (!ensureResultsOrSkipAssertions(results, 'LocalFile')) return;
|
||||
|
||||
// Should find LocalFileCtr.ts or similar files
|
||||
const found = results.some(
|
||||
|
|
@ -328,8 +339,8 @@ describe.skipIf(process.platform !== 'darwin')('MacOSSearchServiceImpl Integrati
|
|||
});
|
||||
|
||||
// Both searches should find similar files
|
||||
if (!ensureResults(lowerResults, 'case-insensitive search (lower)')) return;
|
||||
if (!ensureResults(upperResults, 'case-insensitive search (upper)')) return;
|
||||
if (!ensureResultsOrSkipAssertions(lowerResults, 'readme')) return;
|
||||
if (!ensureResultsOrSkipAssertions(upperResults, 'README (case-insensitive)')) return;
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
import { isDev } from '@/const/env';
|
||||
import { getDesktopEnv } from '@/env';
|
||||
|
||||
// 更新频道(stable, beta, alpha 等)
|
||||
export const UPDATE_CHANNEL = process.env.UPDATE_CHANNEL;
|
||||
export const UPDATE_CHANNEL = getDesktopEnv().UPDATE_CHANNEL;
|
||||
|
||||
export const updaterConfig = {
|
||||
// 应用更新配置
|
||||
|
|
|
|||
|
|
@ -1,10 +1,12 @@
|
|||
import debug from 'debug';
|
||||
import electronLog from 'electron-log';
|
||||
|
||||
import { getDesktopEnv } from '@/env';
|
||||
|
||||
// 配置 electron-log
|
||||
electronLog.transports.file.level = 'info'; // 生产环境记录 info 及以上级别
|
||||
electronLog.transports.console.level =
|
||||
process.env.NODE_ENV === 'development'
|
||||
getDesktopEnv().NODE_ENV === 'development'
|
||||
? 'debug' // 开发环境显示更多日志
|
||||
: 'warn'; // 生产环境只显示警告和错误
|
||||
|
||||
|
|
@ -17,14 +19,14 @@ export const createLogger = (namespace: string) => {
|
|||
debugLogger(message, ...args);
|
||||
},
|
||||
error: (message, ...args) => {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
if (getDesktopEnv().NODE_ENV === 'production') {
|
||||
electronLog.error(message, ...args);
|
||||
} else {
|
||||
console.error(message, ...args);
|
||||
}
|
||||
},
|
||||
info: (message, ...args) => {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
if (getDesktopEnv().NODE_ENV === 'production') {
|
||||
electronLog.info(`[${namespace}]`, message, ...args);
|
||||
}
|
||||
|
||||
|
|
@ -32,12 +34,12 @@ export const createLogger = (namespace: string) => {
|
|||
},
|
||||
verbose: (message, ...args) => {
|
||||
electronLog.verbose(message, ...args);
|
||||
if (process.env.DEBUG_VERBOSE) {
|
||||
if (getDesktopEnv().DEBUG_VERBOSE) {
|
||||
debugLogger(`VERBOSE: ${message}`, ...args);
|
||||
}
|
||||
},
|
||||
warn: (message, ...args) => {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
if (getDesktopEnv().NODE_ENV === 'production') {
|
||||
electronLog.warn(message, ...args);
|
||||
}
|
||||
debugLogger(`WARN: ${message}`, ...args);
|
||||
|
|
|
|||
26
apps/desktop/src/main/utils/mime.ts
Normal file
26
apps/desktop/src/main/utils/mime.ts
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
import { extname } from 'node:path';
|
||||
|
||||
export const getExportMimeType = (filePath: string) => {
|
||||
const ext = extname(filePath).toLowerCase();
|
||||
|
||||
const map: Record<string, string> = {
|
||||
'.css': 'text/css; charset=utf-8',
|
||||
'.gif': 'image/gif',
|
||||
'.html': 'text/html; charset=utf-8',
|
||||
'.ico': 'image/x-icon',
|
||||
'.jpeg': 'image/jpeg',
|
||||
'.jpg': 'image/jpeg',
|
||||
'.js': 'application/javascript; charset=utf-8',
|
||||
'.json': 'application/json; charset=utf-8',
|
||||
'.map': 'application/json; charset=utf-8',
|
||||
'.mp4': 'video/mp4',
|
||||
'.png': 'image/png',
|
||||
'.svg': 'image/svg+xml; charset=utf-8',
|
||||
'.txt': 'text/plain; charset=utf-8',
|
||||
'.webp': 'image/webp',
|
||||
'.woff': 'font/woff',
|
||||
'.woff2': 'font/woff2',
|
||||
};
|
||||
|
||||
return map[ext];
|
||||
};
|
||||
|
|
@ -1,425 +0,0 @@
|
|||
// copy from https://github.com/kirill-konshin/next-electron-rsc
|
||||
import { serialize as serializeCookie } from 'cookie';
|
||||
import { type Protocol, type Session } from 'electron';
|
||||
// @ts-ignore
|
||||
import type { NextConfig } from 'next';
|
||||
// @ts-ignore
|
||||
import type NextNodeServer from 'next/dist/server/next-server';
|
||||
import assert from 'node:assert';
|
||||
import { IncomingMessage, ServerResponse } from 'node:http';
|
||||
import { Socket } from 'node:net';
|
||||
import path from 'node:path';
|
||||
import { parse } from 'node:url';
|
||||
import resolve from 'resolve';
|
||||
import { parse as parseCookie, splitCookiesString } from 'set-cookie-parser';
|
||||
|
||||
import { LOCAL_STORAGE_URL_PREFIX } from '@/const/dir';
|
||||
import { isDev } from '@/const/env';
|
||||
import { createLogger } from '@/utils/logger';
|
||||
|
||||
// 创建日志记录器
|
||||
const logger = createLogger('utils:next-electron-rsc');
|
||||
|
||||
// 定义自定义处理器类型
|
||||
export type CustomRequestHandler = (request: Request) => Promise<Response | null | undefined>;
|
||||
|
||||
export const createRequest = async ({
|
||||
socket,
|
||||
request,
|
||||
session,
|
||||
}: {
|
||||
request: Request;
|
||||
session: Session;
|
||||
socket: Socket;
|
||||
}): Promise<IncomingMessage> => {
|
||||
const req = new IncomingMessage(socket);
|
||||
|
||||
const url = new URL(request.url);
|
||||
|
||||
// Normal Next.js URL does not contain schema and host/port, otherwise endless loops due to butchering of schema by normalizeRepeatedSlashes in resolve-routes
|
||||
req.url = url.pathname + (url.search || '');
|
||||
req.method = request.method;
|
||||
|
||||
request.headers.forEach((value, key) => {
|
||||
req.headers[key] = value;
|
||||
});
|
||||
|
||||
try {
|
||||
// @see https://github.com/electron/electron/issues/39525#issue-1852825052
|
||||
const cookies = await session.cookies.get({
|
||||
url: request.url,
|
||||
// domain: url.hostname,
|
||||
// path: url.pathname,
|
||||
// `secure: true` Cookies should not be sent via http
|
||||
// secure: url.protocol === 'http:' ? false : undefined,
|
||||
// theoretically not possible to implement sameSite because we don't know the url
|
||||
// of the website that is requesting the resource
|
||||
});
|
||||
|
||||
if (cookies.length) {
|
||||
const cookiesHeader = [];
|
||||
|
||||
for (const cookie of cookies) {
|
||||
const { name, value } = cookie;
|
||||
cookiesHeader.push(serializeCookie(name, value));
|
||||
}
|
||||
|
||||
req.headers.cookie = cookiesHeader.join('; ');
|
||||
}
|
||||
} catch (e) {
|
||||
throw new Error('Failed to parse cookies', { cause: e });
|
||||
}
|
||||
|
||||
if (request.body) {
|
||||
req.push(Buffer.from(await request.arrayBuffer()));
|
||||
}
|
||||
|
||||
req.push(null);
|
||||
req.complete = true;
|
||||
|
||||
return req;
|
||||
};
|
||||
|
||||
export class ReadableServerResponse extends ServerResponse {
|
||||
private responsePromise: Promise<Response>;
|
||||
|
||||
constructor(req: IncomingMessage) {
|
||||
super(req);
|
||||
|
||||
this.responsePromise = new Promise<Response>((resolve) => {
|
||||
const readableStream = new ReadableStream({
|
||||
cancel: () => {},
|
||||
pull: () => {
|
||||
this.emit('drain');
|
||||
},
|
||||
start: (controller) => {
|
||||
let onData;
|
||||
|
||||
this.on(
|
||||
'data',
|
||||
(onData = (chunk) => {
|
||||
controller.enqueue(chunk);
|
||||
}),
|
||||
);
|
||||
|
||||
this.once('end', (chunk) => {
|
||||
controller.enqueue(chunk);
|
||||
controller.close();
|
||||
this.off('data', onData);
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
this.once('writeHead', (statusCode) => {
|
||||
resolve(
|
||||
new Response(readableStream, {
|
||||
headers: this.getHeaders() as any,
|
||||
status: statusCode,
|
||||
statusText: this.statusMessage,
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
write(chunk: any, ...args): boolean {
|
||||
this.emit('data', chunk);
|
||||
return super.write(chunk, ...args);
|
||||
}
|
||||
|
||||
end(chunk: any, ...args): this {
|
||||
this.emit('end', chunk);
|
||||
return super.end(chunk, ...args);
|
||||
}
|
||||
|
||||
writeHead(statusCode: number, ...args: any): this {
|
||||
this.emit('writeHead', statusCode);
|
||||
return super.writeHead(statusCode, ...args);
|
||||
}
|
||||
|
||||
getResponse() {
|
||||
return this.responsePromise;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* https://nextjs.org/docs/pages/building-your-application/configuring/custom-server
|
||||
* https://github.com/vercel/next.js/pull/68167/files#diff-d0d8b7158bcb066cdbbeb548a29909fe8dc4e98f682a6d88654b1684e523edac
|
||||
* https://github.com/vercel/next.js/blob/canary/examples/custom-server/server.ts
|
||||
*
|
||||
* @param {string} standaloneDir
|
||||
* @param {string} localhostUrl
|
||||
* @param {import('electron').Protocol} protocol
|
||||
* @param {boolean} debug
|
||||
*/
|
||||
export function createHandler({
|
||||
standaloneDir,
|
||||
localhostUrl,
|
||||
protocol,
|
||||
debug = false,
|
||||
}: {
|
||||
debug?: boolean;
|
||||
localhostUrl: string;
|
||||
protocol: Protocol;
|
||||
standaloneDir: string;
|
||||
}) {
|
||||
assert(standaloneDir, 'standaloneDir is required');
|
||||
assert(protocol, 'protocol is required');
|
||||
|
||||
// 存储自定义请求处理器的数组
|
||||
const customHandlers: CustomRequestHandler[] = [];
|
||||
|
||||
// 注册自定义请求处理器的方法 - 在开发和生产环境中都提供此功能
|
||||
function registerCustomHandler(handler: CustomRequestHandler) {
|
||||
logger.debug('Registering custom request handler');
|
||||
customHandlers.push(handler);
|
||||
return () => {
|
||||
const index = customHandlers.indexOf(handler);
|
||||
if (index !== -1) {
|
||||
logger.debug('Unregistering custom request handler');
|
||||
customHandlers.splice(index, 1);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let registerProtocolHandle = false;
|
||||
let interceptorCount = 0; // 追踪活跃的拦截器数量
|
||||
|
||||
protocol.registerSchemesAsPrivileged([
|
||||
{
|
||||
privileges: {
|
||||
secure: true,
|
||||
standard: true,
|
||||
supportFetchAPI: true,
|
||||
},
|
||||
scheme: 'http',
|
||||
},
|
||||
]);
|
||||
logger.debug('Registered HTTP scheme as privileged');
|
||||
|
||||
// 初始化 Next.js 应用(仅在生产环境中使用)
|
||||
let app: NextNodeServer | null = null;
|
||||
let handler: any = null;
|
||||
let preparePromise: Promise<void> | null = null;
|
||||
|
||||
if (!isDev) {
|
||||
logger.info('Initializing Next.js app for production');
|
||||
|
||||
// https://github.com/lobehub/lobe-chat/pull/9851
|
||||
// @ts-ignore
|
||||
// noinspection JSConstantReassignment
|
||||
process.env.NODE_ENV = 'production';
|
||||
const next = require(resolve.sync('next', { basedir: standaloneDir }));
|
||||
|
||||
// @see https://github.com/vercel/next.js/issues/64031#issuecomment-2078708340
|
||||
const config = require(path.join(standaloneDir, '.next', 'required-server-files.json'))
|
||||
.config as NextConfig;
|
||||
process.env.__NEXT_PRIVATE_STANDALONE_CONFIG = JSON.stringify(config);
|
||||
|
||||
app = next({ dir: standaloneDir }) as NextNodeServer;
|
||||
|
||||
handler = app.getRequestHandler();
|
||||
preparePromise = app.prepare();
|
||||
} else {
|
||||
logger.debug('Starting in development mode');
|
||||
}
|
||||
|
||||
// 通用的请求处理函数 - 开发和生产环境共用
|
||||
const handleRequest = async (
|
||||
request: Request,
|
||||
session: Session,
|
||||
socket: Socket,
|
||||
): Promise<Response> => {
|
||||
try {
|
||||
// 检查是否是本地文件服务请求,如果是则跳过处理
|
||||
const url = new URL(request.url);
|
||||
if (url.pathname.startsWith(LOCAL_STORAGE_URL_PREFIX + '/')) {
|
||||
if (debug) logger.debug(`Skipping local file service request: ${request.url}`);
|
||||
// 直接使用 fetch 转发请求到本地文件服务
|
||||
return fetch(request);
|
||||
}
|
||||
|
||||
// 先尝试使用自定义处理器处理请求
|
||||
for (const customHandler of customHandlers) {
|
||||
try {
|
||||
const response = await customHandler(request);
|
||||
if (response) {
|
||||
if (debug) logger.debug(`Custom handler processed: ${request.url}`);
|
||||
return response;
|
||||
}
|
||||
} catch (error) {
|
||||
if (debug) logger.error(`Custom handler error: ${error}`);
|
||||
// 继续尝试下一个处理器
|
||||
}
|
||||
}
|
||||
|
||||
// 创建 Node.js 请求对象
|
||||
const req = await createRequest({ request, session, socket });
|
||||
// 创建可读取响应的 Response 对象
|
||||
const res = new ReadableServerResponse(req);
|
||||
|
||||
if (isDev) {
|
||||
// 开发环境:转发请求到开发服务器
|
||||
if (debug) logger.debug(`Forwarding request to dev server: ${request.url}`);
|
||||
|
||||
// 修改 URL 以指向开发服务器
|
||||
const devUrl = new URL(req.url, localhostUrl);
|
||||
|
||||
// 使用 node:http 模块发送请求到开发服务器
|
||||
const http = require('node:http');
|
||||
const devReq = http.request(
|
||||
{
|
||||
headers: req.headers,
|
||||
hostname: devUrl.hostname,
|
||||
method: req.method,
|
||||
path: devUrl.pathname + (devUrl.search || ''),
|
||||
port: devUrl.port,
|
||||
},
|
||||
(devRes) => {
|
||||
// 设置响应状态码和头部
|
||||
res.statusCode = devRes.statusCode;
|
||||
res.statusMessage = devRes.statusMessage;
|
||||
|
||||
// 复制响应头
|
||||
Object.keys(devRes.headers).forEach((key) => {
|
||||
res.setHeader(key, devRes.headers[key]);
|
||||
});
|
||||
|
||||
// 流式传输响应内容
|
||||
devRes.pipe(res);
|
||||
},
|
||||
);
|
||||
|
||||
// 处理错误
|
||||
devReq.on('error', (err) => {
|
||||
if (debug) logger.error(`Error forwarding request: ${err}`);
|
||||
});
|
||||
|
||||
// 传输请求体
|
||||
req.pipe(devReq);
|
||||
} else {
|
||||
// 生产环境:使用 Next.js 处理请求
|
||||
if (debug) logger.debug(`Processing with Next.js handler: ${request.url}`);
|
||||
|
||||
// 确保 Next.js 已准备就绪
|
||||
if (preparePromise) await preparePromise;
|
||||
|
||||
const url = parse(req.url, true);
|
||||
handler(req, res, url);
|
||||
}
|
||||
|
||||
// 获取 Response 对象
|
||||
const response = await res.getResponse();
|
||||
|
||||
// 处理 cookies(两种环境通用处理)
|
||||
try {
|
||||
const cookies = parseCookie(
|
||||
response.headers.getSetCookie().reduce((r, c) => {
|
||||
return [...r, ...splitCookiesString(c)];
|
||||
}, []),
|
||||
);
|
||||
|
||||
for (const cookie of cookies) {
|
||||
let expirationDate: number | undefined;
|
||||
|
||||
if (cookie.expires) {
|
||||
// expires 是 Date 对象,转换为秒级时间戳
|
||||
expirationDate = Math.floor(cookie.expires.getTime() / 1000);
|
||||
} else if (cookie.maxAge) {
|
||||
// maxAge 是秒数,计算过期时间戳
|
||||
expirationDate = Math.floor(Date.now() / 1000) + cookie.maxAge;
|
||||
}
|
||||
|
||||
// 如果都没有,则为 session cookie,不设置 expirationDate
|
||||
|
||||
// 检查是否已过期
|
||||
if (expirationDate && expirationDate < Math.floor(Date.now() / 1000)) {
|
||||
await session.cookies.remove(request.url, cookie.name);
|
||||
continue;
|
||||
}
|
||||
|
||||
await session.cookies.set({
|
||||
domain: cookie.domain,
|
||||
expirationDate,
|
||||
httpOnly: cookie.httpOnly,
|
||||
name: cookie.name,
|
||||
path: cookie.path,
|
||||
secure: cookie.secure,
|
||||
url: request.url,
|
||||
value: cookie.value,
|
||||
} as any);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error('Failed to set cookies', e);
|
||||
}
|
||||
|
||||
if (debug) logger.debug(`Request processed: ${request.url}, status: ${response.status}`);
|
||||
return response;
|
||||
} catch (e) {
|
||||
if (debug) logger.error(`Error handling request: ${e}`);
|
||||
return new Response(e.message, { status: 500 });
|
||||
}
|
||||
};
|
||||
|
||||
// 创建拦截器函数
|
||||
const createInterceptor = ({ session }: { session: Session }) => {
|
||||
assert(session, 'Session is required');
|
||||
logger.debug(
|
||||
`Creating interceptor with session in ${isDev ? 'development' : 'production'} mode`,
|
||||
);
|
||||
|
||||
const socket = new Socket();
|
||||
interceptorCount++; // 增加拦截器计数
|
||||
|
||||
const closeSocket = () => socket.end();
|
||||
|
||||
process.on('SIGTERM', () => closeSocket);
|
||||
process.on('SIGINT', () => closeSocket);
|
||||
|
||||
if (!registerProtocolHandle) {
|
||||
logger.debug(
|
||||
`Registering HTTP protocol handler in ${isDev ? 'development' : 'production'} mode`,
|
||||
);
|
||||
protocol.handle('http', async (request) => {
|
||||
if (!isDev) {
|
||||
// 检查是否是本地文件服务请求,如果是则允许通过
|
||||
const isLocalhost = request.url.startsWith(localhostUrl);
|
||||
|
||||
const url = new URL(request.url);
|
||||
const isLocalIP =
|
||||
request.url.startsWith('http://127.0.0.1:') ||
|
||||
request.url.startsWith('http://localhost:');
|
||||
const isLocalFileService = url.pathname.startsWith(LOCAL_STORAGE_URL_PREFIX + '/');
|
||||
|
||||
const valid = isLocalhost || (isLocalIP && isLocalFileService);
|
||||
if (!valid) {
|
||||
throw new Error('External HTTP not supported, use HTTPS');
|
||||
}
|
||||
}
|
||||
|
||||
return handleRequest(request, session, socket);
|
||||
});
|
||||
registerProtocolHandle = true;
|
||||
}
|
||||
|
||||
logger.debug(`Active interceptors count: ${interceptorCount}`);
|
||||
|
||||
return function stopIntercept() {
|
||||
interceptorCount--; // 减少拦截器计数
|
||||
logger.debug(`Stopping interceptor, remaining count: ${interceptorCount}`);
|
||||
|
||||
// 只有当没有活跃的拦截器时才取消注册协议处理器
|
||||
if (registerProtocolHandle && interceptorCount === 0) {
|
||||
logger.debug('Unregistering HTTP protocol handler (no active interceptors)');
|
||||
protocol.unhandle('http');
|
||||
registerProtocolHandle = false;
|
||||
}
|
||||
|
||||
process.off('SIGTERM', () => closeSocket);
|
||||
process.off('SIGINT', () => closeSocket);
|
||||
closeSocket();
|
||||
};
|
||||
};
|
||||
|
||||
return { createInterceptor, registerCustomHandler };
|
||||
}
|
||||
5
apps/desktop/src/main/utils/path.ts
Normal file
5
apps/desktop/src/main/utils/path.ts
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
import { pathToFileURL } from 'node:url';
|
||||
|
||||
export const filePathToAppUrl = (filePath: string) => {
|
||||
return `app://lobehub.com${pathToFileURL(filePath).pathname}`;
|
||||
};
|
||||
|
|
@ -51,10 +51,24 @@ describe('setupElectronApi', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should expose lobeEnv with darwinMajorVersion', () => {
|
||||
setupElectronApi();
|
||||
|
||||
const call = mockContextBridgeExposeInMainWorld.mock.calls.find((i) => i[0] === 'lobeEnv');
|
||||
expect(call).toBeTruthy();
|
||||
const exposedEnv = call?.[1] as any;
|
||||
|
||||
expect(Object.prototype.hasOwnProperty.call(exposedEnv, 'darwinMajorVersion')).toBe(true);
|
||||
expect(
|
||||
exposedEnv.darwinMajorVersion === undefined ||
|
||||
typeof exposedEnv.darwinMajorVersion === 'number',
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should expose both APIs in correct order', () => {
|
||||
setupElectronApi();
|
||||
|
||||
expect(mockContextBridgeExposeInMainWorld).toHaveBeenCalledTimes(2);
|
||||
expect(mockContextBridgeExposeInMainWorld).toHaveBeenCalledTimes(3);
|
||||
|
||||
// First call should be for 'electron'
|
||||
expect(mockContextBridgeExposeInMainWorld.mock.calls[0][0]).toBe('electron');
|
||||
|
|
@ -66,6 +80,9 @@ describe('setupElectronApi', () => {
|
|||
invoke: mockInvoke,
|
||||
onStreamInvoke: mockOnStreamInvoke,
|
||||
});
|
||||
|
||||
// Third call should be for 'lobeEnv'
|
||||
expect(mockContextBridgeExposeInMainWorld.mock.calls[2][0]).toBe('lobeEnv');
|
||||
});
|
||||
|
||||
it('should handle errors when exposing electron API fails', () => {
|
||||
|
|
@ -77,8 +94,8 @@ describe('setupElectronApi', () => {
|
|||
setupElectronApi();
|
||||
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(error);
|
||||
// Should still try to expose electronAPI even if first one fails
|
||||
expect(mockContextBridgeExposeInMainWorld).toHaveBeenCalledTimes(2);
|
||||
// Should still try to expose electronAPI and lobeEnv even if first one fails
|
||||
expect(mockContextBridgeExposeInMainWorld).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('should continue execution if exposing electronAPI fails', () => {
|
||||
|
|
@ -136,7 +153,7 @@ describe('setupElectronApi', () => {
|
|||
setupElectronApi();
|
||||
setupElectronApi();
|
||||
|
||||
// Should be called 4 times total (2 per setup call)
|
||||
expect(mockContextBridgeExposeInMainWorld).toHaveBeenCalledTimes(4);
|
||||
// Should be called 6 times total (3 per setup call)
|
||||
expect(mockContextBridgeExposeInMainWorld).toHaveBeenCalledTimes(6);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -19,4 +19,12 @@ export const setupElectronApi = () => {
|
|||
invoke,
|
||||
onStreamInvoke,
|
||||
});
|
||||
|
||||
const os = require('node:os');
|
||||
const osInfo = os.release();
|
||||
const darwinMajorVersion = osInfo.split('.')[0];
|
||||
|
||||
contextBridge.exposeInMainWorld('lobeEnv', {
|
||||
darwinMajorVersion: Number(darwinMajorVersion),
|
||||
});
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
import { DispatchInvoke } from '@lobechat/electron-client-ipc';
|
||||
import { ipcRenderer } from 'electron';
|
||||
|
||||
type IpcInvoke = <T = unknown>(event: string, ...data: unknown[]) => Promise<T>;
|
||||
|
||||
/**
|
||||
* Client-side method to invoke electron main process
|
||||
*/
|
||||
export const invoke: DispatchInvoke = async (event, ...data) => ipcRenderer.invoke(event, ...data);
|
||||
export const invoke: IpcInvoke = async (event, ...data) => ipcRenderer.invoke(event, ...data);
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import type { ProxyTRPCRequestParams } from '@lobechat/electron-client-ipc';
|
||||
import type { StreamInvokeRequestParams } from '@lobechat/electron-client-ipc';
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
// Mock electron module
|
||||
|
|
@ -29,7 +29,7 @@ describe('onStreamInvoke', () => {
|
|||
});
|
||||
|
||||
it('should set up stream listeners and send start event', () => {
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
headers: { 'content-type': 'application/json' },
|
||||
method: 'POST',
|
||||
urlPath: '/trpc/lambda/test.endpoint',
|
||||
|
|
@ -77,7 +77,7 @@ describe('onStreamInvoke', () => {
|
|||
onResponse: vi.fn(),
|
||||
};
|
||||
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
headers: {},
|
||||
method: 'GET',
|
||||
urlPath: '/trpc/test',
|
||||
|
|
@ -105,7 +105,7 @@ describe('onStreamInvoke', () => {
|
|||
onResponse: vi.fn(),
|
||||
};
|
||||
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
headers: {},
|
||||
method: 'GET',
|
||||
urlPath: '/trpc/test',
|
||||
|
|
@ -137,7 +137,7 @@ describe('onStreamInvoke', () => {
|
|||
onResponse: vi.fn(),
|
||||
};
|
||||
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
headers: {},
|
||||
method: 'GET',
|
||||
urlPath: '/trpc/test',
|
||||
|
|
@ -178,7 +178,7 @@ describe('onStreamInvoke', () => {
|
|||
onResponse: vi.fn(),
|
||||
};
|
||||
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
headers: {},
|
||||
method: 'GET',
|
||||
urlPath: '/trpc/test',
|
||||
|
|
@ -220,7 +220,7 @@ describe('onStreamInvoke', () => {
|
|||
onResponse: vi.fn(),
|
||||
};
|
||||
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
headers: {},
|
||||
method: 'GET',
|
||||
urlPath: '/trpc/test',
|
||||
|
|
@ -254,7 +254,7 @@ describe('onStreamInvoke', () => {
|
|||
onResponse: vi.fn(),
|
||||
};
|
||||
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
headers: {},
|
||||
method: 'GET',
|
||||
urlPath: '/trpc/test',
|
||||
|
|
@ -289,7 +289,7 @@ describe('onStreamInvoke', () => {
|
|||
onResponse: vi.fn(),
|
||||
};
|
||||
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
body: JSON.stringify({
|
||||
filters: { active: true },
|
||||
query: 'complex query',
|
||||
|
|
@ -316,7 +316,7 @@ describe('onStreamInvoke', () => {
|
|||
onResponse: vi.fn(),
|
||||
};
|
||||
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
headers: {},
|
||||
method: 'GET',
|
||||
urlPath: '/trpc/test',
|
||||
|
|
@ -346,7 +346,7 @@ describe('onStreamInvoke', () => {
|
|||
onResponse: vi.fn(),
|
||||
};
|
||||
|
||||
const params: ProxyTRPCRequestParams = {
|
||||
const params: StreamInvokeRequestParams = {
|
||||
headers: {},
|
||||
method: 'GET',
|
||||
urlPath: '/trpc/test',
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import type { ProxyTRPCRequestParams } from '@lobechat/electron-client-ipc';
|
||||
import type { StreamInvokeRequestParams } from '@lobechat/electron-client-ipc';
|
||||
import { ipcRenderer } from 'electron';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
|
|
@ -21,7 +21,7 @@ export interface StreamerCallbacks {
|
|||
* @param callbacks The callbacks to handle stream events.
|
||||
*/
|
||||
export const onStreamInvoke = (
|
||||
params: ProxyTRPCRequestParams,
|
||||
params: StreamInvokeRequestParams,
|
||||
callbacks: StreamerCallbacks,
|
||||
): (() => void) => {
|
||||
const requestId = uuid();
|
||||
|
|
|
|||
|
|
@ -3,7 +3,9 @@
|
|||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"noEmit": false,
|
||||
"target": "ESNext",
|
||||
"declaration": true,
|
||||
"emitDeclarationOnly": true,
|
||||
"esModuleInterop": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
|
|
@ -13,9 +15,21 @@
|
|||
"moduleResolution": "bundler",
|
||||
"resolveJsonModule": true,
|
||||
"paths": {
|
||||
"@/*": ["./src/main/*"],
|
||||
"~common/*": ["./src/common/*"]
|
||||
"@/*": [
|
||||
"./src/main/*"
|
||||
],
|
||||
"~common/*": [
|
||||
"./src/common/*"
|
||||
],
|
||||
"*": [
|
||||
"./*"
|
||||
]
|
||||
}
|
||||
},
|
||||
"include": ["src/main/**/*", "src/preload/**/*", "src/common/**/*", "electron-builder.js"]
|
||||
}
|
||||
"include": [
|
||||
"src/main/**/*",
|
||||
"src/preload/**/*",
|
||||
"src/common/**/*",
|
||||
"electron-builder.js"
|
||||
]
|
||||
}
|
||||
134
scripts/electronWorkflow/buildNextApp.mts
Normal file
134
scripts/electronWorkflow/buildNextApp.mts
Normal file
|
|
@ -0,0 +1,134 @@
|
|||
import fs from 'fs-extra';
|
||||
import { execSync } from 'node:child_process';
|
||||
import path from 'node:path';
|
||||
|
||||
import { runPrebuild } from '../prebuild.mjs';
|
||||
import { modifySourceForElectron } from './modifiers/index.mjs';
|
||||
|
||||
const PROJECT_ROOT = process.cwd();
|
||||
const TEMP_DIR = path.join(PROJECT_ROOT, 'tmp', 'desktop-build');
|
||||
|
||||
const foldersToSymlink = [
|
||||
'node_modules',
|
||||
'packages',
|
||||
'public',
|
||||
'locales',
|
||||
'docs',
|
||||
'.cursor',
|
||||
'apps',
|
||||
];
|
||||
|
||||
const foldersToCopy = ['src', 'scripts'];
|
||||
|
||||
const filesToCopy = [
|
||||
'package.json',
|
||||
'tsconfig.json',
|
||||
'next.config.ts',
|
||||
'pnpm-workspace.yaml',
|
||||
'bun.lockb',
|
||||
'.npmrc',
|
||||
'.bunfig.toml',
|
||||
'.eslintrc.js',
|
||||
'.eslintignore',
|
||||
'.prettierrc.cjs',
|
||||
'.prettierignore',
|
||||
'drizzle.config.ts',
|
||||
'postcss.config.js',
|
||||
'tailwind.config.ts',
|
||||
'tailwind.config.js',
|
||||
];
|
||||
|
||||
const build = async () => {
|
||||
console.log('🚀 Starting Electron App Build in Shadow Workspace...');
|
||||
console.log(`📂 Workspace: ${TEMP_DIR}`);
|
||||
|
||||
if (fs.existsSync(TEMP_DIR)) {
|
||||
await fs.remove(TEMP_DIR);
|
||||
}
|
||||
await fs.ensureDir(TEMP_DIR);
|
||||
|
||||
console.log('🔗 Symlinking dependencies and static assets...');
|
||||
for (const folder of foldersToSymlink) {
|
||||
const srcPath = path.join(PROJECT_ROOT, folder);
|
||||
const destPath = path.join(TEMP_DIR, folder);
|
||||
if (fs.existsSync(srcPath)) {
|
||||
await fs.ensureSymlink(srcPath, destPath);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('📋 Copying source code...');
|
||||
for (const folder of foldersToCopy) {
|
||||
const srcPath = path.join(PROJECT_ROOT, folder);
|
||||
const destPath = path.join(TEMP_DIR, folder);
|
||||
if (fs.existsSync(srcPath)) {
|
||||
await fs.copy(srcPath, destPath);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('📄 Copying configuration files...');
|
||||
const allFiles = await fs.readdir(PROJECT_ROOT);
|
||||
const envFiles = allFiles.filter((f) => f.startsWith('.env'));
|
||||
const files = [...filesToCopy, ...envFiles];
|
||||
for (const file of files) {
|
||||
const srcPath = path.join(PROJECT_ROOT, file);
|
||||
const destPath = path.join(TEMP_DIR, file);
|
||||
if (fs.existsSync(srcPath)) {
|
||||
await fs.copy(srcPath, destPath);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('✂️ Pruning desktop-incompatible code...');
|
||||
const relativeTempSrc = path.relative(PROJECT_ROOT, path.join(TEMP_DIR, 'src'));
|
||||
await runPrebuild(relativeTempSrc);
|
||||
|
||||
await modifySourceForElectron(TEMP_DIR);
|
||||
|
||||
console.log('🏗 Running next build in shadow workspace...');
|
||||
try {
|
||||
execSync('next build --webpack', {
|
||||
cwd: TEMP_DIR,
|
||||
env: {
|
||||
...process.env,
|
||||
NODE_OPTIONS: process.env.NODE_OPTIONS || '--max-old-space-size=6144',
|
||||
},
|
||||
stdio: 'inherit',
|
||||
});
|
||||
|
||||
console.log('📦 Extracting build artifacts...');
|
||||
const sourceOutDir = path.join(TEMP_DIR, 'out');
|
||||
const targetOutDir = path.join(PROJECT_ROOT, 'out');
|
||||
|
||||
// Clean up target directories
|
||||
if (fs.existsSync(targetOutDir)) {
|
||||
await fs.remove(targetOutDir);
|
||||
}
|
||||
|
||||
if (fs.existsSync(sourceOutDir)) {
|
||||
console.log('📦 Moving "out" directory...');
|
||||
await fs.move(sourceOutDir, targetOutDir);
|
||||
} else {
|
||||
console.warn("⚠️ 'out' directory not found. Using '.next' instead (fallback)?");
|
||||
const sourceNextDir = path.join(TEMP_DIR, '.next');
|
||||
const targetNextDir = path.join(PROJECT_ROOT, '.next');
|
||||
if (fs.existsSync(targetNextDir)) {
|
||||
await fs.remove(targetNextDir);
|
||||
}
|
||||
if (fs.existsSync(sourceNextDir)) {
|
||||
await fs.move(sourceNextDir, targetNextDir);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('✅ Build completed successfully!');
|
||||
} catch (error) {
|
||||
console.error('❌ Build failed.');
|
||||
throw error;
|
||||
} finally {
|
||||
console.log('🧹 Cleaning up workspace...');
|
||||
await fs.remove(TEMP_DIR);
|
||||
}
|
||||
};
|
||||
|
||||
await build().catch((err) => {
|
||||
console.error(err);
|
||||
throw err;
|
||||
});
|
||||
394
scripts/electronWorkflow/modifiers/appCode.mts
Normal file
394
scripts/electronWorkflow/modifiers/appCode.mts
Normal file
|
|
@ -0,0 +1,394 @@
|
|||
import { Lang, parse } from '@ast-grep/napi';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
|
||||
|
||||
import { isDirectRun, runStandalone } from './utils.mjs';
|
||||
|
||||
const rewriteFile = async (filePath: string, transformer: (code: string) => string) => {
|
||||
if (!fs.existsSync(filePath)) return;
|
||||
|
||||
const original = await fs.readFile(filePath, 'utf8');
|
||||
const updated = transformer(original);
|
||||
|
||||
if (updated !== original) {
|
||||
await fs.writeFile(filePath, updated);
|
||||
}
|
||||
};
|
||||
|
||||
const desktopOnlyVariantsPage = `import { DynamicLayoutProps } from '@/types/next';
|
||||
|
||||
import DesktopRouter from './router';
|
||||
|
||||
export default async (_props: DynamicLayoutProps) => {
|
||||
return <DesktopRouter />;
|
||||
};
|
||||
`;
|
||||
|
||||
const stripDevPanel = (code: string) => {
|
||||
let result = code.replace(/import DevPanel from ['"]@\/features\/DevPanel['"];\r?\n?/, '');
|
||||
|
||||
result = result.replace(
|
||||
/[\t ]*{process\.env\.NODE_ENV === 'development' && <DevPanel \/>}\s*\r?\n?/,
|
||||
'',
|
||||
);
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const removeSecurityTab = (code: string) => {
|
||||
const componentEntryRegex =
|
||||
/[\t ]*\[SettingsTabs\.Security]: dynamic\(\(\) => import\('\.\.\/security'\), {[\s\S]+?}\),\s*\r?\n/;
|
||||
const securityTabRegex = /[\t ]*SettingsTabs\.Security,\s*\r?\n/;
|
||||
|
||||
return code.replace(componentEntryRegex, '').replace(securityTabRegex, '');
|
||||
};
|
||||
|
||||
const removeSpeedInsightsAndAnalytics = (code: string) => {
|
||||
const ast = parse(Lang.Tsx, code);
|
||||
const root = ast.root();
|
||||
const edits: Array<{ start: number; end: number; text: string }> = [];
|
||||
|
||||
// Remove SpeedInsights import
|
||||
const speedInsightsImport = root.find({
|
||||
rule: {
|
||||
pattern: 'import { SpeedInsights } from $SOURCE',
|
||||
},
|
||||
});
|
||||
if (speedInsightsImport) {
|
||||
const range = speedInsightsImport.range();
|
||||
edits.push({ start: range.start.index, end: range.end.index, text: '' });
|
||||
}
|
||||
|
||||
// Remove Analytics import
|
||||
const analyticsImport = root.find({
|
||||
rule: {
|
||||
pattern: 'import Analytics from $SOURCE',
|
||||
},
|
||||
});
|
||||
if (analyticsImport) {
|
||||
const range = analyticsImport.range();
|
||||
edits.push({ start: range.start.index, end: range.end.index, text: '' });
|
||||
}
|
||||
|
||||
// Remove Suspense block containing Analytics and SpeedInsights
|
||||
// Find all Suspense blocks and check which one contains Analytics or SpeedInsights
|
||||
const allSuspenseBlocks = root.findAll({
|
||||
rule: {
|
||||
pattern: '<Suspense fallback={null}>$$$</Suspense>',
|
||||
},
|
||||
});
|
||||
|
||||
for (const suspenseBlock of allSuspenseBlocks) {
|
||||
const hasAnalytics = suspenseBlock.find({
|
||||
rule: {
|
||||
pattern: '<Analytics />',
|
||||
},
|
||||
});
|
||||
|
||||
const hasSpeedInsights = suspenseBlock.find({
|
||||
rule: {
|
||||
pattern: '<SpeedInsights />',
|
||||
},
|
||||
});
|
||||
|
||||
if (hasAnalytics || hasSpeedInsights) {
|
||||
const range = suspenseBlock.range();
|
||||
edits.push({ start: range.start.index, end: range.end.index, text: '' });
|
||||
break; // Only remove the first matching Suspense block
|
||||
}
|
||||
}
|
||||
|
||||
// Remove inVercel variable if it's no longer used
|
||||
const inVercelVar = root.find({
|
||||
rule: {
|
||||
pattern: 'const inVercel = process.env.VERCEL === "1";',
|
||||
},
|
||||
});
|
||||
if (inVercelVar) {
|
||||
// Check if inVercel is still used elsewhere
|
||||
const allInVercelUsages = root.findAll({
|
||||
rule: {
|
||||
regex: 'inVercel',
|
||||
},
|
||||
});
|
||||
// If only the declaration remains, remove it
|
||||
if (allInVercelUsages.length === 1) {
|
||||
const range = inVercelVar.range();
|
||||
edits.push({ start: range.start.index, end: range.end.index, text: '' });
|
||||
}
|
||||
}
|
||||
|
||||
// Apply edits
|
||||
if (edits.length === 0) return code;
|
||||
|
||||
edits.sort((a, b) => b.start - a.start);
|
||||
let result = code;
|
||||
for (const edit of edits) {
|
||||
result = result.slice(0, edit.start) + edit.text + result.slice(edit.end);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const removeClerkLogic = (code: string) => {
|
||||
const ast = parse(Lang.Tsx, code);
|
||||
const root = ast.root();
|
||||
const edits: Array<{ start: number; end: number; text: string }> = [];
|
||||
|
||||
// Remove Clerk import - try multiple patterns
|
||||
const clerkImportPatterns = [
|
||||
{ pattern: 'import Clerk from $SOURCE' },
|
||||
{ pattern: "import Clerk from './Clerk'" },
|
||||
{ pattern: "import Clerk from './Clerk/index'" },
|
||||
];
|
||||
|
||||
for (const pattern of clerkImportPatterns) {
|
||||
const clerkImport = root.find({
|
||||
rule: pattern,
|
||||
});
|
||||
if (clerkImport) {
|
||||
const range = clerkImport.range();
|
||||
edits.push({ start: range.start.index, end: range.end.index, text: '' });
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const findClerkIfStatement = () => {
|
||||
const directMatch = root.find({
|
||||
rule: {
|
||||
pattern: 'if (authEnv.NEXT_PUBLIC_ENABLE_CLERK_AUTH) { $$$ }',
|
||||
},
|
||||
});
|
||||
|
||||
if (directMatch) return directMatch;
|
||||
|
||||
const allIfStatements = root.findAll({
|
||||
rule: {
|
||||
kind: 'if_statement',
|
||||
},
|
||||
});
|
||||
|
||||
for (const ifStmt of allIfStatements) {
|
||||
const condition = ifStmt.find({
|
||||
rule: {
|
||||
pattern: 'authEnv.NEXT_PUBLIC_ENABLE_CLERK_AUTH',
|
||||
},
|
||||
});
|
||||
|
||||
if (condition) return ifStmt;
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
const clerkIfStatement = findClerkIfStatement();
|
||||
|
||||
if (clerkIfStatement) {
|
||||
const ifRange = clerkIfStatement.range();
|
||||
const elseClause = clerkIfStatement.find({
|
||||
rule: {
|
||||
kind: 'else_clause',
|
||||
},
|
||||
});
|
||||
|
||||
if (elseClause) {
|
||||
const elseIfStmt = elseClause.find({
|
||||
rule: {
|
||||
kind: 'if_statement',
|
||||
},
|
||||
});
|
||||
|
||||
if (elseIfStmt) {
|
||||
// Promote the first else-if to a top-level if and keep the rest of the chain
|
||||
const elseRange = elseClause.range();
|
||||
const replacement = code
|
||||
.slice(elseRange.start.index, elseRange.end.index)
|
||||
.replace(/^\s*else\s+/, '');
|
||||
|
||||
edits.push({
|
||||
start: ifRange.start.index,
|
||||
end: ifRange.end.index,
|
||||
text: replacement,
|
||||
});
|
||||
} else {
|
||||
const elseBlock = elseClause.find({
|
||||
rule: {
|
||||
kind: 'statement_block',
|
||||
},
|
||||
});
|
||||
|
||||
if (elseBlock) {
|
||||
edits.push({
|
||||
start: ifRange.start.index,
|
||||
end: ifRange.end.index,
|
||||
text: code.slice(elseBlock.range().start.index, elseBlock.range().end.index),
|
||||
});
|
||||
} else {
|
||||
edits.push({ start: ifRange.start.index, end: ifRange.end.index, text: '' });
|
||||
}
|
||||
}
|
||||
} else {
|
||||
edits.push({ start: ifRange.start.index, end: ifRange.end.index, text: '' });
|
||||
}
|
||||
}
|
||||
|
||||
// Apply edits
|
||||
if (edits.length === 0) return code;
|
||||
|
||||
edits.sort((a, b) => b.start - a.start);
|
||||
let result = code;
|
||||
for (const edit of edits) {
|
||||
result = result.slice(0, edit.start) + edit.text + result.slice(edit.end);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const removeManifestFromMetadata = (code: string) => {
|
||||
const ast = parse(Lang.Tsx, code);
|
||||
const root = ast.root();
|
||||
const edits: Array<{ start: number; end: number; text: string }> = [];
|
||||
|
||||
// Find generateMetadata function
|
||||
const generateMetadataFunc = root.find({
|
||||
rule: {
|
||||
pattern: 'export const generateMetadata = async ($$$) => { $$$ }',
|
||||
},
|
||||
});
|
||||
|
||||
if (!generateMetadataFunc) return code;
|
||||
|
||||
// Find return statement
|
||||
const returnStatement = generateMetadataFunc.find({
|
||||
rule: {
|
||||
kind: 'return_statement',
|
||||
},
|
||||
});
|
||||
|
||||
if (!returnStatement) return code;
|
||||
|
||||
// Find the object in return statement
|
||||
const returnObject = returnStatement.find({
|
||||
rule: {
|
||||
kind: 'object',
|
||||
},
|
||||
});
|
||||
|
||||
if (!returnObject) return code;
|
||||
|
||||
// Find all pair nodes (key-value pairs in the object)
|
||||
const allPairs = returnObject.findAll({
|
||||
rule: {
|
||||
kind: 'pair',
|
||||
},
|
||||
});
|
||||
|
||||
const keysToRemove = ['manifest', 'metadataBase'];
|
||||
|
||||
for (const pair of allPairs) {
|
||||
// Find the property_identifier or identifier
|
||||
const key = pair.find({
|
||||
rule: {
|
||||
any: [{ kind: 'property_identifier' }, { kind: 'identifier' }],
|
||||
},
|
||||
});
|
||||
|
||||
if (key && keysToRemove.includes(key.text())) {
|
||||
const range = pair.range();
|
||||
// Include the trailing comma if present
|
||||
const afterPair = code.slice(range.end.index, range.end.index + 10);
|
||||
const commaMatch = afterPair.match(/^,\s*/);
|
||||
const endIndex = commaMatch ? range.end.index + commaMatch[0].length : range.end.index;
|
||||
|
||||
edits.push({
|
||||
start: range.start.index,
|
||||
end: endIndex,
|
||||
text: '',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Apply edits
|
||||
if (edits.length === 0) return code;
|
||||
|
||||
edits.sort((a, b) => b.start - a.start);
|
||||
let result = code;
|
||||
for (const edit of edits) {
|
||||
result = result.slice(0, edit.start) + edit.text + result.slice(edit.end);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
export const modifyAppCode = async (TEMP_DIR: string) => {
|
||||
// 1. Replace src/app/[variants]/page.tsx with a desktop-only entry
|
||||
const variantsPagePath = path.join(TEMP_DIR, 'src/app/[variants]/page.tsx');
|
||||
if (fs.existsSync(variantsPagePath)) {
|
||||
console.log(' Processing src/app/[variants]/page.tsx...');
|
||||
await fs.writeFile(variantsPagePath, desktopOnlyVariantsPage);
|
||||
}
|
||||
|
||||
// 2. Remove DevPanel from src/layout/GlobalProvider/index.tsx
|
||||
const globalProviderPath = path.join(TEMP_DIR, 'src/layout/GlobalProvider/index.tsx');
|
||||
if (fs.existsSync(globalProviderPath)) {
|
||||
console.log(' Processing src/layout/GlobalProvider/index.tsx...');
|
||||
await rewriteFile(globalProviderPath, stripDevPanel);
|
||||
}
|
||||
|
||||
// 3. Delete src/app/[variants]/(main)/settings/security directory
|
||||
const securityDirPath = path.join(TEMP_DIR, 'src/app/[variants]/(main)/settings/security');
|
||||
if (fs.existsSync(securityDirPath)) {
|
||||
console.log(' Deleting src/app/[variants]/(main)/settings/security directory...');
|
||||
await fs.remove(securityDirPath);
|
||||
}
|
||||
|
||||
// 4. Remove Security tab wiring from SettingsContent
|
||||
const settingsContentPath = path.join(
|
||||
TEMP_DIR,
|
||||
'src/app/[variants]/(main)/settings/features/SettingsContent.tsx',
|
||||
);
|
||||
if (fs.existsSync(settingsContentPath)) {
|
||||
console.log(' Processing src/app/[variants]/(main)/settings/features/SettingsContent.tsx...');
|
||||
await rewriteFile(settingsContentPath, removeSecurityTab);
|
||||
}
|
||||
|
||||
// 5. Remove SpeedInsights and Analytics from src/app/[variants]/layout.tsx
|
||||
const variantsLayoutPath = path.join(TEMP_DIR, 'src/app/[variants]/layout.tsx');
|
||||
if (fs.existsSync(variantsLayoutPath)) {
|
||||
console.log(' Processing src/app/[variants]/layout.tsx...');
|
||||
await rewriteFile(variantsLayoutPath, removeSpeedInsightsAndAnalytics);
|
||||
}
|
||||
|
||||
// 6. Remove Clerk logic from src/layout/AuthProvider/index.tsx
|
||||
const authProviderPath = path.join(TEMP_DIR, 'src/layout/AuthProvider/index.tsx');
|
||||
if (fs.existsSync(authProviderPath)) {
|
||||
console.log(' Processing src/layout/AuthProvider/index.tsx...');
|
||||
await rewriteFile(authProviderPath, removeClerkLogic);
|
||||
}
|
||||
|
||||
// 7. Replace mdx Image component with next/image export
|
||||
const mdxImagePath = path.join(TEMP_DIR, 'src/components/mdx/Image.tsx');
|
||||
if (fs.existsSync(mdxImagePath)) {
|
||||
console.log(' Processing src/components/mdx/Image.tsx...');
|
||||
await fs.writeFile(mdxImagePath, "export { default } from 'next/image';\n");
|
||||
}
|
||||
|
||||
// 8. Remove manifest from metadata
|
||||
const metadataPath = path.join(TEMP_DIR, 'src/app/[variants]/metadata.ts');
|
||||
if (fs.existsSync(metadataPath)) {
|
||||
console.log(' Processing src/app/[variants]/metadata.ts...');
|
||||
await rewriteFile(metadataPath, removeManifestFromMetadata);
|
||||
}
|
||||
};
|
||||
|
||||
if (isDirectRun(import.meta.url)) {
|
||||
await runStandalone('modifyAppCode', modifyAppCode, [
|
||||
{ lang: Lang.Tsx, path: 'src/app/[variants]/page.tsx' },
|
||||
{ lang: Lang.Tsx, path: 'src/layout/GlobalProvider/index.tsx' },
|
||||
{ lang: Lang.Tsx, path: 'src/app/[variants]/(main)/settings/features/SettingsContent.tsx' },
|
||||
{ lang: Lang.Tsx, path: 'src/app/[variants]/layout.tsx' },
|
||||
{ lang: Lang.Tsx, path: 'src/layout/AuthProvider/index.tsx' },
|
||||
{ lang: Lang.Tsx, path: 'src/components/mdx/Image.tsx' },
|
||||
{ lang: Lang.Tsx, path: 'src/app/[variants]/metadata.ts' },
|
||||
]);
|
||||
}
|
||||
61
scripts/electronWorkflow/modifiers/cleanUp.mts
Normal file
61
scripts/electronWorkflow/modifiers/cleanUp.mts
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
import { Lang, parse } from '@ast-grep/napi';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
|
||||
|
||||
import { isDirectRun, runStandalone } from './utils.mjs';
|
||||
|
||||
export const cleanUpCode = async (TEMP_DIR: string) => {
|
||||
// Remove 'use server'
|
||||
const filesToRemoveUseServer = [
|
||||
'src/components/mdx/Image.tsx',
|
||||
'src/features/DevPanel/CacheViewer/getCacheEntries.ts',
|
||||
'src/server/translation.ts',
|
||||
];
|
||||
|
||||
for (const file of filesToRemoveUseServer) {
|
||||
const filePath = path.join(TEMP_DIR, file);
|
||||
if (fs.existsSync(filePath)) {
|
||||
console.log(` Processing ${file}...`);
|
||||
const code = await fs.readFile(filePath, 'utf8');
|
||||
const ast = parse(Lang.TypeScript, code);
|
||||
const root = ast.root();
|
||||
|
||||
// 'use server' is usually an expression statement at the top
|
||||
// We look for the literal string 'use server' or "use server"
|
||||
const useServer =
|
||||
root.find({
|
||||
rule: {
|
||||
pattern: "'use server'",
|
||||
},
|
||||
}) ||
|
||||
root.find({
|
||||
rule: {
|
||||
pattern: '"use server"',
|
||||
},
|
||||
});
|
||||
|
||||
if (useServer) {
|
||||
// Find the statement containing this string
|
||||
let curr = useServer.parent();
|
||||
while (curr) {
|
||||
if (curr.kind() === 'expression_statement') {
|
||||
curr.replace('');
|
||||
break;
|
||||
}
|
||||
if (curr.kind() === 'program') break;
|
||||
curr = curr.parent();
|
||||
}
|
||||
}
|
||||
|
||||
await fs.writeFile(filePath, root.text());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (isDirectRun(import.meta.url)) {
|
||||
await runStandalone('cleanUpCode', cleanUpCode, [
|
||||
{ lang: Lang.Tsx, path: 'src/components/mdx/Image.tsx' },
|
||||
{ lang: Lang.TypeScript, path: 'src/features/DevPanel/CacheViewer/getCacheEntries.ts' },
|
||||
{ lang: Lang.TypeScript, path: 'src/server/translation.ts' },
|
||||
]);
|
||||
}
|
||||
27
scripts/electronWorkflow/modifiers/index.mts
Normal file
27
scripts/electronWorkflow/modifiers/index.mts
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
import { Lang } from '@ast-grep/napi';
|
||||
import path from 'node:path';
|
||||
|
||||
import { modifyAppCode } from './appCode.mjs';
|
||||
import { cleanUpCode } from './cleanUp.mjs';
|
||||
import { modifyNextConfig } from './nextConfig.mjs';
|
||||
import { modifyRoutes } from './routes.mjs';
|
||||
import { isDirectRun, runStandalone } from './utils.mjs';
|
||||
|
||||
export const modifySourceForElectron = async (TEMP_DIR: string) => {
|
||||
await modifyNextConfig(TEMP_DIR);
|
||||
await modifyAppCode(TEMP_DIR);
|
||||
await modifyRoutes(TEMP_DIR);
|
||||
await cleanUpCode(TEMP_DIR);
|
||||
};
|
||||
|
||||
if (isDirectRun(import.meta.url)) {
|
||||
await runStandalone('modifySourceForElectron', modifySourceForElectron, [
|
||||
{ lang: Lang.TypeScript, path: path.join(process.cwd(), 'next.config.ts') },
|
||||
{ lang: Lang.Tsx, path: 'src/app/[variants]/page.tsx' },
|
||||
{ lang: Lang.Tsx, path: 'src/layout/GlobalProvider/index.tsx' },
|
||||
{ lang: Lang.Tsx, path: 'src/app/[variants]/router/desktopRouter.config.tsx' },
|
||||
{ lang: Lang.Tsx, path: 'src/components/mdx/Image.tsx' },
|
||||
{ lang: Lang.TypeScript, path: 'src/features/DevPanel/CacheViewer/getCacheEntries.ts' },
|
||||
{ lang: Lang.TypeScript, path: 'src/server/translation.ts' },
|
||||
]);
|
||||
}
|
||||
133
scripts/electronWorkflow/modifiers/nextConfig.mts
Normal file
133
scripts/electronWorkflow/modifiers/nextConfig.mts
Normal file
|
|
@ -0,0 +1,133 @@
|
|||
import { Lang, parse } from '@ast-grep/napi';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
|
||||
|
||||
import { isDirectRun, runStandalone } from './utils.mjs';
|
||||
|
||||
interface Edit {
|
||||
end: number;
|
||||
start: number;
|
||||
text: string;
|
||||
}
|
||||
|
||||
export const modifyNextConfig = async (TEMP_DIR: string) => {
|
||||
const nextConfigPath = path.join(TEMP_DIR, 'next.config.ts');
|
||||
if (!fs.existsSync(nextConfigPath)) return;
|
||||
|
||||
console.log(' Processing next.config.ts...');
|
||||
const code = await fs.readFile(nextConfigPath, 'utf8');
|
||||
const ast = parse(Lang.TypeScript, code);
|
||||
const root = ast.root();
|
||||
const edits: Edit[] = [];
|
||||
|
||||
// Find nextConfig declaration
|
||||
const nextConfigDecl = root.find({
|
||||
rule: {
|
||||
pattern: 'const nextConfig: NextConfig = { $$$ }',
|
||||
},
|
||||
});
|
||||
|
||||
if (nextConfigDecl) {
|
||||
// 1. Remove redirects
|
||||
const redirectsProp = nextConfigDecl.find({
|
||||
rule: {
|
||||
kind: 'property_identifier',
|
||||
regex: '^redirects$',
|
||||
},
|
||||
});
|
||||
if (redirectsProp) {
|
||||
let curr = redirectsProp.parent();
|
||||
while (curr) {
|
||||
if (curr.kind() === 'pair') {
|
||||
const range = curr.range();
|
||||
edits.push({ end: range.end.index, start: range.start.index, text: '' });
|
||||
break;
|
||||
}
|
||||
if (curr.kind() === 'object') break;
|
||||
curr = curr.parent();
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Remove headers
|
||||
const headersProp = nextConfigDecl.find({
|
||||
rule: {
|
||||
kind: 'property_identifier',
|
||||
regex: '^headers$',
|
||||
},
|
||||
});
|
||||
if (headersProp) {
|
||||
let curr = headersProp.parent();
|
||||
while (curr) {
|
||||
if (curr.kind() === 'pair' || curr.kind() === 'method_definition') {
|
||||
const range = curr.range();
|
||||
edits.push({ end: range.end.index, start: range.start.index, text: '' });
|
||||
break;
|
||||
}
|
||||
if (curr.kind() === 'object') break;
|
||||
curr = curr.parent();
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Remove spread element
|
||||
const spread = nextConfigDecl.find({
|
||||
rule: {
|
||||
kind: 'spread_element',
|
||||
},
|
||||
});
|
||||
if (spread) {
|
||||
const range = spread.range();
|
||||
edits.push({ end: range.end.index, start: range.start.index, text: '' });
|
||||
}
|
||||
|
||||
// 4. Inject output: 'export'
|
||||
const objectNode = nextConfigDecl.find({
|
||||
rule: { kind: 'object' },
|
||||
});
|
||||
|
||||
if (objectNode) {
|
||||
const range = objectNode.range();
|
||||
// Insert after the opening brace `{
|
||||
edits.push({
|
||||
end: range.start.index + 1,
|
||||
start: range.start.index + 1,
|
||||
text: "\n output: 'export',",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Remove withPWA wrapper
|
||||
const withPWA = root.find({
|
||||
rule: {
|
||||
pattern: 'withPWA($A)',
|
||||
},
|
||||
});
|
||||
if (withPWA) {
|
||||
const inner = withPWA.getMatch('A');
|
||||
if (inner) {
|
||||
const range = withPWA.range();
|
||||
edits.push({ end: range.end.index, start: range.start.index, text: inner.text() });
|
||||
}
|
||||
}
|
||||
|
||||
// Apply edits
|
||||
edits.sort((a, b) => b.start - a.start);
|
||||
let newCode = code;
|
||||
for (const edit of edits) {
|
||||
newCode = newCode.slice(0, edit.start) + edit.text + newCode.slice(edit.end);
|
||||
}
|
||||
|
||||
// Cleanup commas (syntax fix)
|
||||
// 1. Double commas ,, -> , (handle spaces/newlines between)
|
||||
newCode = newCode.replaceAll(/,(\s*,)+/g, ',');
|
||||
// 2. Leading comma in object { , -> {
|
||||
newCode = newCode.replaceAll(/{\s*,/g, '{');
|
||||
// 3. Trailing comma before closing brace is valid in JS/TS
|
||||
|
||||
await fs.writeFile(nextConfigPath, newCode);
|
||||
};
|
||||
|
||||
if (isDirectRun(import.meta.url)) {
|
||||
await runStandalone('modifyNextConfig', modifyNextConfig, [
|
||||
{ lang: Lang.TypeScript, path: process.cwd() + '/next.config.ts' },
|
||||
]);
|
||||
}
|
||||
90
scripts/electronWorkflow/modifiers/routes.mts
Normal file
90
scripts/electronWorkflow/modifiers/routes.mts
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
import { type Edit, Lang, parse } from '@ast-grep/napi';
import fs from 'fs-extra';
import path from 'node:path';

import { isDirectRun, runStandalone } from './utils.mjs';
|
||||
|
||||
export const modifyRoutes = async (TEMP_DIR: string) => {
|
||||
// 1. Delete routes
|
||||
const filesToDelete = [
|
||||
// Backend API routes
|
||||
'src/app/(backend)/api',
|
||||
'src/app/(backend)/webapi',
|
||||
'src/app/(backend)/trpc',
|
||||
'src/app/(backend)/oidc',
|
||||
'src/app/(backend)/middleware',
|
||||
'src/app/(backend)/f',
|
||||
'src/app/(backend)/market',
|
||||
|
||||
// Auth & User routes
|
||||
'src/app/[variants]/(auth)',
|
||||
'src/app/[variants]/(main)/(mobile)/me',
|
||||
'src/app/[variants]/(main)/changelog',
|
||||
'src/app/[variants]/oauth',
|
||||
|
||||
// Other app roots
|
||||
'src/app/market-auth-callback',
|
||||
'src/app/manifest.ts',
|
||||
'src/app/robots.tsx',
|
||||
'src/app/sitemap.tsx',
|
||||
'src/app/sw.ts',
|
||||
|
||||
// Config files
|
||||
'src/instrumentation.ts',
|
||||
'src/instrumentation.node.ts',
|
||||
|
||||
// Desktop specific routes
|
||||
'src/app/desktop/devtools',
|
||||
'src/app/desktop/layout.tsx',
|
||||
];
|
||||
|
||||
for (const file of filesToDelete) {
|
||||
const fullPath = path.join(TEMP_DIR, file);
|
||||
await fs.remove(fullPath);
|
||||
}
|
||||
|
||||
// 2. Modify desktopRouter.config.tsx
|
||||
const routerConfigPath = path.join(
|
||||
TEMP_DIR,
|
||||
'src/app/[variants]/router/desktopRouter.config.tsx',
|
||||
);
|
||||
if (fs.existsSync(routerConfigPath)) {
|
||||
console.log(' Processing src/app/[variants]/router/desktopRouter.config.tsx...');
|
||||
const code = await fs.readFile(routerConfigPath, 'utf8');
|
||||
const ast = parse(Lang.Tsx, code);
|
||||
const root = ast.root();
|
||||
|
||||
const changelogNode = root.find({
|
||||
rule: {
|
||||
pattern: "{ path: 'changelog', $$$ }",
|
||||
},
|
||||
});
|
||||
if (changelogNode) {
|
||||
changelogNode.replace('');
|
||||
}
|
||||
|
||||
const changelogImport = root.find({
|
||||
rule: {
|
||||
pattern: "import('../(main)/changelog')",
|
||||
},
|
||||
});
|
||||
if (changelogImport) {
|
||||
// Find the closest object (route definition) and remove it
|
||||
let curr = changelogImport.parent();
|
||||
while (curr) {
|
||||
if (curr.kind() === 'object') {
|
||||
curr.replace('');
|
||||
break;
|
||||
}
|
||||
curr = curr.parent();
|
||||
}
|
||||
}
|
||||
await fs.writeFile(routerConfigPath, root.text());
|
||||
}
|
||||
};
|
||||
|
||||
if (isDirectRun(import.meta.url)) {
|
||||
await runStandalone('modifyRoutes', modifyRoutes, [
|
||||
{ lang: Lang.Tsx, path: 'src/app/[variants]/router/desktopRouter.config.tsx' },
|
||||
]);
|
||||
}
|
||||
67
scripts/electronWorkflow/modifiers/utils.mts
Normal file
67
scripts/electronWorkflow/modifiers/utils.mts
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
import { Lang, parse } from '@ast-grep/napi';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
|
||||
import { pathToFileURL } from 'node:url';
|
||||
|
||||
/** A file that should be syntax-checked after a modifier has rewritten it. */
interface ValidationTarget {
  /** ast-grep language used to parse the file. */
  lang: Lang;
  /** Path relative to the temp build directory. */
  path: string;
}
|
||||
|
||||
export const isDirectRun = (importMetaUrl: string) => {
|
||||
const entry = process.argv[1];
|
||||
if (!entry) return false;
|
||||
|
||||
return importMetaUrl === pathToFileURL(entry).href;
|
||||
};
|
||||
|
||||
export const resolveTempDir = () => {
|
||||
const candidate = process.env.TEMP_DIR || process.argv[2];
|
||||
const resolved = candidate
|
||||
? path.resolve(candidate)
|
||||
: path.resolve(process.cwd(), 'tmp', 'desktop-build');
|
||||
|
||||
if (!fs.existsSync(resolved)) {
|
||||
throw new Error(`TEMP_DIR not found: ${resolved}`);
|
||||
}
|
||||
|
||||
return resolved;
|
||||
};
|
||||
|
||||
export const validateFiles = async (tempDir: string, targets: ValidationTarget[]) => {
|
||||
for (const target of targets) {
|
||||
const filePath = path.join(tempDir, target.path);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
console.warn(` ⚠️ Skipped validation, missing file: ${target.path}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const code = await fs.readFile(filePath, 'utf8');
|
||||
parse(target.lang, code);
|
||||
console.log(` ✅ Validated: ${target.path}`);
|
||||
}
|
||||
};
|
||||
|
||||
export const runStandalone = async (
|
||||
name: string,
|
||||
modifier: (tempDir: string) => Promise<void>,
|
||||
validateTargets: ValidationTarget[] = [],
|
||||
) => {
|
||||
try {
|
||||
const workdir = process.cwd();
|
||||
console.log(`▶️ Running ${name} with TEMP_DIR=${workdir}`);
|
||||
|
||||
await modifier(workdir);
|
||||
|
||||
if (validateTargets.length) {
|
||||
console.log('🔎 Validating modified files...');
|
||||
await validateFiles(workdir, validateTargets);
|
||||
}
|
||||
|
||||
console.log(`✅ ${name} completed`);
|
||||
} catch (error) {
|
||||
console.error(`❌ ${name} failed`, error);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
};
|
||||
18
scripts/electronWorkflow/moveNextExports.ts
Normal file
18
scripts/electronWorkflow/moveNextExports.ts
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
|
||||
|
||||
const rootDir = path.resolve(__dirname, '../..');
|
||||
|
||||
const exportSourceDir = path.join(rootDir, 'out');
|
||||
const exportTargetDir = path.join(rootDir, 'apps/desktop/dist/next');
|
||||
|
||||
if (fs.existsSync(exportSourceDir)) {
|
||||
console.log(`📦 Copying Next export assets from ${exportSourceDir} to ${exportTargetDir}...`);
|
||||
fs.ensureDirSync(exportTargetDir);
|
||||
fs.copySync(exportSourceDir, exportTargetDir, { overwrite: true });
|
||||
console.log(`✅ Export assets copied successfully!`);
|
||||
} else {
|
||||
console.log(`ℹ️ No Next export output found at ${exportSourceDir}, skipping copy.`);
|
||||
}
|
||||
|
||||
console.log(`🎉 Export move completed!`);
|
||||
|
|
@ -1,69 +0,0 @@
|
|||
/* eslint-disable unicorn/no-process-exit */
|
||||
import fs from 'fs-extra';
|
||||
import { execSync } from 'node:child_process';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
const rootDir = path.resolve(__dirname, '../..');
|
||||
|
||||
// 定义源目录和目标目录
|
||||
const sourceDir: string = path.join(rootDir, '.next/standalone');
|
||||
const targetDir: string = path.join(rootDir, 'apps/desktop/dist/next');
|
||||
|
||||
// 向 sourceDir 写入 .env 文件
|
||||
const env = fs.readFileSync(path.join(rootDir, '.env.desktop'), 'utf8');
|
||||
|
||||
fs.writeFileSync(path.join(sourceDir, '.env'), env, 'utf8');
|
||||
console.log(`⚓️ Inject .env successful`);
|
||||
|
||||
// 确保目标目录的父目录存在
|
||||
fs.ensureDirSync(path.dirname(targetDir));
|
||||
|
||||
// 如果目标目录已存在,先删除它
|
||||
if (fs.existsSync(targetDir)) {
|
||||
console.log(`🗑️ Target directory ${targetDir} already exists, deleting...`);
|
||||
try {
|
||||
fs.removeSync(targetDir);
|
||||
console.log(`✅ Old target directory removed successfully`);
|
||||
} catch (error) {
|
||||
console.warn(`⚠️ Failed to delete target directory: ${error}`);
|
||||
console.log('🔄 Trying to delete using system command...');
|
||||
try {
|
||||
if (os.platform() === 'win32') {
|
||||
execSync(`rmdir /S /Q "${targetDir}"`, { stdio: 'inherit' });
|
||||
} else {
|
||||
execSync(`rm -rf "${targetDir}"`, { stdio: 'inherit' });
|
||||
}
|
||||
console.log('✅ Successfully deleted old target directory');
|
||||
} catch (cmdError) {
|
||||
console.error(`❌ Unable to delete target directory, might need manual cleanup: ${cmdError}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`🚚 Moving ${sourceDir} to ${targetDir}...`);
|
||||
|
||||
try {
|
||||
// 使用 fs-extra 的 move 方法
|
||||
fs.moveSync(sourceDir, targetDir, { overwrite: true });
|
||||
console.log(`✅ Directory moved successfully!`);
|
||||
} catch (error) {
|
||||
console.error('❌ fs-extra move failed:', error);
|
||||
console.log('🔄 Trying to move using system command...');
|
||||
|
||||
try {
|
||||
// 使用系统命令进行移动
|
||||
if (os.platform() === 'win32') {
|
||||
execSync(`move "${sourceDir}" "${targetDir}"`, { stdio: 'inherit' });
|
||||
} else {
|
||||
execSync(`mv "${sourceDir}" "${targetDir}"`, { stdio: 'inherit' });
|
||||
}
|
||||
console.log('✅ System command move completed successfully!');
|
||||
} catch (mvError) {
|
||||
console.error('❌ Failed to move directory:', mvError);
|
||||
console.log('💡 Try running manually: sudo mv ' + sourceDir + ' ' + targetDir);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`🎉 Move completed!`);
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
import { consola } from 'consola';
|
||||
import { colors } from 'consola/utils';
|
||||
import { unset } from 'es-toolkit/compat';
|
||||
import { diff } from 'just-diff';
|
||||
import { unset } from 'lodash';
|
||||
import { existsSync } from 'node:fs';
|
||||
|
||||
import {
|
||||
|
|
|
|||
|
|
@ -1,11 +1,18 @@
|
|||
import * as dotenv from 'dotenv';
|
||||
import dotenvExpand from 'dotenv-expand';
|
||||
import { existsSync } from 'node:fs';
|
||||
import { rm } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
const isDesktop = process.env.NEXT_PUBLIC_IS_DESKTOP_APP === '1';
|
||||
|
||||
dotenv.config();
|
||||
if (isDesktop) {
|
||||
dotenvExpand.expand(dotenv.config({ path: '.env.desktop' }));
|
||||
dotenvExpand.expand(dotenv.config({ override: true, path: '.env.desktop.local' }));
|
||||
} else {
|
||||
dotenvExpand.expand(dotenv.config());
|
||||
}
|
||||
// 创建需要排除的特性映射
|
||||
/* eslint-disable sort-keys-fix/sort-keys-fix */
|
||||
const partialBuildPages = [
|
||||
|
|
@ -62,22 +69,24 @@ const partialBuildPages = [
|
|||
/**
|
||||
* 删除指定的目录
|
||||
*/
|
||||
const removeDirectories = async () => {
|
||||
export const runPrebuild = async (targetDir: string = 'src') => {
|
||||
// 遍历 partialBuildPages 数组
|
||||
for (const page of partialBuildPages) {
|
||||
// 检查是否需要禁用该功能
|
||||
if (page.disabled) {
|
||||
for (const dirPath of page.paths) {
|
||||
const fullPath = path.resolve(process.cwd(), dirPath);
|
||||
// Replace 'src' with targetDir
|
||||
const relativePath = dirPath.replace(/^src/, targetDir);
|
||||
const fullPath = path.resolve(process.cwd(), relativePath);
|
||||
|
||||
// 检查目录是否存在
|
||||
if (existsSync(fullPath)) {
|
||||
try {
|
||||
// 递归删除目录
|
||||
await rm(fullPath, { force: true, recursive: true });
|
||||
console.log(`♻️ Removed ${dirPath} successfully`);
|
||||
console.log(`♻️ Removed ${relativePath} successfully`);
|
||||
} catch (error) {
|
||||
console.error(`Failed to remove directory ${dirPath}:`, error);
|
||||
console.error(`Failed to remove directory ${relativePath}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -85,7 +94,12 @@ const removeDirectories = async () => {
|
|||
}
|
||||
};
|
||||
|
||||
// 执行删除操作
|
||||
console.log('Starting prebuild cleanup...');
|
||||
await removeDirectories();
|
||||
console.log('Prebuild cleanup completed.');
|
||||
// Check if the script is being run directly
|
||||
const isMainModule = process.argv[1] === fileURLToPath(import.meta.url);
|
||||
|
||||
if (isMainModule) {
|
||||
// 执行删除操作
|
||||
console.log('Starting prebuild cleanup...');
|
||||
await runPrebuild();
|
||||
console.log('Prebuild cleanup completed.');
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { kebabCase } from 'lodash';
|
||||
import { kebabCase } from 'es-toolkit/compat';
|
||||
import { readFileSync, writeFileSync } from 'node:fs';
|
||||
import { resolve } from 'node:path';
|
||||
|
||||
|
|
|
|||
29
scripts/registerDesktopEnv.cjs
Normal file
29
scripts/registerDesktopEnv.cjs
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
/**
 * Desktop env preloader for Next.js.
 *
 * Why: Next.js only auto-loads `.env*` (e.g. `.env`, `.env.local`,
 * `.env.development`), but our desktop build expects `.env.desktop`.
 *
 * This file is intended to be used via Node's `-r` (require) flag so it runs
 * BEFORE Next.js loads its own env config:
 *
 *   node -r ./scripts/registerDesktopEnv.cjs ./node_modules/next/dist/bin/next build
 */

const fs = require('node:fs');
const path = require('node:path');
const dotenv = require('dotenv');
const dotenvExpand = require('dotenv-expand');

if (process.env.NEXT_PUBLIC_IS_DESKTOP_APP === '1') {
  const cwd = process.cwd();

  // `.env.desktop` is loaded first; `.env.desktop.local` may override it.
  const layers = [
    { file: path.join(cwd, '.env.desktop'), override: false },
    { file: path.join(cwd, '.env.desktop.local'), override: true },
  ];

  for (const { file, override } of layers) {
    if (fs.existsSync(file)) {
      dotenvExpand.expand(dotenv.config({ override, path: file }));
    }
  }
}
|
||||
251
scripts/replaceComponentImports.ts
Normal file
251
scripts/replaceComponentImports.ts
Normal file
|
|
@ -0,0 +1,251 @@
|
|||
import { readFileSync, readdirSync, statSync, writeFileSync } from 'node:fs';
|
||||
import { join, relative } from 'node:path';
|
||||
|
||||
/** Options controlling the import-rewrite pass. */
interface ReplaceConfig {
  /** Component names whose imports should be moved to the new package */
  components: string[];
  /** Dry-run mode: preview only, do not actually modify files */
  dryRun?: boolean;
  /** Whitelist of file extensions to scan */
  fileExtensions?: string[];
  /** Package the components are currently imported from */
  fromPackage: string;
  /** Directory to scan */
  targetDir: string;
  /** Package the imports should be rewritten to */
  toPackage: string;
}
|
||||
|
||||
/**
|
||||
* 递归获取目录下所有文件
|
||||
*/
|
||||
function getAllFiles(dir: string, extensions: string[]): string[] {
|
||||
const files: string[] = [];
|
||||
|
||||
function walk(currentPath: string) {
|
||||
const items = readdirSync(currentPath);
|
||||
|
||||
for (const item of items) {
|
||||
const fullPath = join(currentPath, item);
|
||||
const stat = statSync(fullPath);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
// 跳过 node_modules 等目录
|
||||
if (!['node_modules', '.git', 'dist', 'build', '.next'].includes(item)) {
|
||||
walk(fullPath);
|
||||
}
|
||||
} else if (stat.isFile()) {
|
||||
const hasValidExtension = extensions.some((ext) => fullPath.endsWith(ext));
|
||||
if (hasValidExtension) {
|
||||
files.push(fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
walk(dir);
|
||||
return files;
|
||||
}
|
||||
|
||||
/**
|
||||
* 解析 import 语句,提取导入的组件
|
||||
*/
|
||||
function parseImportStatement(line: string, packageName: string) {
|
||||
// 匹配 import { ... } from 'package'
|
||||
const importRegex = new RegExp(
|
||||
`import\\s+{([^}]+)}\\s+from\\s+['"]${packageName.replaceAll(/[$()*+.?[\\\]^{|}]/g, '\\$&')}['"]`,
|
||||
);
|
||||
const match = line.match(importRegex);
|
||||
|
||||
if (!match) return null;
|
||||
|
||||
const importContent = match[1];
|
||||
const components = importContent
|
||||
.split(',')
|
||||
.map((item) => {
|
||||
const trimmed = item.trim();
|
||||
// 处理 as 别名: ComponentName as AliasName
|
||||
const asMatch = trimmed.match(/^(\w+)(?:\s+as\s+(\w+))?/);
|
||||
return asMatch
|
||||
? {
|
||||
alias: asMatch[2] || null,
|
||||
name: asMatch[1],
|
||||
raw: trimmed,
|
||||
}
|
||||
: null;
|
||||
})
|
||||
.filter(Boolean) as Array<{ alias: string | null; name: string; raw: string }>;
|
||||
|
||||
return {
|
||||
components,
|
||||
fullMatch: match[0],
|
||||
indentation: line.match(/^\s*/)?.[0] || '',
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* 替换文件中的 import 语句
|
||||
*/
|
||||
function replaceImportsInFile(filePath: string, config: ReplaceConfig): boolean {
|
||||
const content = readFileSync(filePath, 'utf8');
|
||||
const lines = content.split('\n');
|
||||
let modified = false;
|
||||
const newLines: string[] = [];
|
||||
|
||||
for (const line of lines) {
|
||||
const parsed = parseImportStatement(line, config.fromPackage);
|
||||
|
||||
if (!parsed) {
|
||||
newLines.push(line);
|
||||
continue;
|
||||
}
|
||||
|
||||
// 找出需要替换的组件和保留的组件
|
||||
const toReplace = parsed.components.filter((comp) => config.components.includes(comp.name));
|
||||
const toKeep = parsed.components.filter((comp) => !config.components.includes(comp.name));
|
||||
|
||||
if (toReplace.length === 0) {
|
||||
// 没有需要替换的组件
|
||||
newLines.push(line);
|
||||
continue;
|
||||
}
|
||||
|
||||
modified = true;
|
||||
|
||||
// 生成新的 import 语句
|
||||
const { indentation } = parsed;
|
||||
|
||||
// 如果有保留的组件,保留原来的 import
|
||||
if (toKeep.length > 0) {
|
||||
const keepImports = toKeep.map((c) => c.raw).join(', ');
|
||||
newLines.push(`${indentation}import { ${keepImports} } from '${config.fromPackage}';`);
|
||||
}
|
||||
|
||||
// 添加新的 import
|
||||
const replaceImports = toReplace.map((c) => c.raw).join(', ');
|
||||
newLines.push(`${indentation}import { ${replaceImports} } from '${config.toPackage}';`);
|
||||
}
|
||||
|
||||
if (modified) {
|
||||
const newContent = newLines.join('\n');
|
||||
if (!config.dryRun) {
|
||||
writeFileSync(filePath, newContent, 'utf8');
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* 执行替换
|
||||
*/
|
||||
function executeReplace(config: ReplaceConfig) {
|
||||
const extensions = config.fileExtensions || ['.ts', '.tsx', '.js', '.jsx'];
|
||||
const files = getAllFiles(config.targetDir, extensions);
|
||||
|
||||
console.log(`\n🔍 扫描目录: ${config.targetDir}`);
|
||||
console.log(`📦 从 "${config.fromPackage}" 替换到 "${config.toPackage}"`);
|
||||
console.log(`🎯 目标组件: ${config.components.join(', ')}`);
|
||||
console.log(`📄 找到 ${files.length} 个文件\n`);
|
||||
|
||||
if (config.dryRun) {
|
||||
console.log('🔔 [DRY RUN 模式] 仅预览,不会实际修改文件\n');
|
||||
}
|
||||
|
||||
let modifiedCount = 0;
|
||||
const modifiedFiles: string[] = [];
|
||||
|
||||
for (const file of files) {
|
||||
const wasModified = replaceImportsInFile(file, config);
|
||||
if (wasModified) {
|
||||
modifiedCount++;
|
||||
modifiedFiles.push(relative(process.cwd(), file));
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n✅ 完成!');
|
||||
console.log(`📝 修改了 ${modifiedCount} 个文件\n`);
|
||||
|
||||
if (modifiedFiles.length > 0) {
|
||||
console.log('修改的文件:');
|
||||
for (const file of modifiedFiles) {
|
||||
console.log(` - ${file}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============ Main: CLI argument handling ============

/**
 * Parse the command-line flags into a ReplaceConfig.
 * Returns null when help was requested (or no arguments were given);
 * exits the process when a required flag is missing.
 */
function parseArgs(): ReplaceConfig | null {
  const args = process.argv.slice(2);

  if (args.length === 0 || args.includes('--help') || args.includes('-h')) {
    console.log(`
使用方法:
  bun run scripts/replaceComponentImports.ts [选项]

选项:
  --components <comp1,comp2,...> 要替换的组件列表(逗号分隔)
  --from <package> 原始包名
  --to <package> 目标包名
  --dir <directory> 要扫描的目录(默认: src)
  --ext <.ext1,.ext2,...> 文件扩展名(默认: .ts,.tsx,.js,.jsx)
  --dry-run 仅预览,不实际修改文件
  --help, -h 显示帮助信息

示例:
  # 将 antd 的 Skeleton 和 Empty 替换为 @lobehub/ui
  bun run scripts/replaceComponentImports.ts \\
    --components Skeleton,Empty \\
    --from antd \\
    --to @lobehub/ui \\
    --dir src

  # 仅预览,不修改
  bun run scripts/replaceComponentImports.ts \\
    --components Skeleton,Empty \\
    --from antd \\
    --to @lobehub/ui \\
    --dry-run
`);
    return null;
  }

  // Read the value that follows a flag, e.g. `--from antd` -> 'antd'.
  const getArgValue = (flag: string): string | undefined => {
    const index = args.indexOf(flag);
    return index !== -1 && index + 1 < args.length ? args[index + 1] : undefined;
  };

  const componentsStr = getArgValue('--components');
  const fromPackage = getArgValue('--from');
  const toPackage = getArgValue('--to');
  const targetDir = getArgValue('--dir') || 'src';
  const extStr = getArgValue('--ext');
  const dryRun = args.includes('--dry-run');

  // All three of --components/--from/--to are mandatory.
  if (!componentsStr || !fromPackage || !toPackage) {
    console.error('❌ 错误: 必须指定 --components, --from 和 --to 参数');
    console.error('使用 --help 查看帮助信息');
    // eslint-disable-next-line unicorn/no-process-exit
    process.exit(1);
  }

  return {
    components: componentsStr.split(',').map((c) => c.trim()),
    dryRun,
    fileExtensions: extStr ? extStr.split(',').map((e) => e.trim()) : undefined,
    fromPackage,
    targetDir,
    toPackage,
  };
}
|
||||
|
||||
// Script entry: parse CLI flags, then run the replacement (a null config
// means help was shown and there is nothing to do).
const config = parseArgs();
if (config) {
  executeReplace(config);
}
|
||||
33
scripts/runNextDesktop.mts
Normal file
33
scripts/runNextDesktop.mts
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
import * as dotenv from 'dotenv';
|
||||
import dotenvExpand from 'dotenv-expand';
|
||||
import { spawn } from 'node:child_process';
|
||||
import { existsSync } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
|
||||
const isDesktop = process.env.NEXT_PUBLIC_IS_DESKTOP_APP === '1';
|
||||
|
||||
if (isDesktop) {
|
||||
const envDesktop = path.resolve(process.cwd(), '.env.desktop');
|
||||
const envDesktopLocal = path.resolve(process.cwd(), '.env.desktop.local');
|
||||
|
||||
if (existsSync(envDesktop)) dotenvExpand.expand(dotenv.config({ path: envDesktop }));
|
||||
if (existsSync(envDesktopLocal))
|
||||
dotenvExpand.expand(dotenv.config({ override: true, path: envDesktopLocal }));
|
||||
}
|
||||
|
||||
const nextBin = path.resolve(process.cwd(), 'node_modules', 'next', 'dist', 'bin', 'next');
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
const child = spawn(process.execPath, [nextBin, ...args], {
|
||||
env: process.env,
|
||||
stdio: 'inherit',
|
||||
});
|
||||
|
||||
child.on('exit', (code, signal) => {
|
||||
if (typeof code === 'number') {
|
||||
process.exitCode = code;
|
||||
return;
|
||||
}
|
||||
|
||||
process.exitCode = signal ? 1 : 0;
|
||||
});
|
||||
Loading…
Reference in a new issue