mirror of
https://github.com/coleam00/Archon
synced 2026-04-21 13:37:41 +00:00
Add Archon distribution config and directory structure (#101)
* Add Archon distribution config and directory structure
- Create centralized path resolution in src/utils/archon-paths.ts
- Add YAML configuration system (src/config/) with layered loading
- Update Dockerfile and docker-compose for /.archon/ directory
- Add GHCR publish workflow for multi-arch Docker builds
- Create deploy/ directory with end-user docker-compose
- Add /init command to create .archon structure in repos
- Add docs/configuration.md reference guide
- Update README with Quick Start section
- Add bun run validate script
- Update tests for new path defaults (~/.archon/)
Directory structure:
- Local: ~/.archon/{workspaces,worktrees,config.yaml}
- Docker: /.archon/{workspaces,worktrees}
- Repo: .archon/{commands,workflows,config.yaml}
Legacy WORKSPACE_PATH and WORKTREE_BASE env vars still supported.
* Complete Archon distribution config implementation
- Wire up config system in src/index.ts (Task 3.5)
- Remove legacy WORKSPACE_PATH and WORKTREE_BASE support
- Add logConfig() function to config-loader.ts
- Update docker-compose.yml to use ARCHON_DOCKER env var
- Remove legacy env vars from .env.example
- Update all documentation to reference ARCHON_HOME
- Create scripts/validate-setup.sh for setup validation
- Add setup:check script to package.json
- Create docs/getting-started.md guide
- Create docs/archon-architecture.md technical docs
- Update tests to use ARCHON_HOME instead of legacy vars
- Fix validate.md command template for new paths
All plan phases now complete:
- Phase 1: Archon Directory Structure
- Phase 2: Docker Distribution
- Phase 3: YAML Configuration System
- Phase 4: Developer Experience
- Phase 5: Documentation
This commit is contained in:
parent
6cd733c922
commit
3026a6445d
36 changed files with 4956 additions and 228 deletions
2831
.agents/plans/completed/archon-distribution-config.plan.md
Normal file
2831
.agents/plans/completed/archon-distribution-config.plan.md
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -78,11 +78,11 @@ source .env
|
|||
PROJECT_ROOT="$(pwd)"
|
||||
export PROJECT_ROOT
|
||||
|
||||
# Determine workspace path (use WORKSPACE_PATH from .env or fallback to ./workspace)
|
||||
if [ -n "$WORKSPACE_PATH" ]; then
|
||||
WORK_DIR="$WORKSPACE_PATH"
|
||||
# Determine workspace path (use ARCHON_HOME from .env or fallback to ~/.archon)
|
||||
if [ -n "$ARCHON_HOME" ]; then
|
||||
WORK_DIR="${ARCHON_HOME}/workspaces"
|
||||
else
|
||||
WORK_DIR="workspace"
|
||||
WORK_DIR="${HOME}/.archon/workspaces"
|
||||
fi
|
||||
|
||||
echo "Using workspace directory: ${WORK_DIR}"
|
||||
|
|
@ -137,7 +137,7 @@ export WORK_DIR
|
|||
**Why this is needed:**
|
||||
1. **Workspace cleanup:** The workspace is mounted from the host into the Docker container. If a directory exists on the host, git clone inside the container will fail with "directory already exists".
|
||||
2. **Database cleanup:** Test adapter conversations (e.g., `test-e2e`) persist across validation runs. Without cleanup, old conversations retain their original `ai_assistant_type` even if `DEFAULT_AI_ASSISTANT` environment variable has changed. This causes the test to use the wrong AI assistant.
|
||||
3. **WORKSPACE_PATH support:** Reads WORKSPACE_PATH from .env to support custom workspace directories (e.g., `C:\Users\colem\remote-agent-repos` on Windows or `/tmp/workspace` on Linux).
|
||||
3. **ARCHON_HOME support:** Reads ARCHON_HOME from .env to use a custom base directory. Default: `~/.archon` (workspaces at `~/.archon/workspaces`).
|
||||
4. **Remote database support:** Works with both local PostgreSQL and remote databases (like Supabase) by using `psql` with the connection string directly, with Node.js fallback.
|
||||
|
||||
### 2.1 Store ngrok URL
|
||||
|
|
@ -1438,7 +1438,7 @@ rm -rf "${WORK_DIR}/${TEST_REPO_NAME}"
|
|||
- **Batch Mode**: GitHub responses should be single comments, not streaming (verified in Phase 6-7)
|
||||
- **Database**: Queries use psql (if available) or Node.js fallback - works with both local and remote databases
|
||||
- **Database Validation**: Critical throughout - verifies conversations, sessions, and state transitions
|
||||
- **Workspace**: Uses WORKSPACE_PATH from .env (or defaults to ./workspace), cleaned automatically at start
|
||||
- **Workspace**: Uses ARCHON_HOME from .env (or defaults to ~/.archon/workspaces), cleaned automatically at start
|
||||
- **Webhook**: Automatically configured with secret from `.env`
|
||||
|
||||
### Database Validation Checkpoints
|
||||
|
|
|
|||
22
.env.example
22
.env.example
|
|
@ -77,16 +77,15 @@ BOT_DISPLAY_NAME=CodingAgent # e.g., "My-bot", "CodeBot", etc.
|
|||
GITHUB_BOT_MENTION=remote-agent
|
||||
|
||||
# Optional
|
||||
# WORKSPACE_PATH: Directory where cloned repositories will be stored
|
||||
# RECOMMENDED: Use a path outside your project directory to avoid nested repos
|
||||
# Examples:
|
||||
# - /tmp/remote-agent-workspace (temporary, auto-cleaned on reboot - Linux/Mac)
|
||||
# - ~/remote-agent-workspace (persistent in home directory - Linux/Mac)
|
||||
# - C:\Users\[your-user-ID]\remote-agent-workspace (Windows)
|
||||
# AVOID: ./workspace (causes repo-inside-repo when working on this project)
|
||||
WORKSPACE_PATH=
|
||||
PORT=3000
|
||||
|
||||
# ============================================
|
||||
# Archon Directory Configuration (NEW)
|
||||
# ============================================
|
||||
# All Archon-managed files go in ~/.archon/ by default
|
||||
# Override with ARCHON_HOME to use a custom location
|
||||
# ARCHON_HOME=~/.archon
|
||||
|
||||
# Concurrency
|
||||
MAX_CONCURRENT_CONVERSATIONS=10 # Maximum concurrent AI conversations (default: 10)
|
||||
|
||||
|
|
@ -95,10 +94,3 @@ MAX_CONCURRENT_CONVERSATIONS=10 # Maximum concurrent AI conversations (default:
|
|||
# (plan, implement, commit, review-pr, etc.) that ship with this repo.
|
||||
# These are updated by the repo maintainers with each release.
|
||||
LOAD_BUILTIN_COMMANDS=true # true (default) | false
|
||||
|
||||
# Worktree Configuration
|
||||
# Base directory where worktrees are created for GitHub issues/PRs
|
||||
# Set to same value as skill's worktreeBase for symbiosis with worktree-manager skill
|
||||
# Default: ${WORKSPACE_PATH}/../worktrees (sibling to workspace)
|
||||
# Example for skill symbiosis: ~/tmp/worktrees
|
||||
WORKTREE_BASE=
|
||||
|
|
|
|||
61
.github/workflows/publish.yml
vendored
Normal file
61
.github/workflows/publish.yml
vendored
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
name: Publish
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=sha
|
||||
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
2
.github/workflows/test.yml
vendored
2
.github/workflows/test.yml
vendored
|
|
@ -36,7 +36,7 @@ jobs:
|
|||
# 1. Run all tests EXCEPT orchestrator (which mocks command-handler and factory)
|
||||
# 2. Run orchestrator tests last so its mocks don't affect other test files
|
||||
run: |
|
||||
bun test --coverage src/db src/utils src/handlers src/clients src/adapters
|
||||
bun test --coverage src/db src/utils src/handlers src/clients src/adapters src/config
|
||||
bun test --coverage src/orchestrator
|
||||
|
||||
- name: Upload coverage report
|
||||
|
|
|
|||
51
CLAUDE.md
51
CLAUDE.md
|
|
@ -265,7 +265,7 @@ SLACK_STREAMING_MODE=stream # Default: stream
|
|||
GITHUB_STREAMING_MODE=batch # Default: batch
|
||||
|
||||
# Optional
|
||||
WORKSPACE_PATH=/workspace
|
||||
ARCHON_HOME=~/.archon # Override the base directory
|
||||
PORT=3000
|
||||
|
||||
# Builtin Commands (default: true)
|
||||
|
|
@ -280,14 +280,17 @@ The app can work alongside the worktree-manager Claude Code skill. Both use git
|
|||
|
||||
**To enable symbiosis:**
|
||||
|
||||
1. Set `WORKTREE_BASE` to match the skill's `worktreeBase` config:
|
||||
```env
|
||||
WORKTREE_BASE=~/tmp/worktrees
|
||||
1. Configure the worktree-manager skill to use Archon's worktrees directory:
|
||||
```json
|
||||
// In ~/.claude/settings.json or worktree-manager config
|
||||
{
|
||||
"worktreeBase": "~/.archon/worktrees"
|
||||
}
|
||||
```
|
||||
|
||||
2. Both systems will use the same directory:
|
||||
- Skill creates: `~/tmp/worktrees/<project>/<branch-slug>/`
|
||||
- App creates: `~/tmp/worktrees/<project>/<issue|pr>-<number>/`
|
||||
- Skill creates: `~/.archon/worktrees/<project>/<branch-slug>/`
|
||||
- App creates: `~/.archon/worktrees/<project>/<issue|pr>-<number>/`
|
||||
|
||||
3. The app will **adopt** skill-created worktrees when:
|
||||
- A PR is opened for a branch that already has a worktree
|
||||
|
|
@ -301,6 +304,39 @@ The app can work alongside the worktree-manager Claude Code skill. Both use git
|
|||
|
||||
Git (`git worktree list`) is the source of truth for what actually exists on disk.
|
||||
|
||||
### Archon Directory Structure
|
||||
|
||||
All Archon-managed files are organized under a dedicated namespace:
|
||||
|
||||
**User-level (`~/.archon/`):**
|
||||
```
|
||||
~/.archon/
|
||||
├── workspaces/ # Cloned repositories (via /clone)
|
||||
│ └── owner/repo/
|
||||
├── worktrees/ # Git worktrees for isolation
|
||||
│ └── repo-name/
|
||||
│ └── branch-name/
|
||||
└── config.yaml # Global configuration (non-secrets)
|
||||
```
|
||||
|
||||
**Repo-level (`.archon/` in any repository):**
|
||||
```
|
||||
.archon/
|
||||
├── commands/ # Custom command templates
|
||||
├── workflows/ # Future: workflow definitions
|
||||
└── config.yaml # Repo-specific configuration
|
||||
```
|
||||
|
||||
**For Docker:** Paths are automatically set to `/.archon/`.
|
||||
|
||||
**Configuration:**
|
||||
- `ARCHON_HOME` - Override the base directory (default: `~/.archon`)
|
||||
|
||||
**Command folder detection priority:**
|
||||
1. `.archon/commands/` - Archon-specific commands
|
||||
2. `.claude/commands/` - Claude Code standard location
|
||||
3. `.agents/commands/` - Alternative location
|
||||
|
||||
## Development Guidelines
|
||||
|
||||
### When Creating New Features
|
||||
|
|
@ -599,8 +635,7 @@ try {
|
|||
- Run app locally: `bun run dev` (hot reload enabled)
|
||||
|
||||
**Volumes:**
|
||||
- `/workspace` - Cloned repositories
|
||||
- Mount via `WORKSPACE_PATH` env var
|
||||
- `/.archon/` - All Archon-managed data (workspaces, worktrees)
|
||||
|
||||
**Networking:**
|
||||
- App: Port 3000 (configurable via `PORT` env var)
|
||||
|
|
|
|||
20
Dockerfile
20
Dockerfile
|
|
@ -1,5 +1,10 @@
|
|||
FROM oven/bun:1-slim
|
||||
|
||||
# OCI Labels for GHCR
|
||||
LABEL org.opencontainers.image.source="https://github.com/dynamous-community/remote-coding-agent"
|
||||
LABEL org.opencontainers.image.description="Control AI coding assistants remotely from Telegram, Slack, Discord, and GitHub"
|
||||
LABEL org.opencontainers.image.licenses="MIT"
|
||||
|
||||
# Prevent interactive prompts during installation
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
|
|
@ -26,8 +31,11 @@ RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | d
|
|||
# Create non-root user for running Claude Code
|
||||
# Claude Code refuses to run with --dangerously-skip-permissions as root for security
|
||||
RUN useradd -m -u 1001 -s /bin/bash appuser \
|
||||
&& mkdir -p /workspace \
|
||||
&& chown -R appuser:appuser /app /workspace
|
||||
&& chown -R appuser:appuser /app
|
||||
|
||||
# Create Archon directories
|
||||
RUN mkdir -p /.archon/workspaces /.archon/worktrees \
|
||||
&& chown -R appuser:appuser /.archon
|
||||
|
||||
# Copy package files and lockfile
|
||||
COPY package.json bun.lock ./
|
||||
|
|
@ -53,11 +61,13 @@ USER appuser
|
|||
# Create .codex directory for Codex authentication
|
||||
RUN mkdir -p /home/appuser/.codex
|
||||
|
||||
# Configure git to trust /workspace directory
|
||||
# Configure git to trust Archon directories
|
||||
# This prevents "fatal: detected dubious ownership" errors when git operations
|
||||
# are performed in mounted volumes or repos cloned by different users
|
||||
RUN git config --global --add safe.directory /workspace && \
|
||||
git config --global --add safe.directory '/workspace/*'
|
||||
RUN git config --global --add safe.directory '/.archon/workspaces' && \
|
||||
git config --global --add safe.directory '/.archon/workspaces/*' && \
|
||||
git config --global --add safe.directory '/.archon/worktrees' && \
|
||||
git config --global --add safe.directory '/.archon/worktrees/*'
|
||||
|
||||
# Expose port
|
||||
EXPOSE 3000
|
||||
|
|
|
|||
82
README.md
82
README.md
|
|
@ -27,7 +27,72 @@ Control AI coding assistants (Claude Code, Codex) remotely from Telegram, GitHub
|
|||
|
||||
---
|
||||
|
||||
**🌐 Production Deployment:** This guide covers local development setup. To deploy remotely for 24/7 operation on a cloud VPS (DigitalOcean, AWS, Linode, etc.), see the **[Cloud Deployment Guide](docs/cloud-deployment.md)**.
|
||||
## Quick Start
|
||||
|
||||
### Option 1: Docker (Recommended for trying it out)
|
||||
|
||||
```bash
|
||||
# 1. Get the files
|
||||
mkdir remote-agent && cd remote-agent
|
||||
curl -fsSL https://raw.githubusercontent.com/dynamous-community/remote-coding-agent/main/deploy/docker-compose.yml -o docker-compose.yml
|
||||
curl -fsSL https://raw.githubusercontent.com/dynamous-community/remote-coding-agent/main/deploy/.env.example -o .env
|
||||
|
||||
# 2. Configure (edit .env with your tokens)
|
||||
nano .env
|
||||
|
||||
# 3. Run
|
||||
docker compose up -d
|
||||
|
||||
# 4. Check it's working
|
||||
curl http://localhost:3000/health
|
||||
```
|
||||
|
||||
### Option 2: Local Development
|
||||
|
||||
```bash
|
||||
# 1. Clone and install
|
||||
git clone https://github.com/dynamous-community/remote-coding-agent
|
||||
cd remote-coding-agent
|
||||
bun install
|
||||
|
||||
# 2. Configure
|
||||
cp .env.example .env
|
||||
nano .env # Add your tokens
|
||||
|
||||
# 3. Start database
|
||||
docker compose --profile with-db up -d postgres
|
||||
|
||||
# 4. Run migrations
|
||||
psql $DATABASE_URL < migrations/000_combined.sql
|
||||
|
||||
# 5. Start with hot reload
|
||||
bun run dev
|
||||
|
||||
# 6. Validate setup
|
||||
bun run validate
|
||||
```
|
||||
|
||||
### Option 3: Self-Hosted Production
|
||||
|
||||
See [Cloud Deployment Guide](docs/cloud-deployment.md) for deploying to:
|
||||
- DigitalOcean, Linode, AWS EC2, or any VPS
|
||||
- With automatic HTTPS via Caddy
|
||||
|
||||
## Directory Structure
|
||||
|
||||
The app uses `~/.archon/` for all managed files:
|
||||
|
||||
```
|
||||
~/.archon/
|
||||
├── workspaces/ # Cloned repositories
|
||||
├── worktrees/ # Git worktrees for isolation
|
||||
└── config.yaml # Optional: global configuration
|
||||
```
|
||||
|
||||
On Windows: `C:\Users\<username>\.archon\`
|
||||
In Docker: `/.archon/`
|
||||
|
||||
See [Configuration Guide](docs/configuration.md) for customization options.
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -54,7 +119,7 @@ cp .env.example .env
|
|||
| `GH_TOKEN` | Repository cloning | [Generate token](https://github.com/settings/tokens) with `repo` scope |
|
||||
| `GITHUB_TOKEN` | Same as `GH_TOKEN` | Use same token value |
|
||||
| `PORT` | HTTP server port | Default: `3000` (optional) |
|
||||
| `WORKSPACE_PATH` | Clone destination | **Recommended**: `/tmp/remote-agent-workspace` or `~/remote-agent-workspace` (see note below) |
|
||||
| `ARCHON_HOME` | (Optional) Override base directory | Default: `~/.archon` |
|
||||
|
||||
**GitHub Personal Access Token Setup:**
|
||||
|
||||
|
|
@ -68,18 +133,7 @@ GH_TOKEN=ghp_your_token_here
|
|||
GITHUB_TOKEN=ghp_your_token_here # Same value
|
||||
```
|
||||
|
||||
**⚠️ Important: WORKSPACE_PATH Configuration**
|
||||
|
||||
The `WORKSPACE_PATH` determines where cloned repositories are stored. **Use a path outside your project directory** to avoid issues:
|
||||
|
||||
```env
|
||||
# Recommended options
|
||||
WORKSPACE_PATH=~/remote-agent-workspace (persistent in home directory - Linux/Mac)
|
||||
# or
|
||||
WORKSPACE_PATH=C:\Users\[your-user-ID]\remote-agent-workspace (Windows)
|
||||
```
|
||||
|
||||
**Docker note**: Inside containers, the path is always `/workspace` (mapped from your host `WORKSPACE_PATH` in docker-compose.yml).
|
||||
**Note:** Repository clones are stored in `~/.archon/workspaces/` by default (or `/.archon/workspaces/` in Docker). Set `ARCHON_HOME` to override the base directory.
|
||||
|
||||
**Database Setup - Choose One:**
|
||||
|
||||
|
|
|
|||
3
bun.lock
3
bun.lock
|
|
@ -15,6 +15,7 @@
|
|||
"pg": "^8.11.0",
|
||||
"telegraf": "^4.16.0",
|
||||
"telegramify-markdown": "^1.3.0",
|
||||
"yaml": "^2.7.1",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.39.1",
|
||||
|
|
@ -732,6 +733,8 @@
|
|||
|
||||
"xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="],
|
||||
|
||||
"yaml": ["yaml@2.8.2", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A=="],
|
||||
|
||||
"yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="],
|
||||
|
||||
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
|
||||
|
|
|
|||
48
deploy/.env.example
Normal file
48
deploy/.env.example
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
# Remote Coding Agent - Environment Configuration
|
||||
# Copy to .env and fill in your values
|
||||
|
||||
# ============================================
|
||||
# Required: Database
|
||||
# ============================================
|
||||
# Use a managed PostgreSQL (Supabase, Neon, etc.)
|
||||
DATABASE_URL=postgresql://user:password@host:5432/dbname
|
||||
|
||||
# Or uncomment postgres service in docker-compose.yml and use:
|
||||
# DATABASE_URL=postgresql://postgres:postgres@postgres:5432/remote_coding_agent
|
||||
|
||||
# ============================================
|
||||
# Required: AI Assistant (at least one)
|
||||
# ============================================
|
||||
# Claude (recommended) - Get token: claude setup-token
|
||||
CLAUDE_CODE_OAUTH_TOKEN=sk-ant-oat01-...
|
||||
|
||||
# Or Codex - Get from ~/.codex/auth.json after: codex login
|
||||
# CODEX_ID_TOKEN=...
|
||||
# CODEX_ACCESS_TOKEN=...
|
||||
# CODEX_REFRESH_TOKEN=...
|
||||
# CODEX_ACCOUNT_ID=...
|
||||
|
||||
# ============================================
|
||||
# Required: Platform (at least one)
|
||||
# ============================================
|
||||
# Telegram - Create bot via @BotFather
|
||||
TELEGRAM_BOT_TOKEN=123456789:ABC...
|
||||
|
||||
# Discord - Create bot at discord.com/developers
|
||||
# DISCORD_BOT_TOKEN=...
|
||||
|
||||
# Slack - Create app at api.slack.com/apps
|
||||
# SLACK_BOT_TOKEN=xoxb-...
|
||||
# SLACK_APP_TOKEN=xapp-...
|
||||
|
||||
# GitHub Webhooks
|
||||
# GH_TOKEN=ghp_...
|
||||
# GITHUB_TOKEN=ghp_...
|
||||
# WEBHOOK_SECRET=...
|
||||
|
||||
# ============================================
|
||||
# Optional
|
||||
# ============================================
|
||||
PORT=3000
|
||||
# TELEGRAM_STREAMING_MODE=stream
|
||||
# DISCORD_STREAMING_MODE=batch
|
||||
45
deploy/docker-compose.yml
Normal file
45
deploy/docker-compose.yml
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
# Remote Coding Agent - Docker Compose for End Users
|
||||
#
|
||||
# Usage:
|
||||
# 1. Copy this file and .env.example to your server
|
||||
# 2. Rename .env.example to .env and configure
|
||||
# 3. Run: docker compose up -d
|
||||
#
|
||||
# For full documentation, see:
|
||||
# https://github.com/dynamous-community/remote-coding-agent
|
||||
|
||||
services:
|
||||
app:
|
||||
image: ghcr.io/dynamous-community/remote-coding-agent:latest
|
||||
restart: unless-stopped
|
||||
env_file: .env
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- archon_data:/.archon
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 10s
|
||||
|
||||
# Uncomment to run PostgreSQL locally
|
||||
# postgres:
|
||||
# image: postgres:16-alpine
|
||||
# restart: unless-stopped
|
||||
# environment:
|
||||
# POSTGRES_DB: remote_coding_agent
|
||||
# POSTGRES_USER: postgres
|
||||
# POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
|
||||
# volumes:
|
||||
# - postgres_data:/var/lib/postgresql/data
|
||||
# healthcheck:
|
||||
# test: ["CMD-SHELL", "pg_isready -U postgres"]
|
||||
# interval: 10s
|
||||
# timeout: 5s
|
||||
# retries: 5
|
||||
|
||||
volumes:
|
||||
archon_data:
|
||||
# postgres_data:
|
||||
|
|
@ -6,12 +6,12 @@ services:
|
|||
build: .
|
||||
env_file: .env
|
||||
environment:
|
||||
# Override WORKSPACE_PATH for container (host path differs from container path)
|
||||
WORKSPACE_PATH: /workspace
|
||||
# Signal Docker environment for Archon path detection
|
||||
ARCHON_DOCKER: "true"
|
||||
ports:
|
||||
- "${PORT:-3000}:${PORT:-3000}"
|
||||
volumes:
|
||||
- ${WORKSPACE_PATH:-./workspace}:/workspace
|
||||
- archon_data:/.archon # All Archon-managed data (workspaces, worktrees, config)
|
||||
restart: unless-stopped
|
||||
dns:
|
||||
- 8.8.8.8
|
||||
|
|
@ -28,12 +28,12 @@ services:
|
|||
environment:
|
||||
# Override DATABASE_URL to use Docker service name
|
||||
DATABASE_URL: postgresql://postgres:postgres@postgres:5432/remote_coding_agent
|
||||
# Override WORKSPACE_PATH for container (host path differs from container path)
|
||||
WORKSPACE_PATH: /workspace
|
||||
# Signal Docker environment for Archon path detection
|
||||
ARCHON_DOCKER: "true"
|
||||
ports:
|
||||
- "${PORT:-3000}:${PORT:-3000}"
|
||||
volumes:
|
||||
- ${WORKSPACE_PATH:-./workspace}:/workspace
|
||||
- archon_data:/.archon # All Archon-managed data (workspaces, worktrees, config)
|
||||
restart: unless-stopped
|
||||
dns:
|
||||
- 8.8.8.8
|
||||
|
|
@ -67,3 +67,4 @@ services:
|
|||
|
||||
volumes:
|
||||
postgres_data:
|
||||
archon_data: # Persistent Archon data
|
||||
|
|
|
|||
|
|
@ -528,15 +528,15 @@ export class WorktreeProvider implements IIsolationProvider {
|
|||
### Storage Location
|
||||
|
||||
```
|
||||
LOCAL: ~/tmp/worktrees/<project>/<branch>/ ← WORKTREE_BASE can override
|
||||
DOCKER: /workspace/worktrees/<project>/<branch>/ ← FIXED, no override
|
||||
LOCAL: ~/.archon/worktrees/<project>/<branch>/ ← ARCHON_HOME can override base
|
||||
DOCKER: /.archon/worktrees/<project>/<branch>/ ← FIXED, no override
|
||||
```
|
||||
|
||||
**Logic in `getWorktreeBase()`:**
|
||||
|
||||
1. Docker detected? → `/workspace/worktrees` (always, no override)
|
||||
2. `WORKTREE_BASE` set? → use it (local only)
|
||||
3. Default → `~/tmp/worktrees`
|
||||
1. Docker detected? → `/.archon/worktrees` (always, no override)
|
||||
2. `ARCHON_HOME` set? → `${ARCHON_HOME}/worktrees`
|
||||
3. Default → `~/.archon/worktrees`
|
||||
|
||||
### Usage Pattern
|
||||
|
||||
|
|
|
|||
238
docs/archon-architecture.md
Normal file
238
docs/archon-architecture.md
Normal file
|
|
@ -0,0 +1,238 @@
|
|||
# Archon Architecture
|
||||
|
||||
This document explains the Archon directory structure and configuration system for developers contributing to or extending the remote-coding-agent.
|
||||
|
||||
## Overview
|
||||
|
||||
Archon is the unified directory and configuration system for the remote-coding-agent. It provides:
|
||||
|
||||
1. **Consistent paths** across all platforms (Mac, Linux, Windows, Docker)
|
||||
2. **Configuration precedence** chain (env > global > repo > defaults)
|
||||
3. **Future-ready structure** for workflow engine and UI integration
|
||||
|
||||
## Directory Structure
|
||||
|
||||
### User-Level: `~/.archon/`
|
||||
|
||||
```
|
||||
~/.archon/ # ARCHON_HOME
|
||||
├── workspaces/ # Cloned repositories
|
||||
│ └── owner/
|
||||
│ └── repo/
|
||||
├── worktrees/ # Git worktrees for isolation
|
||||
│ └── repo-name/
|
||||
│ └── branch-name/
|
||||
└── config.yaml # Global user configuration
|
||||
```
|
||||
|
||||
**Purpose:**
|
||||
- `workspaces/` - Repositories cloned via `/clone` command or GitHub adapter
|
||||
- `worktrees/` - Isolated git worktrees created per conversation/issue/PR
|
||||
- `config.yaml` - Non-secret user preferences
|
||||
|
||||
### Repo-Level: `.archon/`
|
||||
|
||||
```
|
||||
any-repo/.archon/
|
||||
├── commands/ # Custom command templates
|
||||
│ ├── plan.md
|
||||
│ └── execute.md
|
||||
├── workflows/ # Future: workflow definitions
|
||||
│ └── pr-review.yaml
|
||||
└── config.yaml # Repo-specific configuration
|
||||
```
|
||||
|
||||
**Purpose:**
|
||||
- `commands/` - Slash command templates (priority over `.claude/commands/`, `.agents/commands/`)
|
||||
- `workflows/` - Future workflow engine definitions
|
||||
- `config.yaml` - Project-specific settings
|
||||
|
||||
### Docker: `/.archon/`
|
||||
|
||||
In Docker containers, the Archon home is fixed at `/.archon/` (root level). This is:
|
||||
- Mounted as a named volume for persistence
|
||||
- Not overridable by end users (simplifies container setup)
|
||||
|
||||
## Path Resolution
|
||||
|
||||
All path resolution is centralized in `src/utils/archon-paths.ts`.
|
||||
|
||||
### Core Functions
|
||||
|
||||
```typescript
|
||||
// Get the Archon home directory
|
||||
getArchonHome(): string
|
||||
// Returns: ~/.archon (local) or /.archon (Docker)
|
||||
|
||||
// Get workspaces directory
|
||||
getArchonWorkspacesPath(): string
|
||||
// Returns: ${ARCHON_HOME}/workspaces
|
||||
|
||||
// Get worktrees directory
|
||||
getArchonWorktreesPath(): string
|
||||
// Returns: ${ARCHON_HOME}/worktrees
|
||||
|
||||
// Get global config path
|
||||
getArchonConfigPath(): string
|
||||
// Returns: ${ARCHON_HOME}/config.yaml
|
||||
|
||||
// Get command folder search paths (priority order)
|
||||
getCommandFolderSearchPaths(): string[]
|
||||
// Returns: ['.archon/commands', '.claude/commands', '.agents/commands']
|
||||
```
|
||||
|
||||
### Docker Detection
|
||||
|
||||
```typescript
|
||||
function isDocker(): boolean {
|
||||
return (
|
||||
process.env.WORKSPACE_PATH === '/workspace' ||
|
||||
(process.env.HOME === '/root' && Boolean(process.env.WORKSPACE_PATH)) ||
|
||||
process.env.ARCHON_DOCKER === 'true'
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Platform-Specific Paths
|
||||
|
||||
| Platform | `getArchonHome()` |
|
||||
|----------|-------------------|
|
||||
| macOS | `/Users/<username>/.archon` |
|
||||
| Linux | `/home/<username>/.archon` |
|
||||
| Windows | `C:\Users\<username>\.archon` |
|
||||
| Docker | `/.archon` |
|
||||
|
||||
## Configuration System
|
||||
|
||||
### Precedence Chain
|
||||
|
||||
Configuration is resolved in this order (highest to lowest priority):
|
||||
|
||||
1. **Environment Variables** - Secrets, deployment-specific
|
||||
2. **Global Config** (`~/.archon/config.yaml`) - User preferences
|
||||
3. **Repo Config** (`.archon/config.yaml`) - Project-specific
|
||||
4. **Built-in Defaults** - Hardcoded in `src/config/config-types.ts`
|
||||
|
||||
### Config Loading
|
||||
|
||||
```typescript
|
||||
// Load merged config for a repo
|
||||
const config = await loadConfig(repoPath);
|
||||
|
||||
// Load just global config
|
||||
const globalConfig = await loadGlobalConfig();
|
||||
|
||||
// Load just repo config
|
||||
const repoConfig = await loadRepoConfig(repoPath);
|
||||
```
|
||||
|
||||
### Configuration Options
|
||||
|
||||
Key configuration options:
|
||||
|
||||
| Option | Env Override | Default |
|
||||
|--------|--------------|---------|
|
||||
| `ARCHON_HOME` | `ARCHON_HOME` | `~/.archon` |
|
||||
| Default AI Assistant | `DEFAULT_AI_ASSISTANT` | `claude` |
|
||||
| Telegram Streaming | `TELEGRAM_STREAMING_MODE` | `stream` |
|
||||
| Discord Streaming | `DISCORD_STREAMING_MODE` | `batch` |
|
||||
| Slack Streaming | `SLACK_STREAMING_MODE` | `batch` |
|
||||
| GitHub Streaming | `GITHUB_STREAMING_MODE` | `batch` |
|
||||
|
||||
## Command Folders
|
||||
|
||||
Command detection searches in priority order:
|
||||
|
||||
1. `.archon/commands/` - Archon-specific commands
|
||||
2. `.claude/commands/` - Claude Code standard location
|
||||
3. `.agents/commands/` - Alternative location
|
||||
|
||||
First match wins. No migration required.
|
||||
|
||||
## Extension Points
|
||||
|
||||
### Adding New Paths
|
||||
|
||||
To add a new managed directory:
|
||||
|
||||
1. Add function to `src/utils/archon-paths.ts`:
|
||||
```typescript
|
||||
export function getArchonNewPath(): string {
|
||||
return join(getArchonHome(), 'new-directory');
|
||||
}
|
||||
```
|
||||
|
||||
2. Update Docker setup in `Dockerfile`
|
||||
3. Update volume mounts in `docker-compose.yml`
|
||||
4. Add tests in `src/utils/archon-paths.test.ts`
|
||||
|
||||
### Adding Config Options
|
||||
|
||||
To add new configuration options:
|
||||
|
||||
1. Add type to `src/config/config-types.ts`:
|
||||
```typescript
|
||||
export interface GlobalConfig {
|
||||
// ...existing
|
||||
newFeature?: {
|
||||
enabled?: boolean;
|
||||
setting?: string;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
2. Add default in `getDefaults()` function
|
||||
3. Use via `loadConfig()` in your code
|
||||
|
||||
## Design Decisions
|
||||
|
||||
### Why `~/.archon/` instead of `~/.config/archon/`?
|
||||
|
||||
- Simpler path (fewer nested directories)
|
||||
- Follows Claude Code pattern (`~/.claude/`)
|
||||
- Cross-platform without XDG complexity
|
||||
- Easy to find and manage manually
|
||||
|
||||
### Why YAML for config?
|
||||
|
||||
- Bun has native support (via `yaml` package)
|
||||
- Supports comments (unlike JSON)
|
||||
- Future workflow definitions need YAML
|
||||
- Human-readable and editable
|
||||
|
||||
### Why fixed Docker paths?
|
||||
|
||||
- Simplifies container setup
|
||||
- Predictable volume mounts
|
||||
- No user confusion about env vars in containers
|
||||
- Matches convention (apps use fixed paths in containers)
|
||||
|
||||
### Why config precedence chain?
|
||||
|
||||
- Mirrors git config pattern (familiar to developers)
|
||||
- Secrets stay in env vars (security)
|
||||
- User preferences in global config (portable)
|
||||
- Project settings in repo config (version-controlled)
|
||||
|
||||
## Future Considerations
|
||||
|
||||
### Workflow Engine
|
||||
|
||||
The `.archon/workflows/` directory is reserved for:
|
||||
- YAML workflow definitions
|
||||
- Multi-step automated processes
|
||||
- Agent orchestration rules
|
||||
|
||||
### UI Integration
|
||||
|
||||
The config type system is designed for:
|
||||
- Future web UI configuration
|
||||
- API-driven config updates
|
||||
- Real-time config validation
|
||||
|
||||
### Multi-Tenant / SaaS
|
||||
|
||||
Path structure supports future scenarios:
|
||||
- Per-user isolation
|
||||
- Organization-level config
|
||||
- Shared workflow templates
|
||||
|
|
@ -260,7 +260,7 @@ GITHUB_TOKEN=ghp_your_token_here
|
|||
|
||||
# Server settings
|
||||
PORT=3000
|
||||
WORKSPACE_PATH=/tmp/remote-agent-workspace # Use external path to avoid nested repos
|
||||
ARCHON_HOME=/tmp/archon # Override base directory (optional)
|
||||
```
|
||||
|
||||
**GitHub Token Setup:**
|
||||
|
|
|
|||
147
docs/configuration.md
Normal file
147
docs/configuration.md
Normal file
|
|
@ -0,0 +1,147 @@
|
|||
# Configuration Guide
|
||||
|
||||
Archon supports a layered configuration system with sensible defaults, optional YAML config files, and environment variable overrides.
|
||||
|
||||
## Directory Structure
|
||||
|
||||
### User-Level (~/.archon/)
|
||||
|
||||
```
|
||||
~/.archon/
|
||||
├── workspaces/ # Cloned repositories
|
||||
│ └── owner/repo/
|
||||
├── worktrees/ # Git worktrees for isolation
|
||||
│ └── repo-name/
|
||||
│ └── branch-name/
|
||||
└── config.yaml # Global configuration (optional)
|
||||
```
|
||||
|
||||
### Repository-Level (.archon/)
|
||||
|
||||
```
|
||||
.archon/
|
||||
├── commands/ # Custom command templates
|
||||
│ └── plan.md
|
||||
├── workflows/ # Future: workflow definitions
|
||||
└── config.yaml # Repo-specific configuration (optional)
|
||||
```
|
||||
|
||||
## Configuration Priority
|
||||
|
||||
Settings are loaded in this order (later overrides earlier):
|
||||
|
||||
1. **Defaults** - Sensible built-in defaults
|
||||
2. **Global Config** - `~/.archon/config.yaml`
|
||||
3. **Repo Config** - `.archon/config.yaml` in repository
|
||||
4. **Environment Variables** - Always highest priority
|
||||
|
||||
## Global Configuration
|
||||
|
||||
Create `~/.archon/config.yaml` for user-wide preferences:
|
||||
|
||||
```yaml
|
||||
# Default AI assistant
|
||||
defaultAssistant: claude # or 'codex'
|
||||
|
||||
# Streaming preferences per platform
|
||||
streaming:
|
||||
telegram: stream # 'stream' or 'batch'
|
||||
discord: batch
|
||||
slack: batch
|
||||
github: batch
|
||||
|
||||
# Custom paths (usually not needed)
|
||||
paths:
|
||||
workspaces: ~/.archon/workspaces
|
||||
worktrees: ~/.archon/worktrees
|
||||
|
||||
# Concurrency limits
|
||||
concurrency:
|
||||
maxConversations: 10
|
||||
```
|
||||
|
||||
## Repository Configuration
|
||||
|
||||
Create `.archon/config.yaml` in any repository for project-specific settings:
|
||||
|
||||
```yaml
|
||||
# AI assistant for this project
|
||||
assistant: claude
|
||||
|
||||
# Commands configuration
|
||||
commands:
|
||||
folder: .archon/commands
|
||||
autoLoad: true
|
||||
|
||||
# Worktree settings
|
||||
worktree:
|
||||
baseBranch: main
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
Environment variables override all other configuration:
|
||||
|
||||
| Variable | Description | Default |
|
||||
| ------------------------------ | -------------------------- | ------------- |
|
||||
| `ARCHON_HOME` | Base directory for Archon | `~/.archon` |
|
||||
| `DEFAULT_AI_ASSISTANT` | Default AI assistant | `claude` |
|
||||
| `TELEGRAM_STREAMING_MODE` | Telegram streaming | `stream` |
|
||||
| `DISCORD_STREAMING_MODE` | Discord streaming | `batch` |
|
||||
| `SLACK_STREAMING_MODE` | Slack streaming | `batch` |
|
||||
| `GITHUB_STREAMING_MODE` | GitHub streaming | `batch` |
|
||||
| `MAX_CONCURRENT_CONVERSATIONS` | Concurrency limit | `10` |
|
||||
|
||||
## Docker Configuration
|
||||
|
||||
In Docker containers, paths are automatically set:
|
||||
|
||||
```
|
||||
/.archon/
|
||||
├── workspaces/
|
||||
└── worktrees/
|
||||
```
|
||||
|
||||
Environment variables still work and override defaults.
|
||||
|
||||
## Command Folder Detection
|
||||
|
||||
When cloning or switching repositories, Archon looks for commands in this priority order:
|
||||
|
||||
1. `.archon/commands/` - Archon-specific commands
|
||||
2. `.claude/commands/` - Claude Code standard location
|
||||
3. `.agents/commands/` - Alternative location
|
||||
|
||||
First found folder is used.
|
||||
|
||||
## Examples
|
||||
|
||||
### Minimal Setup (Using Defaults)
|
||||
|
||||
No configuration needed! Archon works out of the box with:
|
||||
|
||||
- `~/.archon/` for all managed files
|
||||
- Claude as default AI assistant
|
||||
- Platform-appropriate streaming modes
|
||||
|
||||
### Custom AI Preference
|
||||
|
||||
```yaml
|
||||
# ~/.archon/config.yaml
|
||||
defaultAssistant: codex
|
||||
```
|
||||
|
||||
### Project-Specific Settings
|
||||
|
||||
```yaml
|
||||
# .archon/config.yaml in your repo
|
||||
assistant: claude
|
||||
commands:
|
||||
autoLoad: true
|
||||
```
|
||||
|
||||
### Docker with Custom Volume
|
||||
|
||||
```bash
|
||||
docker run -v /my/data:/.archon ghcr.io/dynamous-community/remote-coding-agent
|
||||
```
|
||||
190
docs/getting-started.md
Normal file
190
docs/getting-started.md
Normal file
|
|
@ -0,0 +1,190 @@
|
|||
# Getting Started
|
||||
|
||||
This guide walks you through setting up the Remote Coding Agent from scratch.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before you begin, you'll need:
|
||||
|
||||
1. **Docker** (recommended) or **Bun** runtime
|
||||
2. **PostgreSQL** database (local or managed like Supabase/Neon)
|
||||
3. **AI Assistant credentials** (Claude or Codex)
|
||||
4. **Platform credentials** (Telegram, Discord, Slack, or GitHub)
|
||||
|
||||
## Step 1: Choose Your Setup Method
|
||||
|
||||
| Method | Best For | Time |
|
||||
|--------|----------|------|
|
||||
| [Docker Quick Start](#docker-quick-start) | Trying it out, production | ~10 min |
|
||||
| [Local Development](#local-development) | Contributing, customizing | ~15 min |
|
||||
| [Cloud Deployment](cloud-deployment.md) | 24/7 self-hosted | ~30 min |
|
||||
|
||||
## Docker Quick Start
|
||||
|
||||
### 1.1 Get the Files
|
||||
|
||||
```bash
|
||||
mkdir remote-agent && cd remote-agent
|
||||
|
||||
# Download docker-compose and env template
|
||||
curl -fsSL https://raw.githubusercontent.com/dynamous-community/remote-coding-agent/main/deploy/docker-compose.yml -o docker-compose.yml
|
||||
curl -fsSL https://raw.githubusercontent.com/dynamous-community/remote-coding-agent/main/deploy/.env.example -o .env
|
||||
```
|
||||
|
||||
### 1.2 Get Your Credentials
|
||||
|
||||
#### Database
|
||||
|
||||
**Option A: Use a managed database (recommended)**
|
||||
1. Create a free database at [Supabase](https://supabase.com) or [Neon](https://neon.tech)
|
||||
2. Copy the connection string
|
||||
|
||||
**Option B: Run PostgreSQL locally**
|
||||
- Uncomment the postgres service in docker-compose.yml
|
||||
- Use: `postgresql://postgres:postgres@postgres:5432/remote_coding_agent`
|
||||
|
||||
#### AI Assistant
|
||||
|
||||
**Claude (recommended):**
|
||||
1. Install Claude Code CLI: https://docs.anthropic.com/claude-code
|
||||
2. Run: `claude setup-token`
|
||||
3. Copy the token (starts with `sk-ant-oat01-`)
|
||||
|
||||
**Codex:**
|
||||
1. Run: `codex login`
|
||||
2. Copy credentials from `~/.codex/auth.json`
|
||||
|
||||
#### Platform (choose at least one)
|
||||
|
||||
**Telegram:**
|
||||
1. Message [@BotFather](https://t.me/BotFather) on Telegram
|
||||
2. Send `/newbot` and follow prompts
|
||||
3. Copy the bot token
|
||||
|
||||
**Discord:**
|
||||
1. Go to [Discord Developer Portal](https://discord.com/developers/applications)
|
||||
2. Create New Application > Bot > Reset Token
|
||||
3. Enable MESSAGE CONTENT INTENT in Bot settings
|
||||
4. Copy the bot token
|
||||
|
||||
**Slack:**
|
||||
1. Go to [Slack API](https://api.slack.com/apps)
|
||||
2. Create New App > From Scratch
|
||||
3. See [Slack Setup Guide](slack-setup.md) for detailed steps
|
||||
|
||||
### 1.3 Configure
|
||||
|
||||
Edit `.env` with your credentials:
|
||||
|
||||
```bash
|
||||
nano .env
|
||||
```
|
||||
|
||||
At minimum, set:
|
||||
- `DATABASE_URL`
|
||||
- One AI assistant (`CLAUDE_CODE_OAUTH_TOKEN` or Codex credentials)
|
||||
- One platform (`TELEGRAM_BOT_TOKEN`, `DISCORD_BOT_TOKEN`, etc.)
|
||||
|
||||
### 1.4 Start
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
### 1.5 Verify
|
||||
|
||||
```bash
|
||||
# Check health
|
||||
curl http://localhost:3000/health
|
||||
# Expected: {"status":"ok"}
|
||||
|
||||
# Check database
|
||||
curl http://localhost:3000/health/db
|
||||
# Expected: {"status":"ok","database":"connected"}
|
||||
```
|
||||
|
||||
### 1.6 Test Your Bot
|
||||
|
||||
Send a message to your bot:
|
||||
- **Telegram**: Message your bot with `/help`
|
||||
- **Discord**: Mention your bot with `@botname /help`
|
||||
- **Slack**: Message your bot with `/help`
|
||||
|
||||
## Local Development
|
||||
|
||||
### 2.1 Clone and Install
|
||||
|
||||
```bash
|
||||
git clone https://github.com/dynamous-community/remote-coding-agent
|
||||
cd remote-coding-agent
|
||||
bun install
|
||||
```
|
||||
|
||||
### 2.2 Configure
|
||||
|
||||
```bash
|
||||
cp .env.example .env
|
||||
nano .env # Add your credentials (same as Docker method)
|
||||
```
|
||||
|
||||
### 2.3 Start Database
|
||||
|
||||
```bash
|
||||
docker compose --profile with-db up -d postgres
|
||||
```
|
||||
|
||||
### 2.4 Run Migrations
|
||||
|
||||
```bash
|
||||
psql $DATABASE_URL < migrations/000_combined.sql
|
||||
```
|
||||
|
||||
### 2.5 Validate Setup
|
||||
|
||||
```bash
|
||||
bun run setup:check
|
||||
```
|
||||
|
||||
### 2.6 Start Development Server
|
||||
|
||||
```bash
|
||||
bun run dev
|
||||
```
|
||||
|
||||
The server starts with hot reload. Changes to code automatically restart.
|
||||
|
||||
## Next Steps
|
||||
|
||||
- [Configuration Guide](configuration.md) - Customize settings
|
||||
- [Command System](../CLAUDE.md#command-system-patterns) - Create custom commands
|
||||
- [Cloud Deployment](cloud-deployment.md) - Deploy for 24/7 operation
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "Database connection failed"
|
||||
|
||||
1. Check `DATABASE_URL` is correct
|
||||
2. For managed DB: Ensure IP is whitelisted
|
||||
3. For local: Ensure postgres container is running: `docker compose ps`
|
||||
|
||||
### "No AI assistant credentials found"
|
||||
|
||||
Set at least one of:
|
||||
- `CLAUDE_CODE_OAUTH_TOKEN` (recommended)
|
||||
- `CLAUDE_API_KEY`
|
||||
- `CODEX_ID_TOKEN` + `CODEX_ACCESS_TOKEN` + `CODEX_REFRESH_TOKEN`
|
||||
|
||||
### "Bot not responding"
|
||||
|
||||
1. Check logs: `docker compose logs -f app` or terminal output for `bun run dev`
|
||||
2. Verify bot token is correct
|
||||
3. For Discord: Ensure MESSAGE CONTENT INTENT is enabled
|
||||
4. For Slack: Ensure Socket Mode is enabled
|
||||
|
||||
### Archon Directory Not Created
|
||||
|
||||
The `~/.archon/` directory is created automatically on first use. To create manually:
|
||||
|
||||
```bash
|
||||
mkdir -p ~/.archon/workspaces ~/.archon/worktrees
|
||||
```
|
||||
|
|
@ -5,16 +5,16 @@
|
|||
## Storage Location
|
||||
|
||||
```
|
||||
LOCAL: ~/tmp/worktrees/<project>/<branch>/ ← WORKTREE_BASE can override
|
||||
DOCKER: /workspace/worktrees/<project>/<branch>/ ← FIXED, no override
|
||||
LOCAL: ~/.archon/worktrees/<project>/<branch>/ ← ARCHON_HOME can override base
|
||||
DOCKER: /.archon/worktrees/<project>/<branch>/ ← FIXED, no override
|
||||
```
|
||||
|
||||
Detection order in `getWorktreeBase()`:
|
||||
|
||||
```
|
||||
1. isDocker? → /workspace/worktrees (ALWAYS)
|
||||
2. WORKTREE_BASE set? → use it (local only)
|
||||
3. default → ~/tmp/worktrees
|
||||
1. isDocker? → /.archon/worktrees (ALWAYS)
|
||||
2. ARCHON_HOME set? → ${ARCHON_HOME}/worktrees
|
||||
3. default → ~/.archon/worktrees
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
|
|
|||
|
|
@ -16,7 +16,9 @@
|
|||
"lint": "bun x eslint . --cache",
|
||||
"lint:fix": "bun x eslint . --cache --fix",
|
||||
"format": "bun x prettier --write .",
|
||||
"format:check": "bun x prettier --check ."
|
||||
"format:check": "bun x prettier --check .",
|
||||
"validate": "bun run type-check && bun run lint && bun test",
|
||||
"setup:check": "./scripts/validate-setup.sh"
|
||||
},
|
||||
"keywords": [
|
||||
"ai",
|
||||
|
|
@ -39,7 +41,8 @@
|
|||
"express": "^5.2.1",
|
||||
"pg": "^8.11.0",
|
||||
"telegraf": "^4.16.0",
|
||||
"telegramify-markdown": "^1.3.0"
|
||||
"telegramify-markdown": "^1.3.0",
|
||||
"yaml": "^2.7.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.39.1",
|
||||
|
|
|
|||
176
scripts/validate-setup.sh
Executable file
176
scripts/validate-setup.sh
Executable file
|
|
@ -0,0 +1,176 @@
|
|||
#!/bin/bash
# validate-setup.sh - Validate Remote Coding Agent configuration
#
# Usage: ./scripts/validate-setup.sh

set -e

echo "Remote Coding Agent Setup Validator"
echo "======================================="
echo ""

ERRORS=0
WARNINGS=0

# Color codes
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

check_pass() {
  echo -e "${GREEN}✓${NC} $1"
}

# BUG FIX: the counters previously used ((ERRORS++)) / ((WARNINGS++)).
# Under `set -e`, (( expr )) exits non-zero when the expression evaluates
# to 0 - which is exactly what a post-increment from 0 yields - so the
# script silently aborted at the FIRST failed or warned check. Plain
# arithmetic assignment always succeeds.
check_fail() {
  echo -e "${RED}✗${NC} $1"
  ERRORS=$((ERRORS + 1))
}

check_warn() {
  echo -e "${YELLOW}!${NC} $1"
  WARNINGS=$((WARNINGS + 1))
}

# Check .env file
echo "Configuration Files"
echo "----------------------"

if [ -f ".env" ]; then
  check_pass ".env file exists"
else
  check_fail ".env file not found (copy from .env.example)"
fi

# Check required environment variables
echo ""
echo "Required Environment Variables"
echo "----------------------------------"

# Load .env if exists (export everything it defines; ignore source errors)
if [ -f ".env" ]; then
  set -a
  source .env 2>/dev/null || true
  set +a
fi

if [ -n "$DATABASE_URL" ]; then
  check_pass "DATABASE_URL is set"
else
  check_fail "DATABASE_URL not set"
fi

# AI Assistants - at least one of Claude or Codex must be configured
echo ""
echo "AI Assistants"
echo "----------------"

if [ -n "$CLAUDE_CODE_OAUTH_TOKEN" ] || [ -n "$CLAUDE_API_KEY" ]; then
  check_pass "Claude credentials configured"
else
  check_warn "Claude credentials not found"
fi

if [ -n "$CODEX_ID_TOKEN" ] && [ -n "$CODEX_ACCESS_TOKEN" ]; then
  check_pass "Codex credentials configured"
else
  check_warn "Codex credentials not found"
fi

if [ -z "$CLAUDE_CODE_OAUTH_TOKEN" ] && [ -z "$CLAUDE_API_KEY" ] && [ -z "$CODEX_ID_TOKEN" ]; then
  check_fail "No AI assistant credentials found (need at least one)"
fi

# Platforms - at least one messaging/webhook adapter must be configured
echo ""
echo "Platform Adapters"
echo "--------------------"

PLATFORMS=0

if [ -n "$TELEGRAM_BOT_TOKEN" ]; then
  check_pass "Telegram configured"
  PLATFORMS=$((PLATFORMS + 1))
else
  check_warn "Telegram not configured"
fi

if [ -n "$DISCORD_BOT_TOKEN" ]; then
  check_pass "Discord configured"
  PLATFORMS=$((PLATFORMS + 1))
else
  check_warn "Discord not configured"
fi

if [ -n "$SLACK_BOT_TOKEN" ] && [ -n "$SLACK_APP_TOKEN" ]; then
  check_pass "Slack configured"
  PLATFORMS=$((PLATFORMS + 1))
else
  check_warn "Slack not configured"
fi

if [ -n "$GITHUB_TOKEN" ] && [ -n "$WEBHOOK_SECRET" ]; then
  check_pass "GitHub webhooks configured"
  PLATFORMS=$((PLATFORMS + 1))
else
  check_warn "GitHub webhooks not configured"
fi

if [ "$PLATFORMS" -eq 0 ]; then
  check_fail "No platform adapters configured (need at least one)"
fi

# Docker
echo ""
echo "Docker"
echo "---------"

if command -v docker &> /dev/null; then
  check_pass "Docker is installed"

  if docker compose version &> /dev/null; then
    check_pass "Docker Compose is available"
  else
    check_warn "Docker Compose not found"
  fi
else
  check_warn "Docker not installed (required for containerized deployment)"
fi

# Archon paths
echo ""
echo "Archon Paths"
echo "---------------"

ARCHON_HOME="${ARCHON_HOME:-$HOME/.archon}"
echo "  Home: $ARCHON_HOME"
echo "  Workspaces: $ARCHON_HOME/workspaces"
echo "  Worktrees: $ARCHON_HOME/worktrees"

if [ -d "$ARCHON_HOME" ]; then
  check_pass "Archon home directory exists"
else
  check_warn "Archon home directory will be created on first run"
fi

# Summary
echo ""
echo "======================================="
if [ "$ERRORS" -gt 0 ]; then
  echo -e "${RED}Validation failed with $ERRORS error(s) and $WARNINGS warning(s)${NC}"
  echo ""
  echo "Please fix the errors above before running the application."
  exit 1
elif [ "$WARNINGS" -gt 0 ]; then
  echo -e "${YELLOW}Validation passed with $WARNINGS warning(s)${NC}"
  echo ""
  echo "The application should work, but some features may be unavailable."
  exit 0
else
  echo -e "${GREEN}All checks passed!${NC}"
  echo ""
  echo "You can start the application with:"
  echo "  bun run dev           # Development with hot reload"
  echo "  docker compose up -d  # Docker deployment"
  exit 0
fi
|
||||
|
|
@ -12,11 +12,12 @@ import * as codebaseDb from '../db/codebases';
|
|||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { readdir, access } from 'fs/promises';
|
||||
import { join, resolve } from 'path';
|
||||
import { join } from 'path';
|
||||
import { parseAllowedUsers, isGitHubUserAuthorized } from '../utils/github-auth';
|
||||
import { getLinkedIssueNumbers } from '../utils/github-graphql';
|
||||
import { onConversationClosed } from '../services/cleanup-service';
|
||||
import { isWorktreePath } from '../utils/git';
|
||||
import { getArchonWorkspacesPath, getCommandFolderSearchPaths } from '../utils/archon-paths';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
|
|
@ -365,10 +366,10 @@ export class GitHubAdapter implements IPlatformAdapter {
|
|||
}
|
||||
|
||||
/**
|
||||
* Auto-detect and load commands from .claude/commands or .agents/commands
|
||||
* Auto-detect and load commands from .archon/commands, .claude/commands or .agents/commands
|
||||
*/
|
||||
private async autoDetectAndLoadCommands(repoPath: string, codebaseId: string): Promise<void> {
|
||||
const commandFolders = ['.claude/commands', '.agents/commands'];
|
||||
const commandFolders = getCommandFolderSearchPaths();
|
||||
|
||||
for (const folder of commandFolders) {
|
||||
try {
|
||||
|
|
@ -417,7 +418,7 @@ export class GitHubAdapter implements IPlatformAdapter {
|
|||
|
||||
// Canonical path includes owner to prevent collisions between repos with same name
|
||||
// e.g., alice/utils and bob/utils get separate directories
|
||||
const canonicalPath = join(resolve(process.env.WORKSPACE_PATH ?? '/workspace'), owner, repo);
|
||||
const canonicalPath = join(getArchonWorkspacesPath(), owner, repo);
|
||||
|
||||
if (existing) {
|
||||
// Check if existing codebase points to a worktree path - fix it if so
|
||||
|
|
|
|||
193
src/config/config-loader.test.ts
Normal file
193
src/config/config-loader.test.ts
Normal file
|
|
@ -0,0 +1,193 @@
|
|||
import { describe, test, expect, beforeEach, afterEach, mock } from 'bun:test';
import { homedir } from 'os';
import { join } from 'path';
import * as fsPromises from 'fs/promises';

// Store original readFile for passthrough
const originalReadFile = fsPromises.readFile;

// Mock readFile - defaults to calling original implementation
const mockReadFile = mock(originalReadFile);
mock.module('fs/promises', () => ({
  ...fsPromises,
  readFile: mockReadFile,
}));

// NOTE: imported AFTER mock.module() so the module under test binds to the
// mocked fs/promises - reordering these imports would break every test below.
import { loadGlobalConfig, loadRepoConfig, loadConfig, clearConfigCache } from './config-loader';

describe('config-loader', () => {
  // Snapshot of config-relevant env vars so per-test overrides can't leak
  // between tests (or into other test files).
  const originalEnv: Record<string, string | undefined> = {};
  const envVars = [
    'DEFAULT_AI_ASSISTANT',
    'TELEGRAM_STREAMING_MODE',
    'DISCORD_STREAMING_MODE',
    'SLACK_STREAMING_MODE',
    'GITHUB_STREAMING_MODE',
    'MAX_CONCURRENT_CONVERSATIONS',
    'WORKSPACE_PATH',
    'WORKTREE_BASE',
    'ARCHON_HOME',
  ];

  beforeEach(() => {
    // Reset the loader's memoized global config and the readFile mock so
    // each test starts from a clean slate.
    clearConfigCache();
    mockReadFile.mockReset();

    // Save original env vars
    envVars.forEach(key => {
      originalEnv[key] = process.env[key];
      delete process.env[key];
    });
  });

  afterEach(() => {
    // Restore env vars
    envVars.forEach(key => {
      if (originalEnv[key] === undefined) {
        delete process.env[key];
      } else {
        process.env[key] = originalEnv[key];
      }
    });

    // Restore mock to passthrough mode for other test files
    mockReadFile.mockImplementation(originalReadFile);
  });

  describe('loadGlobalConfig', () => {
    test('returns empty object when file does not exist', async () => {
      // ENOENT is the "config is optional" path: no warning, empty config.
      const error = new Error('ENOENT') as NodeJS.ErrnoException;
      error.code = 'ENOENT';
      mockReadFile.mockRejectedValue(error);

      const config = await loadGlobalConfig();
      expect(config).toEqual({});
    });

    test('parses valid YAML config', async () => {
      mockReadFile.mockResolvedValue(`
defaultAssistant: codex
streaming:
  telegram: batch
concurrency:
  maxConversations: 5
`);

      const config = await loadGlobalConfig();
      expect(config.defaultAssistant).toBe('codex');
      expect(config.streaming?.telegram).toBe('batch');
      expect(config.concurrency?.maxConversations).toBe(5);
    });

    test('caches config on subsequent calls', async () => {
      mockReadFile.mockResolvedValue('defaultAssistant: claude');

      await loadGlobalConfig();
      await loadGlobalConfig();

      // Should only read file once
      expect(mockReadFile).toHaveBeenCalledTimes(1);
    });

    test('reloads config when forceReload is true', async () => {
      mockReadFile.mockResolvedValue('defaultAssistant: claude');

      await loadGlobalConfig();
      await loadGlobalConfig(true);

      expect(mockReadFile).toHaveBeenCalledTimes(2);
    });
  });

  describe('loadRepoConfig', () => {
    test('loads from .archon/config.yaml', async () => {
      mockReadFile.mockImplementation(async (path: string) => {
        if (path.includes('.archon/config.yaml')) {
          return 'assistant: codex';
        }
        throw new Error('Not found');
      });

      const config = await loadRepoConfig('/test/repo');
      expect(config.assistant).toBe('codex');
    });

    test('falls back to .claude/config.yaml', async () => {
      // Only the legacy path resolves, so the loader must try it second.
      mockReadFile.mockImplementation(async (path: string) => {
        if (path.includes('.claude/config.yaml')) {
          return 'assistant: claude';
        }
        throw new Error('Not found');
      });

      const config = await loadRepoConfig('/test/repo');
      expect(config.assistant).toBe('claude');
    });

    test('returns empty object when no config found', async () => {
      mockReadFile.mockRejectedValue(new Error('Not found'));

      const config = await loadRepoConfig('/test/repo');
      expect(config).toEqual({});
    });
  });

  describe('loadConfig', () => {
    test('returns defaults when no configs exist', async () => {
      const error = new Error('ENOENT') as NodeJS.ErrnoException;
      error.code = 'ENOENT';
      mockReadFile.mockRejectedValue(error);

      const config = await loadConfig();

      expect(config.assistant).toBe('claude');
      expect(config.streaming.telegram).toBe('stream');
      expect(config.streaming.github).toBe('batch');
      expect(config.concurrency.maxConversations).toBe(10);
    });

    test('env vars override config files', async () => {
      mockReadFile.mockResolvedValue(`
defaultAssistant: claude
streaming:
  telegram: stream
`);

      process.env.DEFAULT_AI_ASSISTANT = 'codex';
      process.env.TELEGRAM_STREAMING_MODE = 'batch';

      const config = await loadConfig();

      expect(config.assistant).toBe('codex');
      expect(config.streaming.telegram).toBe('batch');
    });

    test('repo config overrides global config', async () => {
      // Call order matters: the FIRST .archon/config.yaml read is the global
      // config (~/.archon); later reads carrying the /repo/ prefix are the
      // repo-level config, which must win.
      let callCount = 0;
      mockReadFile.mockImplementation(async (path: string) => {
        callCount++;
        if (path.includes('.archon/config.yaml') && callCount <= 1) {
          return 'defaultAssistant: claude';
        }
        if (path.includes('/repo/.archon/config.yaml')) {
          return 'assistant: codex';
        }
        throw new Error('Not found');
      });

      const config = await loadConfig('/test/repo');
      expect(config.assistant).toBe('codex');
    });

    test('paths use archon defaults', async () => {
      const error = new Error('ENOENT') as NodeJS.ErrnoException;
      error.code = 'ENOENT';
      mockReadFile.mockRejectedValue(error);

      const config = await loadConfig();

      expect(config.paths.workspaces).toBe(join(homedir(), '.archon', 'workspaces'));
      expect(config.paths.worktrees).toBe(join(homedir(), '.archon', 'worktrees'));
    });
  });
});
|
||||
234
src/config/config-loader.ts
Normal file
234
src/config/config-loader.ts
Normal file
|
|
@ -0,0 +1,234 @@
|
|||
/**
|
||||
* Configuration loader for Archon YAML config files
|
||||
*
|
||||
* Loading order (later overrides earlier):
|
||||
* 1. Defaults
|
||||
* 2. Global config (~/.archon/config.yaml)
|
||||
* 3. Repository config (.archon/config.yaml)
|
||||
* 4. Environment variables
|
||||
*/
|
||||
|
||||
import { readFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { parse as parseYaml } from 'yaml';
|
||||
import {
|
||||
getArchonConfigPath,
|
||||
getArchonWorkspacesPath,
|
||||
getArchonWorktreesPath,
|
||||
} from '../utils/archon-paths';
|
||||
import type { GlobalConfig, RepoConfig, MergedConfig } from './config-types';
|
||||
|
||||
// Cache for loaded configs
|
||||
let cachedGlobalConfig: GlobalConfig | null = null;
|
||||
|
||||
/**
|
||||
* Load global config from ~/.archon/config.yaml
|
||||
* Returns empty object if file doesn't exist
|
||||
*/
|
||||
export async function loadGlobalConfig(forceReload = false): Promise<GlobalConfig> {
|
||||
if (cachedGlobalConfig && !forceReload) {
|
||||
return cachedGlobalConfig;
|
||||
}
|
||||
|
||||
const configPath = getArchonConfigPath();
|
||||
|
||||
try {
|
||||
const content = await readFile(configPath, 'utf-8');
|
||||
cachedGlobalConfig = parseYaml(content) as GlobalConfig;
|
||||
return cachedGlobalConfig ?? {};
|
||||
} catch (error) {
|
||||
// File doesn't exist or can't be read - return empty config
|
||||
const err = error as NodeJS.ErrnoException;
|
||||
if (err.code !== 'ENOENT') {
|
||||
console.warn(`[Config] Failed to load global config: ${err.message}`);
|
||||
}
|
||||
cachedGlobalConfig = {};
|
||||
return cachedGlobalConfig;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load repository config from .archon/config.yaml
|
||||
* Falls back to .claude/config.yaml for legacy support
|
||||
* Returns empty object if no config found
|
||||
*/
|
||||
export async function loadRepoConfig(repoPath: string): Promise<RepoConfig> {
|
||||
const configPaths = [
|
||||
join(repoPath, '.archon', 'config.yaml'),
|
||||
join(repoPath, '.claude', 'config.yaml'),
|
||||
];
|
||||
|
||||
for (const configPath of configPaths) {
|
||||
try {
|
||||
const content = await readFile(configPath, 'utf-8');
|
||||
return (parseYaml(content) as RepoConfig) ?? {};
|
||||
} catch {
|
||||
// Try next path
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// No config found
|
||||
return {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get default configuration
|
||||
*/
|
||||
function getDefaults(): MergedConfig {
|
||||
return {
|
||||
assistant: 'claude',
|
||||
streaming: {
|
||||
telegram: 'stream',
|
||||
discord: 'batch',
|
||||
slack: 'batch',
|
||||
github: 'batch',
|
||||
},
|
||||
paths: {
|
||||
workspaces: getArchonWorkspacesPath(),
|
||||
worktrees: getArchonWorktreesPath(),
|
||||
},
|
||||
concurrency: {
|
||||
maxConversations: 10,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply environment variable overrides
|
||||
*/
|
||||
function applyEnvOverrides(config: MergedConfig): MergedConfig {
|
||||
// Assistant override
|
||||
const envAssistant = process.env.DEFAULT_AI_ASSISTANT;
|
||||
if (envAssistant === 'claude' || envAssistant === 'codex') {
|
||||
config.assistant = envAssistant;
|
||||
}
|
||||
|
||||
// Streaming overrides
|
||||
const streamingModes = ['stream', 'batch'] as const;
|
||||
const telegramMode = process.env.TELEGRAM_STREAMING_MODE;
|
||||
if (telegramMode && streamingModes.includes(telegramMode as 'stream' | 'batch')) {
|
||||
config.streaming.telegram = telegramMode as 'stream' | 'batch';
|
||||
}
|
||||
|
||||
const discordMode = process.env.DISCORD_STREAMING_MODE;
|
||||
if (discordMode && streamingModes.includes(discordMode as 'stream' | 'batch')) {
|
||||
config.streaming.discord = discordMode as 'stream' | 'batch';
|
||||
}
|
||||
|
||||
const slackMode = process.env.SLACK_STREAMING_MODE;
|
||||
if (slackMode && streamingModes.includes(slackMode as 'stream' | 'batch')) {
|
||||
config.streaming.slack = slackMode as 'stream' | 'batch';
|
||||
}
|
||||
|
||||
const githubMode = process.env.GITHUB_STREAMING_MODE;
|
||||
if (githubMode && streamingModes.includes(githubMode as 'stream' | 'batch')) {
|
||||
config.streaming.github = githubMode as 'stream' | 'batch';
|
||||
}
|
||||
|
||||
// Path overrides (these come from archon-paths.ts which already checks env vars)
|
||||
// No need to re-apply here since getDefaults() uses those functions
|
||||
|
||||
// Concurrency override
|
||||
const maxConcurrent = process.env.MAX_CONCURRENT_CONVERSATIONS;
|
||||
if (maxConcurrent) {
|
||||
const parsed = parseInt(maxConcurrent, 10);
|
||||
if (!isNaN(parsed) && parsed > 0) {
|
||||
config.concurrency.maxConversations = parsed;
|
||||
}
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge global config into defaults
|
||||
*/
|
||||
function mergeGlobalConfig(defaults: MergedConfig, global: GlobalConfig): MergedConfig {
|
||||
const result = { ...defaults };
|
||||
|
||||
// Assistant preference
|
||||
if (global.defaultAssistant) {
|
||||
result.assistant = global.defaultAssistant;
|
||||
}
|
||||
|
||||
// Streaming preferences
|
||||
if (global.streaming) {
|
||||
if (global.streaming.telegram) result.streaming.telegram = global.streaming.telegram;
|
||||
if (global.streaming.discord) result.streaming.discord = global.streaming.discord;
|
||||
if (global.streaming.slack) result.streaming.slack = global.streaming.slack;
|
||||
if (global.streaming.github) result.streaming.github = global.streaming.github;
|
||||
}
|
||||
|
||||
// Path preferences
|
||||
if (global.paths) {
|
||||
if (global.paths.workspaces) result.paths.workspaces = global.paths.workspaces;
|
||||
if (global.paths.worktrees) result.paths.worktrees = global.paths.worktrees;
|
||||
}
|
||||
|
||||
// Concurrency preferences
|
||||
if (global.concurrency?.maxConversations) {
|
||||
result.concurrency.maxConversations = global.concurrency.maxConversations;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge repo config into merged config
|
||||
*/
|
||||
function mergeRepoConfig(merged: MergedConfig, repo: RepoConfig): MergedConfig {
|
||||
const result = { ...merged };
|
||||
|
||||
// Assistant override (repo-level takes precedence)
|
||||
if (repo.assistant) {
|
||||
result.assistant = repo.assistant;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load fully merged configuration
|
||||
*
|
||||
* @param repoPath - Optional repository path for repo-level config
|
||||
* @returns Merged configuration with all overrides applied
|
||||
*/
|
||||
export async function loadConfig(repoPath?: string): Promise<MergedConfig> {
|
||||
// 1. Start with defaults
|
||||
let config = getDefaults();
|
||||
|
||||
// 2. Apply global config
|
||||
const globalConfig = await loadGlobalConfig();
|
||||
config = mergeGlobalConfig(config, globalConfig);
|
||||
|
||||
// 3. Apply repo config if path provided
|
||||
if (repoPath) {
|
||||
const repoConfig = await loadRepoConfig(repoPath);
|
||||
config = mergeRepoConfig(config, repoConfig);
|
||||
}
|
||||
|
||||
// 4. Apply environment overrides (highest precedence)
|
||||
config = applyEnvOverrides(config);
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear cached global config (useful for testing)
|
||||
*/
|
||||
export function clearConfigCache(): void {
|
||||
cachedGlobalConfig = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Log current configuration (for startup)
|
||||
*/
|
||||
export function logConfig(config: MergedConfig): void {
|
||||
console.log('[Config] Loaded configuration:');
|
||||
console.log(` AI Assistant: ${config.assistant}`);
|
||||
console.log(` Telegram Streaming: ${config.streaming.telegram}`);
|
||||
console.log(` Discord Streaming: ${config.streaming.discord}`);
|
||||
console.log(` Slack Streaming: ${config.streaming.slack}`);
|
||||
console.log(` GitHub Streaming: ${config.streaming.github}`);
|
||||
}
|
||||
118
src/config/config-types.ts
Normal file
118
src/config/config-types.ts
Normal file
|
|
@ -0,0 +1,118 @@
|
|||
/**
|
||||
* Configuration types for Archon YAML config files
|
||||
*
|
||||
* Two levels:
|
||||
* - Global: ~/.archon/config.yaml (user preferences)
|
||||
* - Repository: .archon/config.yaml (project settings)
|
||||
*/
|
||||
|
||||
/**
|
||||
* Global configuration (non-secret user preferences)
|
||||
* Located at ~/.archon/config.yaml
|
||||
*/
|
||||
export interface GlobalConfig {
|
||||
/**
|
||||
* Default AI assistant when no codebase-specific preference
|
||||
* @default 'claude'
|
||||
*/
|
||||
defaultAssistant?: 'claude' | 'codex';
|
||||
|
||||
/**
|
||||
* Platform streaming preferences (can be overridden per conversation)
|
||||
*/
|
||||
streaming?: {
|
||||
telegram?: 'stream' | 'batch';
|
||||
discord?: 'stream' | 'batch';
|
||||
slack?: 'stream' | 'batch';
|
||||
github?: 'stream' | 'batch';
|
||||
};
|
||||
|
||||
/**
|
||||
* Directory preferences (usually not needed - defaults work well)
|
||||
*/
|
||||
paths?: {
|
||||
/**
|
||||
* Override workspaces directory
|
||||
* @default '~/.archon/workspaces'
|
||||
*/
|
||||
workspaces?: string;
|
||||
|
||||
/**
|
||||
* Override worktrees directory
|
||||
* @default '~/.archon/worktrees'
|
||||
*/
|
||||
worktrees?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Concurrency limits
|
||||
*/
|
||||
concurrency?: {
|
||||
/**
|
||||
* Maximum concurrent AI conversations
|
||||
* @default 10
|
||||
*/
|
||||
maxConversations?: number;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Repository configuration (project-specific settings)
|
||||
* Located at .archon/config.yaml in any repository
|
||||
*/
|
||||
export interface RepoConfig {
|
||||
/**
|
||||
* AI assistant preference for this repository
|
||||
* Overrides global default
|
||||
*/
|
||||
assistant?: 'claude' | 'codex';
|
||||
|
||||
/**
|
||||
* Commands configuration
|
||||
*/
|
||||
commands?: {
|
||||
/**
|
||||
* Custom command folder path (relative to repo root)
|
||||
* @default '.archon/commands'
|
||||
*/
|
||||
folder?: string;
|
||||
|
||||
/**
|
||||
* Auto-load commands on clone
|
||||
* @default true
|
||||
*/
|
||||
autoLoad?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* Worktree settings for this repository
|
||||
*/
|
||||
worktree?: {
|
||||
/**
|
||||
* Base branch for worktrees (e.g., 'main', 'develop')
|
||||
* @default auto-detected from repo
|
||||
*/
|
||||
baseBranch?: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Merged configuration (global + repo + env vars)
|
||||
* Environment variables take precedence
|
||||
*/
|
||||
export interface MergedConfig {
|
||||
assistant: 'claude' | 'codex';
|
||||
streaming: {
|
||||
telegram: 'stream' | 'batch';
|
||||
discord: 'stream' | 'batch';
|
||||
slack: 'stream' | 'batch';
|
||||
github: 'stream' | 'batch';
|
||||
};
|
||||
paths: {
|
||||
workspaces: string;
|
||||
worktrees: string;
|
||||
};
|
||||
concurrency: {
|
||||
maxConversations: number;
|
||||
};
|
||||
}
|
||||
6
src/config/index.ts
Normal file
6
src/config/index.ts
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
/**
|
||||
* Configuration module exports
|
||||
*/
|
||||
|
||||
export * from './config-types';
|
||||
export * from './config-loader';
|
||||
|
|
@ -4,6 +4,7 @@
|
|||
import { describe, test, expect, mock, beforeEach, type Mock } from 'bun:test';
|
||||
import { Conversation } from '../types';
|
||||
import { resolve, join } from 'path';
|
||||
import * as fsPromises from 'fs/promises';
|
||||
|
||||
// Create mock functions
|
||||
const mockUpdateConversation = mock(() => Promise.resolve());
|
||||
|
|
@ -100,6 +101,7 @@ mock.module('child_process', () => ({
|
|||
}));
|
||||
|
||||
mock.module('fs/promises', () => ({
|
||||
...fsPromises,
|
||||
access: mockAccess,
|
||||
readdir: mockReaddir,
|
||||
mkdir: mock(() => Promise.resolve()),
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ import {
|
|||
getWorktreeStatusBreakdown,
|
||||
MAX_WORKTREES_PER_CODEBASE,
|
||||
} from '../services/cleanup-service';
|
||||
import { getArchonWorkspacesPath, getCommandFolderSearchPaths } from '../utils/archon-paths';
|
||||
|
||||
/**
|
||||
* Convert an absolute path to a relative path from the repository root
|
||||
|
|
@ -35,7 +36,7 @@ function shortenPath(absolutePath: string, repoRoot?: string): string {
|
|||
}
|
||||
|
||||
// Fallback: show relative to workspace
|
||||
const workspacePath = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
const workspacePath = getArchonWorkspacesPath();
|
||||
const relPath = relative(workspacePath, absolutePath);
|
||||
if (!relPath.startsWith('..')) {
|
||||
return relPath;
|
||||
|
|
@ -122,7 +123,7 @@ Codebase Commands (per-project):
|
|||
/load-commands <folder> - Bulk load (recursive)
|
||||
/command-invoke <name> [args] - Execute
|
||||
/commands - List registered
|
||||
Note: Commands use relative paths (e.g., .claude/commands)
|
||||
Note: Commands use relative paths (e.g., .archon/commands)
|
||||
|
||||
Codebase:
|
||||
/clone <repo-url> - Clone repository
|
||||
|
|
@ -144,7 +145,10 @@ Session:
|
|||
/status - Show state
|
||||
/reset - Clear session
|
||||
/reset-context - Reset AI context, keep worktree
|
||||
/help - Show help`,
|
||||
/help - Show help
|
||||
|
||||
Setup:
|
||||
/init - Create .archon structure in current repo`,
|
||||
};
|
||||
|
||||
case 'status': {
|
||||
|
|
@ -224,7 +228,7 @@ Session:
|
|||
const resolvedCwd = resolve(newCwd);
|
||||
|
||||
// Validate path is within workspace to prevent path traversal
|
||||
const workspacePath = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
const workspacePath = getArchonWorkspacesPath();
|
||||
if (!isPathWithinWorkspace(resolvedCwd)) {
|
||||
return { success: false, message: `Path must be within ${workspacePath} directory` };
|
||||
}
|
||||
|
|
@ -279,9 +283,9 @@ Session:
|
|||
const repoName = urlParts.pop() ?? 'unknown';
|
||||
const ownerName = urlParts.pop() ?? 'unknown';
|
||||
|
||||
// Use WORKSPACE_PATH env var for flexibility (local dev vs Docker)
|
||||
// Use Archon workspaces path (ARCHON_HOME/workspaces or ~/.archon/workspaces)
|
||||
// Include owner in path to prevent collisions (e.g., alice/utils vs bob/utils)
|
||||
const workspacePath = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
const workspacePath = getArchonWorkspacesPath();
|
||||
const targetPath = join(workspacePath, ownerName, repoName);
|
||||
|
||||
try {
|
||||
|
|
@ -313,7 +317,7 @@ Session:
|
|||
|
||||
// Check for command folders (same logic as successful clone)
|
||||
let commandFolder: string | null = null;
|
||||
for (const folder of ['.claude/commands', '.agents/commands']) {
|
||||
for (const folder of getCommandFolderSearchPaths()) {
|
||||
try {
|
||||
await access(join(targetPath, folder));
|
||||
commandFolder = folder;
|
||||
|
|
@ -413,7 +417,7 @@ Session:
|
|||
|
||||
// Auto-load commands if found
|
||||
let commandsLoaded = 0;
|
||||
for (const folder of ['.claude/commands', '.agents/commands']) {
|
||||
for (const folder of getCommandFolderSearchPaths()) {
|
||||
try {
|
||||
const commandPath = join(targetPath, folder);
|
||||
await access(commandPath);
|
||||
|
|
@ -468,7 +472,7 @@ Session:
|
|||
|
||||
const [commandName, commandPath, ...textParts] = args;
|
||||
const commandText = textParts.join(' ');
|
||||
const workspacePath = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
const workspacePath = getArchonWorkspacesPath();
|
||||
const basePath = conversation.cwd ?? workspacePath;
|
||||
const fullPath = resolve(basePath, commandPath);
|
||||
|
||||
|
|
@ -507,7 +511,7 @@ Session:
|
|||
}
|
||||
|
||||
const folderPath = args.join(' ');
|
||||
const workspacePath = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
const workspacePath = getArchonWorkspacesPath();
|
||||
const basePath = conversation.cwd ?? workspacePath;
|
||||
const fullPath = resolve(basePath, folderPath);
|
||||
|
||||
|
|
@ -573,7 +577,7 @@ Session:
|
|||
}
|
||||
|
||||
case 'repos': {
|
||||
const workspacePath = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
const workspacePath = getArchonWorkspacesPath();
|
||||
|
||||
try {
|
||||
const entries = await readdir(workspacePath, { withFileTypes: true });
|
||||
|
|
@ -658,7 +662,7 @@ Session:
|
|||
return { success: false, message: 'Usage: /repo <number|name> [pull]' };
|
||||
}
|
||||
|
||||
const workspacePath = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
const workspacePath = getArchonWorkspacesPath();
|
||||
const identifier = args[0];
|
||||
const shouldPull = args[1]?.toLowerCase() === 'pull';
|
||||
|
||||
|
|
@ -748,7 +752,7 @@ Session:
|
|||
|
||||
// Auto-load commands if found
|
||||
let commandsLoaded = 0;
|
||||
for (const folder of ['.claude/commands', '.agents/commands']) {
|
||||
for (const folder of getCommandFolderSearchPaths()) {
|
||||
try {
|
||||
const commandPath = join(targetPath, folder);
|
||||
await access(commandPath);
|
||||
|
|
@ -793,7 +797,7 @@ Session:
|
|||
return { success: false, message: 'Usage: /repo-remove <number|name>' };
|
||||
}
|
||||
|
||||
const workspacePath = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
const workspacePath = getArchonWorkspacesPath();
|
||||
const identifier = args[0];
|
||||
|
||||
try {
|
||||
|
|
@ -1204,6 +1208,82 @@ Session:
|
|||
}
|
||||
}
|
||||
|
||||
case 'init': {
|
||||
// Create .archon structure in current repo
|
||||
if (!conversation.cwd) {
|
||||
return {
|
||||
success: false,
|
||||
message: 'No working directory set. Use /clone or /setcwd first.',
|
||||
};
|
||||
}
|
||||
|
||||
const archonDir = join(conversation.cwd, '.archon');
|
||||
const commandsDir = join(archonDir, 'commands');
|
||||
const configPath = join(archonDir, 'config.yaml');
|
||||
|
||||
try {
|
||||
// Check if .archon already exists
|
||||
try {
|
||||
await access(archonDir);
|
||||
return {
|
||||
success: false,
|
||||
message: '.archon directory already exists. Nothing to do.',
|
||||
};
|
||||
} catch {
|
||||
// Directory doesn't exist, we can create it
|
||||
}
|
||||
|
||||
// Create directories
|
||||
await import('fs/promises').then(fs => fs.mkdir(commandsDir, { recursive: true }));
|
||||
|
||||
// Create default config.yaml
|
||||
const defaultConfig = `# Archon repository configuration
|
||||
# See: https://github.com/dynamous-community/remote-coding-agent
|
||||
|
||||
# AI assistant preference (optional - overrides global default)
|
||||
# assistant: claude
|
||||
|
||||
# Commands configuration (optional)
|
||||
# commands:
|
||||
# folder: .archon/commands
|
||||
# autoLoad: true
|
||||
`;
|
||||
await writeFile(configPath, defaultConfig);
|
||||
|
||||
// Create example command
|
||||
const exampleCommand = join(commandsDir, 'example.md');
|
||||
const exampleContent = `---
|
||||
description: Example command template
|
||||
---
|
||||
# Example Command
|
||||
|
||||
This is an example command template.
|
||||
|
||||
Arguments:
|
||||
- $1 - First positional argument
|
||||
- $ARGUMENTS - All arguments as string
|
||||
|
||||
Task: $ARGUMENTS
|
||||
`;
|
||||
await writeFile(exampleCommand, exampleContent);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Created .archon structure:
|
||||
.archon/
|
||||
├── config.yaml
|
||||
└── commands/
|
||||
└── example.md
|
||||
|
||||
Use /load-commands .archon/commands to register commands.`,
|
||||
};
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
console.error('[Command] init failed:', err);
|
||||
return { success: false, message: `Failed to initialize: ${err.message}` };
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
return {
|
||||
success: false,
|
||||
|
|
|
|||
19
src/index.ts
19
src/index.ts
|
|
@ -8,7 +8,6 @@
|
|||
import 'dotenv/config';
|
||||
|
||||
import express from 'express';
|
||||
import { resolve } from 'path';
|
||||
import { TelegramAdapter } from './adapters/telegram';
|
||||
import { TestAdapter } from './adapters/test';
|
||||
import { GitHubAdapter } from './adapters/github';
|
||||
|
|
@ -20,6 +19,8 @@ import { ConversationLockManager } from './utils/conversation-lock';
|
|||
import { classifyAndFormatError } from './utils/error-formatter';
|
||||
import { seedDefaultCommands } from './scripts/seed-commands';
|
||||
import { startCleanupScheduler, stopCleanupScheduler } from './services/cleanup-service';
|
||||
import { logArchonPaths } from './utils/archon-paths';
|
||||
import { loadConfig, logConfig } from './config';
|
||||
|
||||
async function main(): Promise<void> {
|
||||
console.log('[App] Starting Remote Coding Agent');
|
||||
|
|
@ -64,16 +65,12 @@ async function main(): Promise<void> {
|
|||
// Start cleanup scheduler
|
||||
startCleanupScheduler();
|
||||
|
||||
// Warn if WORKSPACE_PATH is inside project directory
|
||||
const workspacePath = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
const projectRoot = resolve(__dirname, '..');
|
||||
if (workspacePath.startsWith(projectRoot + '/') || workspacePath === projectRoot) {
|
||||
console.warn('⚠️ WARNING: WORKSPACE_PATH is inside project directory');
|
||||
console.warn(' This can cause nested repository issues when working on this repo.');
|
||||
console.warn(` Current: ${workspacePath}`);
|
||||
console.warn(' Recommended: /tmp/remote-agent-workspace or ~/remote-agent-workspace');
|
||||
console.warn('');
|
||||
}
|
||||
// Log Archon paths configuration
|
||||
logArchonPaths();
|
||||
|
||||
// Load and log configuration
|
||||
const config = await loadConfig();
|
||||
logConfig(config);
|
||||
|
||||
// Seed default command templates
|
||||
await seedDefaultCommands();
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
import { mock, describe, test, expect, beforeEach } from 'bun:test';
|
||||
import { mock, describe, test, expect, beforeEach, afterEach } from 'bun:test';
|
||||
import { MockPlatformAdapter } from '../test/mocks/platform';
|
||||
import { Conversation, Codebase, Session } from '../types';
|
||||
import { join } from 'path';
|
||||
import * as fsPromises from 'fs/promises';
|
||||
|
||||
// Setup mocks before importing the module under test
|
||||
const mockGetOrCreateConversation = mock(() => Promise.resolve(null));
|
||||
|
|
@ -20,7 +21,10 @@ const mockParseCommand = mock((message: string) => {
|
|||
return { command: parts[0].substring(1), args: parts.slice(1) };
|
||||
});
|
||||
const mockGetAssistantClient = mock(() => null);
|
||||
const mockReadFile = mock(() => Promise.resolve(''));
|
||||
|
||||
// Store original readFile for passthrough
|
||||
const originalReadFile = fsPromises.readFile;
|
||||
const mockReadFile = mock(originalReadFile);
|
||||
|
||||
// Isolation environment mocks
|
||||
const mockIsolationEnvGetById = mock(() => Promise.resolve(null));
|
||||
|
|
@ -102,6 +106,7 @@ mock.module('../clients/factory', () => ({
|
|||
}));
|
||||
|
||||
mock.module('fs/promises', () => ({
|
||||
...fsPromises,
|
||||
readFile: mockReadFile,
|
||||
}));
|
||||
|
||||
|
|
@ -248,6 +253,11 @@ describe('orchestrator', () => {
|
|||
mockExecFileAsync.mockResolvedValue({ stdout: 'main', stderr: '' });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore mock to passthrough mode for other test files
|
||||
mockReadFile.mockImplementation(originalReadFile);
|
||||
});
|
||||
|
||||
describe('slash commands (non-invoke)', () => {
|
||||
test('delegates to command handler and returns', async () => {
|
||||
mockHandleCommand.mockResolvedValue({ message: 'Command executed', modified: false });
|
||||
|
|
@ -712,7 +722,6 @@ describe('orchestrator', () => {
|
|||
'claude-session-xyz'
|
||||
);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('stale worktree handling', () => {
|
||||
|
|
|
|||
|
|
@ -22,11 +22,7 @@ import { substituteVariables } from '../utils/variable-substitution';
|
|||
import { classifyAndFormatError } from '../utils/error-formatter';
|
||||
import { getAssistantClient } from '../clients/factory';
|
||||
import { getIsolationProvider } from '../isolation';
|
||||
import {
|
||||
worktreeExists,
|
||||
findWorktreeByBranch,
|
||||
getCanonicalRepoPath,
|
||||
} from '../utils/git';
|
||||
import { worktreeExists, findWorktreeByBranch, getCanonicalRepoPath } from '../utils/git';
|
||||
import {
|
||||
cleanupToMakeRoom,
|
||||
getWorktreeStatusBreakdown,
|
||||
|
|
|
|||
163
src/utils/archon-paths.test.ts
Normal file
163
src/utils/archon-paths.test.ts
Normal file
|
|
@ -0,0 +1,163 @@
|
|||
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
|
||||
import { homedir } from 'os';
|
||||
import { join } from 'path';
|
||||
|
||||
import {
|
||||
isDocker,
|
||||
getArchonHome,
|
||||
getArchonWorkspacesPath,
|
||||
getArchonWorktreesPath,
|
||||
getArchonConfigPath,
|
||||
getCommandFolderSearchPaths,
|
||||
expandTilde,
|
||||
} from './archon-paths';
|
||||
|
||||
describe('archon-paths', () => {
|
||||
const originalEnv: Record<string, string | undefined> = {};
|
||||
const envVars = ['WORKSPACE_PATH', 'WORKTREE_BASE', 'ARCHON_HOME', 'ARCHON_DOCKER', 'HOME'];
|
||||
|
||||
beforeEach(() => {
|
||||
envVars.forEach(key => {
|
||||
originalEnv[key] = process.env[key];
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
envVars.forEach(key => {
|
||||
if (originalEnv[key] === undefined) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = originalEnv[key];
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('expandTilde', () => {
|
||||
test('expands ~ to home directory', () => {
|
||||
expect(expandTilde('~/test')).toBe(join(homedir(), 'test'));
|
||||
});
|
||||
|
||||
test('returns path unchanged if no tilde', () => {
|
||||
expect(expandTilde('/absolute/path')).toBe('/absolute/path');
|
||||
});
|
||||
});
|
||||
|
||||
describe('isDocker', () => {
|
||||
test('returns true when WORKSPACE_PATH is /workspace', () => {
|
||||
process.env.WORKSPACE_PATH = '/workspace';
|
||||
expect(isDocker()).toBe(true);
|
||||
});
|
||||
|
||||
test('returns true when HOME=/root and WORKSPACE_PATH set', () => {
|
||||
process.env.HOME = '/root';
|
||||
process.env.WORKSPACE_PATH = '/app/workspace';
|
||||
expect(isDocker()).toBe(true);
|
||||
});
|
||||
|
||||
test('returns true when ARCHON_DOCKER=true', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
process.env.ARCHON_DOCKER = 'true';
|
||||
expect(isDocker()).toBe(true);
|
||||
});
|
||||
|
||||
test('returns false for local development', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
process.env.HOME = homedir();
|
||||
expect(isDocker()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getArchonHome', () => {
|
||||
test('returns /.archon in Docker', () => {
|
||||
process.env.WORKSPACE_PATH = '/workspace';
|
||||
expect(getArchonHome()).toBe('/.archon');
|
||||
});
|
||||
|
||||
test('returns ARCHON_HOME when set (local)', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
process.env.ARCHON_HOME = '/custom/archon';
|
||||
expect(getArchonHome()).toBe('/custom/archon');
|
||||
});
|
||||
|
||||
test('expands tilde in ARCHON_HOME', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
process.env.ARCHON_HOME = '~/my-archon';
|
||||
expect(getArchonHome()).toBe(join(homedir(), 'my-archon'));
|
||||
});
|
||||
|
||||
test('returns ~/.archon by default (local)', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.ARCHON_HOME;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
expect(getArchonHome()).toBe(join(homedir(), '.archon'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('getArchonWorkspacesPath', () => {
|
||||
test('returns ~/.archon/workspaces by default', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.ARCHON_HOME;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
expect(getArchonWorkspacesPath()).toBe(join(homedir(), '.archon', 'workspaces'));
|
||||
});
|
||||
|
||||
test('returns /.archon/workspaces in Docker', () => {
|
||||
process.env.ARCHON_DOCKER = 'true';
|
||||
expect(getArchonWorkspacesPath()).toBe('/.archon/workspaces');
|
||||
});
|
||||
|
||||
test('uses ARCHON_HOME when set', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
process.env.ARCHON_HOME = '/custom/archon';
|
||||
expect(getArchonWorkspacesPath()).toBe('/custom/archon/workspaces');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getArchonWorktreesPath', () => {
|
||||
test('returns ~/.archon/worktrees by default', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.WORKTREE_BASE;
|
||||
delete process.env.ARCHON_HOME;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
expect(getArchonWorktreesPath()).toBe(join(homedir(), '.archon', 'worktrees'));
|
||||
});
|
||||
|
||||
test('returns /.archon/worktrees in Docker', () => {
|
||||
process.env.ARCHON_DOCKER = 'true';
|
||||
expect(getArchonWorktreesPath()).toBe('/.archon/worktrees');
|
||||
});
|
||||
|
||||
test('uses ARCHON_HOME when set', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.WORKTREE_BASE;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
process.env.ARCHON_HOME = '/custom/archon';
|
||||
expect(getArchonWorktreesPath()).toBe('/custom/archon/worktrees');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCommandFolderSearchPaths', () => {
|
||||
test('returns folders in priority order', () => {
|
||||
const paths = getCommandFolderSearchPaths();
|
||||
expect(paths).toEqual(['.archon/commands', '.claude/commands', '.agents/commands']);
|
||||
});
|
||||
|
||||
test('.archon/commands has highest priority', () => {
|
||||
const paths = getCommandFolderSearchPaths();
|
||||
expect(paths[0]).toBe('.archon/commands');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getArchonConfigPath', () => {
|
||||
test('returns path to config.yaml', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.ARCHON_HOME;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
expect(getArchonConfigPath()).toBe(join(homedir(), '.archon', 'config.yaml'));
|
||||
});
|
||||
});
|
||||
});
|
||||
106
src/utils/archon-paths.ts
Normal file
106
src/utils/archon-paths.ts
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
/**
|
||||
* Archon path resolution utilities
|
||||
*
|
||||
* Directory structure:
|
||||
* ~/.archon/ # User-level (ARCHON_HOME)
|
||||
* ├── workspaces/ # Cloned repositories
|
||||
* ├── worktrees/ # Git worktrees
|
||||
* └── config.yaml # Global config
|
||||
*
|
||||
* For Docker: /.archon/
|
||||
*/
|
||||
|
||||
import { join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
|
||||
/**
|
||||
* Expand ~ to home directory
|
||||
*/
|
||||
export function expandTilde(path: string): string {
|
||||
if (path.startsWith('~')) {
|
||||
const pathAfterTilde = path.slice(1).replace(/^[/\\]/, '');
|
||||
return join(homedir(), pathAfterTilde);
|
||||
}
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect if running in Docker container
|
||||
*/
|
||||
export function isDocker(): boolean {
|
||||
return (
|
||||
process.env.WORKSPACE_PATH === '/workspace' ||
|
||||
(process.env.HOME === '/root' && Boolean(process.env.WORKSPACE_PATH)) ||
|
||||
process.env.ARCHON_DOCKER === 'true'
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the Archon home directory
|
||||
* - Docker: /.archon
|
||||
* - Local: ~/.archon (or ARCHON_HOME env var)
|
||||
*/
|
||||
export function getArchonHome(): string {
|
||||
if (isDocker()) {
|
||||
return '/.archon';
|
||||
}
|
||||
|
||||
const envHome = process.env.ARCHON_HOME;
|
||||
if (envHome) {
|
||||
return expandTilde(envHome);
|
||||
}
|
||||
|
||||
return join(homedir(), '.archon');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the workspaces directory (where repos are cloned)
|
||||
*/
|
||||
export function getArchonWorkspacesPath(): string {
|
||||
return join(getArchonHome(), 'workspaces');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the worktrees directory (where git worktrees are created)
|
||||
*/
|
||||
export function getArchonWorktreesPath(): string {
|
||||
return join(getArchonHome(), 'worktrees');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the global config file path
|
||||
*/
|
||||
export function getArchonConfigPath(): string {
|
||||
return join(getArchonHome(), 'config.yaml');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get command folder search paths for a repository
|
||||
* Returns folders in priority order (first match wins)
|
||||
*/
|
||||
export function getCommandFolderSearchPaths(): string[] {
|
||||
return ['.archon/commands', '.claude/commands', '.agents/commands'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get workflow folder search paths for a repository (future)
|
||||
*/
|
||||
export function getWorkflowFolderSearchPaths(): string[] {
|
||||
return ['.archon/workflows', '.claude/workflows', '.agents/workflows'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Log the Archon paths configuration (for startup)
|
||||
*/
|
||||
export function logArchonPaths(): void {
|
||||
const home = getArchonHome();
|
||||
const workspaces = getArchonWorkspacesPath();
|
||||
const worktrees = getArchonWorktreesPath();
|
||||
const config = getArchonConfigPath();
|
||||
|
||||
console.log('[Archon] Paths configured:');
|
||||
console.log(` Home: ${home}`);
|
||||
console.log(` Workspaces: ${workspaces}`);
|
||||
console.log(` Worktrees: ${worktrees}`);
|
||||
console.log(` Config: ${config}`);
|
||||
}
|
||||
|
|
@ -73,6 +73,8 @@ describe('git utilities', () => {
|
|||
const originalEnv = process.env.WORKTREE_BASE;
|
||||
const originalWorkspacePath = process.env.WORKSPACE_PATH;
|
||||
const originalHome = process.env.HOME;
|
||||
const originalArchonHome = process.env.ARCHON_HOME;
|
||||
const originalArchonDocker = process.env.ARCHON_DOCKER;
|
||||
|
||||
afterEach(() => {
|
||||
if (originalEnv === undefined) {
|
||||
|
|
@ -90,61 +92,61 @@ describe('git utilities', () => {
|
|||
} else {
|
||||
process.env.HOME = originalHome;
|
||||
}
|
||||
if (originalArchonHome === undefined) {
|
||||
delete process.env.ARCHON_HOME;
|
||||
} else {
|
||||
process.env.ARCHON_HOME = originalArchonHome;
|
||||
}
|
||||
if (originalArchonDocker === undefined) {
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
} else {
|
||||
process.env.ARCHON_DOCKER = originalArchonDocker;
|
||||
}
|
||||
});
|
||||
|
||||
test('returns ~/tmp/worktrees by default for local (non-Docker)', () => {
|
||||
test('returns ~/.archon/worktrees by default for local (non-Docker)', () => {
|
||||
delete process.env.WORKTREE_BASE;
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.ARCHON_HOME;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
const result = git.getWorktreeBase('/workspace/my-repo');
|
||||
// Default for local: ~/tmp/worktrees (matches worktree-manager skill)
|
||||
expect(result).toBe(join(homedir(), 'tmp', 'worktrees'));
|
||||
// Default for local: ~/.archon/worktrees (new Archon structure)
|
||||
expect(result).toBe(join(homedir(), '.archon', 'worktrees'));
|
||||
});
|
||||
|
||||
test('returns /workspace/worktrees for Docker environment', () => {
|
||||
test('returns /.archon/worktrees for Docker environment', () => {
|
||||
delete process.env.WORKTREE_BASE;
|
||||
delete process.env.ARCHON_HOME;
|
||||
process.env.WORKSPACE_PATH = '/workspace';
|
||||
const result = git.getWorktreeBase('/workspace/my-repo');
|
||||
// Docker: inside mounted volume
|
||||
expect(result).toBe('/workspace/worktrees');
|
||||
// Docker: inside /.archon volume
|
||||
expect(result).toBe('/.archon/worktrees');
|
||||
});
|
||||
|
||||
test('detects Docker by HOME=/root + WORKSPACE_PATH', () => {
|
||||
delete process.env.WORKTREE_BASE;
|
||||
delete process.env.ARCHON_HOME;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
process.env.HOME = '/root';
|
||||
process.env.WORKSPACE_PATH = '/app/workspace';
|
||||
const result = git.getWorktreeBase('/workspace/my-repo');
|
||||
expect(result).toBe('/workspace/worktrees');
|
||||
expect(result).toBe('/.archon/worktrees');
|
||||
});
|
||||
|
||||
test('uses WORKTREE_BASE for local (non-Docker)', () => {
|
||||
delete process.env.WORKSPACE_PATH; // Ensure not Docker
|
||||
delete process.env.HOME; // Reset HOME to actual value
|
||||
process.env.WORKTREE_BASE = '/custom/worktrees';
|
||||
test('uses ARCHON_HOME for local (non-Docker)', () => {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.WORKTREE_BASE;
|
||||
delete process.env.ARCHON_DOCKER;
|
||||
process.env.ARCHON_HOME = '/custom/archon';
|
||||
const result = git.getWorktreeBase('/workspace/my-repo');
|
||||
expect(result).toBe('/custom/worktrees');
|
||||
expect(result).toBe('/custom/archon/worktrees');
|
||||
});
|
||||
|
||||
test('ignores WORKTREE_BASE in Docker (end user protection)', () => {
|
||||
process.env.WORKTREE_BASE = '/custom/worktrees';
|
||||
process.env.WORKSPACE_PATH = '/workspace'; // Docker flag
|
||||
test('uses fixed path in Docker', () => {
|
||||
delete process.env.ARCHON_HOME;
|
||||
process.env.ARCHON_DOCKER = 'true';
|
||||
const result = git.getWorktreeBase('/workspace/my-repo');
|
||||
// Docker ALWAYS uses fixed location, override IGNORED
|
||||
expect(result).toBe('/workspace/worktrees');
|
||||
});
|
||||
|
||||
test('expands tilde in WORKTREE_BASE (local only)', () => {
|
||||
delete process.env.WORKSPACE_PATH; // Ensure not Docker
|
||||
process.env.WORKTREE_BASE = '~/tmp/worktrees';
|
||||
const result = git.getWorktreeBase('/workspace/my-repo');
|
||||
expect(result).toBe(join(homedir(), 'tmp', 'worktrees'));
|
||||
});
|
||||
|
||||
test('ignores WORKTREE_BASE with tilde in Docker', () => {
|
||||
process.env.WORKSPACE_PATH = '/workspace'; // Docker flag
|
||||
process.env.WORKTREE_BASE = '~/custom/worktrees';
|
||||
const result = git.getWorktreeBase('/workspace/my-repo');
|
||||
// Tilde never expanded in Docker because override is ignored entirely
|
||||
expect(result).toBe('/workspace/worktrees');
|
||||
expect(result).toBe('/.archon/worktrees');
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import { readFile, access, mkdir as fsMkdir } from 'fs/promises';
|
|||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { join, basename } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import { getArchonWorktreesPath } from './archon-paths';
|
||||
|
||||
const promisifiedExecFile = promisify(execFile);
|
||||
|
||||
|
|
@ -27,38 +27,10 @@ export async function mkdirAsync(path: string, options?: { recursive?: boolean }
|
|||
|
||||
/**
|
||||
* Get the base directory for worktrees
|
||||
* - Docker: FIXED at /workspace/worktrees (end users can't override)
|
||||
* - Local: ~/tmp/worktrees by default, WORKTREE_BASE env var to override
|
||||
* Now delegates to archon-paths module for consistency
|
||||
*/
|
||||
export function getWorktreeBase(_repoPath: string): string {
|
||||
// 1. Docker: FIXED location, no override for end users
|
||||
const isDocker =
|
||||
process.env.WORKSPACE_PATH === '/workspace' ||
|
||||
(process.env.HOME === '/root' && process.env.WORKSPACE_PATH);
|
||||
|
||||
if (isDocker) {
|
||||
return '/workspace/worktrees';
|
||||
}
|
||||
|
||||
// 2. Local: Check WORKTREE_BASE override (for developers with custom setups)
|
||||
const envBase = process.env.WORKTREE_BASE;
|
||||
if (envBase) {
|
||||
return expandTilde(envBase);
|
||||
}
|
||||
|
||||
// 3. Local default: matches worktree-manager skill
|
||||
return join(homedir(), 'tmp', 'worktrees');
|
||||
}
|
||||
|
||||
/**
|
||||
* Expand ~ to home directory
|
||||
*/
|
||||
function expandTilde(path: string): string {
|
||||
if (path.startsWith('~')) {
|
||||
const pathAfterTilde = path.slice(1).replace(/^[/\\]/, '');
|
||||
return join(homedir(), pathAfterTilde);
|
||||
}
|
||||
return path;
|
||||
return getArchonWorktreesPath();
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -6,54 +6,72 @@
|
|||
*/
|
||||
|
||||
import { describe, test, expect, beforeEach, afterAll } from 'bun:test';
|
||||
import { resolve } from 'path';
|
||||
import { resolve, join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
|
||||
// Helper to import fresh module with cleared cache
|
||||
async function importFresh() {
|
||||
// Clear the module from cache by deleting it from Loader registry
|
||||
const modulePath = require.resolve('./path-validation');
|
||||
const archonPathsModulePath = require.resolve('./archon-paths');
|
||||
delete require.cache[modulePath];
|
||||
delete require.cache[archonPathsModulePath];
|
||||
return import('./path-validation');
|
||||
}
|
||||
|
||||
// Default archon workspaces path
|
||||
function getDefaultWorkspacesPath(): string {
|
||||
return join(homedir(), '.archon', 'workspaces');
|
||||
}
|
||||
|
||||
describe('path-validation', () => {
|
||||
const originalWorkspacePath = process.env.WORKSPACE_PATH;
|
||||
const originalArchonHome = process.env.ARCHON_HOME;
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset to default /workspace for consistent test behavior
|
||||
// Reset to default for consistent test behavior
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
delete process.env.ARCHON_HOME;
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
// Restore original env var
|
||||
// Restore original env vars
|
||||
if (originalWorkspacePath !== undefined) {
|
||||
process.env.WORKSPACE_PATH = originalWorkspacePath;
|
||||
} else {
|
||||
delete process.env.WORKSPACE_PATH;
|
||||
}
|
||||
if (originalArchonHome !== undefined) {
|
||||
process.env.ARCHON_HOME = originalArchonHome;
|
||||
} else {
|
||||
delete process.env.ARCHON_HOME;
|
||||
}
|
||||
});
|
||||
|
||||
describe('isPathWithinWorkspace', () => {
|
||||
test('should allow paths within /workspace (default)', async () => {
|
||||
test('should allow paths within default archon workspaces', async () => {
|
||||
const { isPathWithinWorkspace } = await importFresh();
|
||||
expect(isPathWithinWorkspace('/workspace/repo')).toBe(true);
|
||||
expect(isPathWithinWorkspace('/workspace/repo/src')).toBe(true);
|
||||
expect(isPathWithinWorkspace('/workspace')).toBe(true);
|
||||
const defaultPath = getDefaultWorkspacesPath();
|
||||
expect(isPathWithinWorkspace(`${defaultPath}/repo`)).toBe(true);
|
||||
expect(isPathWithinWorkspace(`${defaultPath}/repo/src`)).toBe(true);
|
||||
expect(isPathWithinWorkspace(defaultPath)).toBe(true);
|
||||
});
|
||||
|
||||
test('should allow relative paths that resolve within workspace', async () => {
|
||||
const { isPathWithinWorkspace } = await importFresh();
|
||||
expect(isPathWithinWorkspace('repo', '/workspace')).toBe(true);
|
||||
expect(isPathWithinWorkspace('./repo', '/workspace')).toBe(true);
|
||||
expect(isPathWithinWorkspace('repo/src/file.ts', '/workspace')).toBe(true);
|
||||
const defaultPath = getDefaultWorkspacesPath();
|
||||
expect(isPathWithinWorkspace('repo', defaultPath)).toBe(true);
|
||||
expect(isPathWithinWorkspace('./repo', defaultPath)).toBe(true);
|
||||
expect(isPathWithinWorkspace('repo/src/file.ts', defaultPath)).toBe(true);
|
||||
});
|
||||
|
||||
test('should reject path traversal attempts', async () => {
|
||||
const { isPathWithinWorkspace } = await importFresh();
|
||||
expect(isPathWithinWorkspace('/workspace/../etc/passwd')).toBe(false);
|
||||
expect(isPathWithinWorkspace('../etc/passwd', '/workspace')).toBe(false);
|
||||
expect(isPathWithinWorkspace('/workspace/repo/../../etc/passwd')).toBe(false);
|
||||
expect(isPathWithinWorkspace('foo/../../../etc/passwd', '/workspace')).toBe(false);
|
||||
const defaultPath = getDefaultWorkspacesPath();
|
||||
expect(isPathWithinWorkspace(`${defaultPath}/../etc/passwd`)).toBe(false);
|
||||
expect(isPathWithinWorkspace('../etc/passwd', defaultPath)).toBe(false);
|
||||
expect(isPathWithinWorkspace(`${defaultPath}/repo/../../etc/passwd`)).toBe(false);
|
||||
expect(isPathWithinWorkspace('foo/../../../etc/passwd', defaultPath)).toBe(false);
|
||||
});
|
||||
|
||||
test('should reject paths outside workspace', async () => {
|
||||
|
|
@ -65,62 +83,63 @@ describe('path-validation', () => {
|
|||
|
||||
test('should reject paths that look similar but are outside workspace', async () => {
|
||||
const { isPathWithinWorkspace } = await importFresh();
|
||||
expect(isPathWithinWorkspace('/workspace-other')).toBe(false);
|
||||
expect(isPathWithinWorkspace('/workspaces')).toBe(false);
|
||||
expect(isPathWithinWorkspace('/workspace_backup')).toBe(false);
|
||||
const defaultPath = getDefaultWorkspacesPath();
|
||||
expect(isPathWithinWorkspace(`${defaultPath}-other`)).toBe(false);
|
||||
});
|
||||
|
||||
test('should use WORKSPACE_PATH env var when set', async () => {
|
||||
process.env.WORKSPACE_PATH = '/custom/path';
|
||||
test('should use ARCHON_HOME env var when set', async () => {
|
||||
process.env.ARCHON_HOME = '/custom/archon';
|
||||
const { isPathWithinWorkspace } = await importFresh();
|
||||
expect(isPathWithinWorkspace('/custom/path/repo')).toBe(true);
|
||||
expect(isPathWithinWorkspace('/workspace/repo')).toBe(false); // Original path now rejected
|
||||
expect(isPathWithinWorkspace('/custom/archon/workspaces/repo')).toBe(true);
|
||||
const defaultPath = getDefaultWorkspacesPath();
|
||||
expect(isPathWithinWorkspace(`${defaultPath}/repo`)).toBe(false); // Default path now rejected
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateAndResolvePath', () => {
|
||||
test('should return resolved path for valid paths', async () => {
|
||||
const { validateAndResolvePath } = await importFresh();
|
||||
// Use resolve() for platform-specific paths
|
||||
expect(validateAndResolvePath('/workspace/repo')).toBe(resolve('/workspace/repo'));
|
||||
expect(validateAndResolvePath('repo', '/workspace')).toBe(resolve('/workspace/repo'));
|
||||
expect(validateAndResolvePath('./src', '/workspace/repo')).toBe(
|
||||
resolve('/workspace/repo/src')
|
||||
const defaultPath = getDefaultWorkspacesPath();
|
||||
expect(validateAndResolvePath(`${defaultPath}/repo`)).toBe(resolve(`${defaultPath}/repo`));
|
||||
expect(validateAndResolvePath('repo', defaultPath)).toBe(resolve(`${defaultPath}/repo`));
|
||||
expect(validateAndResolvePath('./src', `${defaultPath}/repo`)).toBe(
|
||||
resolve(`${defaultPath}/repo/src`)
|
||||
);
|
||||
});
|
||||
|
||||
test('should throw for path traversal attempts', async () => {
|
||||
const { validateAndResolvePath } = await importFresh();
|
||||
const workspaceRoot = resolve('/workspace');
|
||||
expect(() => validateAndResolvePath('../etc/passwd', '/workspace')).toThrow(
|
||||
`Path must be within ${workspaceRoot} directory`
|
||||
const defaultPath = getDefaultWorkspacesPath();
|
||||
expect(() => validateAndResolvePath('../etc/passwd', defaultPath)).toThrow(
|
||||
`Path must be within ${defaultPath} directory`
|
||||
);
|
||||
expect(() => validateAndResolvePath('/workspace/../etc/passwd')).toThrow(
|
||||
`Path must be within ${workspaceRoot} directory`
|
||||
expect(() => validateAndResolvePath(`${defaultPath}/../etc/passwd`)).toThrow(
|
||||
`Path must be within ${defaultPath} directory`
|
||||
);
|
||||
});
|
||||
|
||||
test('should throw for paths outside workspace', async () => {
|
||||
const { validateAndResolvePath } = await importFresh();
|
||||
const workspaceRoot = resolve('/workspace');
|
||||
const defaultPath = getDefaultWorkspacesPath();
|
||||
expect(() => validateAndResolvePath('/etc/passwd')).toThrow(
|
||||
`Path must be within ${workspaceRoot} directory`
|
||||
`Path must be within ${defaultPath} directory`
|
||||
);
|
||||
expect(() => validateAndResolvePath('/tmp/evil')).toThrow(
|
||||
`Path must be within ${workspaceRoot} directory`
|
||||
`Path must be within ${defaultPath} directory`
|
||||
);
|
||||
});
|
||||
|
||||
test('should use custom WORKSPACE_PATH for validation and error message', async () => {
|
||||
process.env.WORKSPACE_PATH = '/my/custom/workspace';
|
||||
test('should use custom ARCHON_HOME for validation and error message', async () => {
|
||||
process.env.ARCHON_HOME = '/my/custom/archon';
|
||||
const { validateAndResolvePath } = await importFresh();
|
||||
const customWorkspace = resolve('/my/custom/workspace');
|
||||
const customWorkspace = resolve('/my/custom/archon/workspaces');
|
||||
// Valid path under custom workspace
|
||||
expect(validateAndResolvePath('/my/custom/workspace/repo')).toBe(
|
||||
resolve('/my/custom/workspace/repo')
|
||||
expect(validateAndResolvePath('/my/custom/archon/workspaces/repo')).toBe(
|
||||
resolve('/my/custom/archon/workspaces/repo')
|
||||
);
|
||||
// Path under default workspace should now throw with custom workspace in message
|
||||
expect(() => validateAndResolvePath('/workspace/repo')).toThrow(
|
||||
const defaultPath = getDefaultWorkspacesPath();
|
||||
expect(() => validateAndResolvePath(`${defaultPath}/repo`)).toThrow(
|
||||
`Path must be within ${customWorkspace} directory`
|
||||
);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -2,29 +2,26 @@
|
|||
* Path validation utilities to prevent path traversal attacks
|
||||
*/
|
||||
import { resolve, sep } from 'path';
|
||||
import { getArchonWorkspacesPath } from './archon-paths';
|
||||
|
||||
// resolve() converts relative paths to absolute (cross-platform)
|
||||
const WORKSPACE_ROOT = resolve(process.env.WORKSPACE_PATH ?? '/workspace');
|
||||
// Lazy evaluation to allow tests to modify env vars
|
||||
function getWorkspaceRoot(): string {
|
||||
return resolve(getArchonWorkspacesPath());
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that a resolved path stays within the allowed workspace directory.
|
||||
* Prevents path traversal attacks using sequences like "../"
|
||||
*
|
||||
* @param targetPath - The path to validate (can be absolute or relative)
|
||||
* @param basePath - Optional base path to resolve relative paths against (defaults to /workspace)
|
||||
* @param basePath - Optional base path to resolve relative paths against (defaults to workspace root)
|
||||
* @returns true if path is within workspace, false otherwise
|
||||
*/
|
||||
export function isPathWithinWorkspace(
|
||||
targetPath: string,
|
||||
basePath: string = WORKSPACE_ROOT
|
||||
): boolean {
|
||||
// Resolve to absolute path
|
||||
const resolvedTarget = resolve(basePath, targetPath);
|
||||
const resolvedWorkspace = resolve(WORKSPACE_ROOT);
|
||||
|
||||
// Check if resolved path starts with workspace root
|
||||
// Use trailing separator to prevent matching /workspace-other
|
||||
return resolvedTarget === resolvedWorkspace || resolvedTarget.startsWith(resolvedWorkspace + sep);
|
||||
export function isPathWithinWorkspace(targetPath: string, basePath?: string): boolean {
|
||||
const workspaceRoot = getWorkspaceRoot();
|
||||
const effectiveBase = basePath ?? workspaceRoot;
|
||||
const resolvedTarget = resolve(effectiveBase, targetPath);
|
||||
return resolvedTarget === workspaceRoot || resolvedTarget.startsWith(workspaceRoot + sep);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -36,14 +33,13 @@ export function isPathWithinWorkspace(
|
|||
* @returns The resolved absolute path
|
||||
* @throws Error if path is outside workspace
|
||||
*/
|
||||
export function validateAndResolvePath(
|
||||
targetPath: string,
|
||||
basePath: string = WORKSPACE_ROOT
|
||||
): string {
|
||||
const resolvedPath = resolve(basePath, targetPath);
|
||||
export function validateAndResolvePath(targetPath: string, basePath?: string): string {
|
||||
const workspaceRoot = getWorkspaceRoot();
|
||||
const effectiveBase = basePath ?? workspaceRoot;
|
||||
const resolvedPath = resolve(effectiveBase, targetPath);
|
||||
|
||||
if (!isPathWithinWorkspace(resolvedPath)) {
|
||||
throw new Error(`Path must be within ${WORKSPACE_ROOT} directory`);
|
||||
throw new Error(`Path must be within ${workspaceRoot} directory`);
|
||||
}
|
||||
|
||||
return resolvedPath;
|
||||
|
|
|
|||
Loading…
Reference in a new issue