mirror of
https://github.com/hyperdxio/hyperdx
synced 2026-04-21 13:37:15 +00:00
## Summary - Isolate dev, E2E, and integration test environments so multiple git worktrees can run all three simultaneously without port conflicts - Each worktree gets a deterministic slot (0-99) with unique port ranges: dev (30100-31199), E2E (20320-21399), CI integration (14320-40098) - Dev portal dashboard (http://localhost:9900) auto-discovers all running stacks, streams logs, and provides a History tab for past run logs ## Port Isolation | Environment | Port Range | Project Name | |---|---|---| | Dev stack | 30100-31199 | `hdx-dev-<slot>` | | E2E tests | 20320-21399 | `e2e-<slot>` | | CI integration | 14320-40098 | `int-<slot>` | All three can run simultaneously from the same worktree with zero port conflicts. ## Dev Portal Features **Live tab:** - Auto-discovers dev, E2E, and integration Docker containers + local services (API, App) - Groups all environments for the same worktree into a single card - SSE log streaming with ANSI color rendering, capped at 5000 lines - Auto-starts in background from `make dev`, `make dev-e2e`, `make dev-int` **History tab:** - Logs archived to `~/.config/hyperdx/dev-slots/<slot>/history/` on exit (instead of deleted) - Each archived run includes `meta.json` with worktree/branch metadata - Grouped by worktree with collapsible cards, search by worktree/branch - View any past log file in the same log panel, delete individual runs or clear all - Custom dark-themed confirm modal (no native browser dialogs) ## What Changed - **`scripts/dev-env.sh`** — Slot-based port assignments, portal auto-start, log archival on exit - **`scripts/test-e2e.sh`** — E2E port range (20320-21399), log capture via `tee`, portal auto-start, log archival - **`scripts/ensure-dev-portal.sh`** — Shared singleton portal launcher (works sourced or executed) - **`scripts/dev-portal/server.js`** — Discovery for dev/E2E/CI containers, history API (list/read/delete), local service port probing - **`scripts/dev-portal/index.html`** — Live/History tabs, 
worktree-grouped cards, search, collapse/expand, custom confirm modal, ANSI color log rendering - **`docker-compose.dev.yml`** — Parameterized ports/volumes/project name with `hdx.dev.*` labels - **`packages/app/tests/e2e/docker-compose.yml`** — Updated to new E2E port defaults - **`Makefile`** — `dev-int`/`dev-e2e` targets with log capture + portal auto-start; `dev-portal-stop`; `dev-clean` stops everything + wipes slot data - **`.env` files** — Ports use `${VAR:-default}` syntax across dev, E2E, and CI environments - **`agent_docs/development.md`** — Full documentation for isolation, port tables, E2E/CI port ranges ## How to Use ```bash # Start dev stack (auto-starts portal) make dev # Run E2E tests (auto-starts portal, separate ports) make dev-e2e FILE=navigation # Run integration tests (auto-starts portal, separate ports) make dev-int FILE=alerts # All three can run simultaneously from the same worktree # Portal at http://localhost:9900 shows everything # Stop portal make dev-portal-stop # Clean up everything (all stacks + portal + history) make dev-clean ``` ## Dev Portal <img width="1692" height="944" alt="image" src="https://github.com/user-attachments/assets/6ed388a3-43bc-4552-aa8d-688077b79fb7" /> <img width="1689" height="935" alt="image" src="https://github.com/user-attachments/assets/8677a138-0a40-4746-93ed-3b355c8bd45e" /> ## Test Plan - [x] Run `make dev` — verify services start with slot-assigned ports - [x] Run `make dev` in a second worktree — verify different ports, no conflicts - [x] Run `make dev-e2e` and `make dev-int` simultaneously — no port conflicts - [x] Open http://localhost:9900 — verify all stacks grouped by worktree - [x] Click a service to view logs — verify ANSI colors render correctly - [x] Stop a stack — verify logs archived to History tab with correct worktree - [x] History tab — search, collapse/expand, view archived logs, delete - [x] `make dev-clean` — stops everything, wipes slot data and history
287 lines
9.4 KiB
TypeScript
287 lines
9.4 KiB
TypeScript
/**
|
|
* Global setup for full-stack E2E tests
|
|
*
|
|
* This setup:
|
|
* 1. Clears MongoDB database to ensure clean state
|
|
* 2. Creates a test user and team
|
|
* 3. Applies DEFAULT_SOURCES from .env.e2e
|
|
* 4. Saves authentication state for tests
|
|
*
|
|
* Full-stack mode uses:
|
|
* - MongoDB (local) for authentication, teams, users, persistence
|
|
* - API server (local) for backend logic
|
|
 * - Local ClickHouse (seeded) for telemetry data (logs, traces, metrics, K8s)
|
|
*/
|
|
|
|
import { execSync } from 'child_process';
|
|
import fs from 'fs';
|
|
import path from 'path';
|
|
import { chromium, FullConfig } from '@playwright/test';
|
|
|
|
import { seedClickHouse } from './seed-clickhouse';
|
|
|
|
// Configuration constants

// How many times to poll `${API_URL}/health` (RETRY_DELAY_MS apart) before
// giving up; overridable via E2E_API_HEALTH_CHECK_MAX_RETRIES.
const API_HEALTH_CHECK_MAX_RETRIES = parseInt(
  process.env.E2E_API_HEALTH_CHECK_MAX_RETRIES || '30',
  10,
);
// Delay between consecutive API health-check attempts.
const API_HEALTH_CHECK_RETRY_DELAY_MS = 1000;
// Max wait for the source selector to appear after navigating to /search.
const SOURCE_SELECTOR_TIMEOUT_MS = 10000;
// Max wait for full page navigations during setup.
const PAGE_LOAD_TIMEOUT_MS = 30000;

// Password must be at least 8 characters with uppercase, lowercase, number, and special char
const DEFAULT_TEST_USER = {
  email: process.env.E2E_TEST_USER_EMAIL || 'e2e-test@hyperdx.io',
  password: process.env.E2E_TEST_USER_PASSWORD || 'TestPassword123!',
} as const;

// Port configuration from HDX_E2E_* env vars (set by scripts/test-e2e.sh)
const API_PORT = process.env.HDX_E2E_API_PORT || '21000';
const APP_PORT = process.env.HDX_E2E_APP_PORT || '21300';
const MONGO_PORT = process.env.HDX_E2E_MONGO_PORT || '21100';

// Fully-qualified base URLs; explicit E2E_*_URL env vars take precedence over
// the slot-derived localhost ports above.
const API_URL = process.env.E2E_API_URL || `http://localhost:${API_PORT}`;
const APP_URL = process.env.E2E_APP_URL || `http://localhost:${APP_PORT}`;
// Where the authenticated browser storage state is persisted for tests.
const AUTH_FILE = path.join(__dirname, '.auth/user.json');
// NOTE(review): MONGO_URI appears unused in this file — verify whether it is
// consumed elsewhere before removing.
const MONGO_URI =
  process.env.MONGO_URI || `mongodb://localhost:${MONGO_PORT}/hyperdx-e2e`;
|
|
|
|
/**
|
|
* Clears the MongoDB database to ensure a clean slate for tests
|
|
*/
|
|
function clearDatabase() {
|
|
console.log('Clearing MongoDB database for fresh test run...');
|
|
|
|
try {
|
|
const dockerComposeFile = path.join(__dirname, 'docker-compose.yml');
|
|
const e2eSlot = process.env.HDX_E2E_SLOT || '0';
|
|
const e2eProject = `e2e-${e2eSlot}`;
|
|
if (fs.existsSync(dockerComposeFile)) {
|
|
execSync(
|
|
`docker compose -p ${e2eProject} -f "${dockerComposeFile}" exec -T db mongosh --quiet --eval "use hyperdx-e2e; db.dropDatabase()" 2>&1`,
|
|
{ encoding: 'utf-8', stdio: 'pipe' },
|
|
);
|
|
console.log(' ✓ Database cleared successfully (via Docker)');
|
|
return;
|
|
}
|
|
|
|
throw new Error('Could not connect to MongoDB');
|
|
} catch (error) {
|
|
console.warn(' ⚠ Warning: Could not clear database');
|
|
console.warn(` ${error instanceof Error ? error.message : String(error)}`);
|
|
console.warn(
|
|
' This may cause issues if old data exists from previous test runs',
|
|
);
|
|
console.warn(
|
|
' Consider manually clearing the database or setting E2E_UNIQUE_USER=true',
|
|
);
|
|
}
|
|
}
|
|
|
|
async function globalSetup(_config: FullConfig) {
|
|
console.log('Setting up full-stack E2E environment');
|
|
console.log(' MongoDB: local (auth, teams, persistence)');
|
|
console.log(' ClickHouse: local instance (telemetry data)');
|
|
|
|
// Set timezone
|
|
process.env.TZ = 'America/New_York';
|
|
|
|
// Seed ClickHouse with test data
|
|
await seedClickHouse();
|
|
|
|
// Clean up any existing auth state to ensure fresh setup
|
|
if (fs.existsSync(AUTH_FILE)) {
|
|
console.log(' Removing existing auth state');
|
|
fs.unlinkSync(AUTH_FILE);
|
|
}
|
|
|
|
// Generate unique test user if E2E_UNIQUE_USER is set (useful for parallel CI runs)
|
|
const MOCK_USER =
|
|
process.env.E2E_UNIQUE_USER === 'true'
|
|
? {
|
|
email: `e2e-test-${Date.now()}-${Math.random().toString(36).slice(2, 9)}@hyperdx.io`,
|
|
password: DEFAULT_TEST_USER.password,
|
|
}
|
|
: { ...DEFAULT_TEST_USER };
|
|
|
|
if (process.env.E2E_UNIQUE_USER === 'true') {
|
|
console.log(` Using unique test user: ${MOCK_USER.email}`);
|
|
}
|
|
|
|
// Wait for API server to be ready
|
|
console.log('Waiting for API server to be ready...');
|
|
|
|
for (let i = 0; i < API_HEALTH_CHECK_MAX_RETRIES; i++) {
|
|
try {
|
|
const response = await fetch(`${API_URL}/health`).catch(() => null);
|
|
if (response?.ok) {
|
|
console.log(' API server is ready');
|
|
break;
|
|
}
|
|
} catch {
|
|
// Continue retrying
|
|
}
|
|
|
|
if (i === API_HEALTH_CHECK_MAX_RETRIES - 1) {
|
|
throw new Error(
|
|
`API server not ready after ${(API_HEALTH_CHECK_MAX_RETRIES * API_HEALTH_CHECK_RETRY_DELAY_MS) / 1000} seconds`,
|
|
);
|
|
}
|
|
|
|
await new Promise(resolve =>
|
|
setTimeout(resolve, API_HEALTH_CHECK_RETRY_DELAY_MS),
|
|
);
|
|
}
|
|
|
|
// Clear MongoDB database to ensure DEFAULT_SOURCES is applied
|
|
clearDatabase();
|
|
|
|
// Create test user and save auth state
|
|
console.log('Creating test user and logging in');
|
|
|
|
const browser = await chromium.launch();
|
|
const context = await browser.newContext({ baseURL: APP_URL });
|
|
const page = await context.newPage();
|
|
|
|
try {
|
|
// Register user
|
|
console.log(` Registering user: ${MOCK_USER.email}`);
|
|
const registerResponse = await page.request.post(
|
|
`${API_URL}/register/password`,
|
|
{
|
|
data: {
|
|
email: MOCK_USER.email,
|
|
password: MOCK_USER.password,
|
|
confirmPassword: MOCK_USER.password,
|
|
},
|
|
},
|
|
);
|
|
|
|
if (!registerResponse.ok()) {
|
|
const status = registerResponse.status();
|
|
const body = await registerResponse.text();
|
|
|
|
// 409 Conflict should not happen since we cleared the database
|
|
// If it does, it indicates the database clear failed
|
|
if (status === 409) {
|
|
console.warn(
|
|
' ⚠ Warning: User/team already exists (409 Conflict) - database may not have been cleared',
|
|
);
|
|
console.warn(
|
|
' DEFAULT_SOURCES will NOT be applied (only happens on new team creation)',
|
|
);
|
|
console.warn(' Tests may fail due to stale or incorrect sources');
|
|
} else {
|
|
// Any other error is a real failure
|
|
throw new Error(`Registration failed: ${status} ${body}`);
|
|
}
|
|
} else {
|
|
console.log(' ✓ User registered successfully');
|
|
console.log(' ✓ DEFAULT_SOURCES applied to new team');
|
|
}
|
|
|
|
// Login
|
|
console.log(' Logging in');
|
|
const loginResponse = await page.request.post(`${API_URL}/login/password`, {
|
|
data: {
|
|
email: MOCK_USER.email,
|
|
password: MOCK_USER.password,
|
|
},
|
|
failOnStatusCode: false,
|
|
});
|
|
|
|
// Login returns 302 redirect on success
|
|
if (loginResponse.status() !== 302 && !loginResponse.ok()) {
|
|
const body = await loginResponse.text();
|
|
throw new Error(`Login failed: ${loginResponse.status()} ${body}`);
|
|
}
|
|
|
|
// Navigate to the app to establish session
|
|
await page.goto('/', { timeout: PAGE_LOAD_TIMEOUT_MS });
|
|
|
|
console.log(' Login successful');
|
|
|
|
// Verify default sources were auto-created (via DEFAULT_SOURCES env var)
|
|
console.log('Verifying default sources were created');
|
|
let sourcesResponse;
|
|
try {
|
|
sourcesResponse = await page.request.get(`${API_URL}/sources`);
|
|
} catch (error) {
|
|
console.error(' Network error fetching sources:', error);
|
|
throw new Error(
|
|
`Failed to connect to API at ${API_URL}/sources - is the API server running?`,
|
|
);
|
|
}
|
|
|
|
if (!sourcesResponse.ok()) {
|
|
const errorText = await sourcesResponse.text();
|
|
console.error(
|
|
` API error fetching sources: ${sourcesResponse.status} ${errorText}`,
|
|
);
|
|
|
|
if (
|
|
sourcesResponse.status() === 401 ||
|
|
sourcesResponse.status() === 403
|
|
) {
|
|
throw new Error('Authentication failed - check session setup');
|
|
} else if (sourcesResponse.status() >= 500) {
|
|
throw new Error(
|
|
`API server error (${sourcesResponse.status()}) - check API logs`,
|
|
);
|
|
} else {
|
|
throw new Error(`Failed to fetch sources: ${sourcesResponse.status()}`);
|
|
}
|
|
}
|
|
|
|
const sources = await sourcesResponse.json();
|
|
console.log(` Found ${sources.length} default sources`);
|
|
if (sources.length === 0) {
|
|
console.error(' ❌ ERROR: No sources found');
|
|
console.error(
|
|
' This should not happen since we just created a fresh team',
|
|
);
|
|
console.error(
|
|
' Check that DEFAULT_SOURCES is properly configured in packages/api/.env.e2e',
|
|
);
|
|
throw new Error(
|
|
'No sources found - DEFAULT_SOURCES may be misconfigured',
|
|
);
|
|
} else {
|
|
console.log(' ✓ Sources configured:');
|
|
sources.forEach((source: any) => {
|
|
console.log(` - ${source.name} (${source.kind})`);
|
|
});
|
|
}
|
|
|
|
// Navigate to search page to ensure sources are loaded
|
|
console.log('Navigating to search page');
|
|
await page.goto('/search', { timeout: PAGE_LOAD_TIMEOUT_MS });
|
|
|
|
// Wait for source selector to be ready (indicates sources are loaded)
|
|
await page.waitForSelector('[data-testid="source-selector"]', {
|
|
state: 'visible',
|
|
timeout: SOURCE_SELECTOR_TIMEOUT_MS,
|
|
});
|
|
|
|
// Save authentication state
|
|
const authDir = path.dirname(AUTH_FILE);
|
|
if (!fs.existsSync(authDir)) {
|
|
fs.mkdirSync(authDir, { recursive: true });
|
|
}
|
|
await context.storageState({ path: AUTH_FILE });
|
|
console.log(` Auth state saved to ${AUTH_FILE}`);
|
|
|
|
console.log('Full-stack E2E setup complete');
|
|
console.log(
|
|
' Using local ClickHouse with seeded test data for logs, traces, metrics, and K8s',
|
|
);
|
|
} catch (error) {
|
|
console.error('Setup failed:', error);
|
|
throw error;
|
|
} finally {
|
|
await context.close();
|
|
await browser.close();
|
|
}
|
|
}
|
|
|
|
export default globalSetup;
|