ci: Improvements to test containers e2e setup (#25140)

This commit is contained in:
Declan Carroll 2026-02-03 10:16:32 +00:00 committed by GitHub
parent b26a25cdaf
commit 8730bedd2a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 126 additions and 29 deletions

View file

@@ -145,7 +145,7 @@ jobs:
- name: Pre-pull Test Container Images
if: ${{ !contains(inputs.test-command, 'test:local') }}
run: npx tsx packages/testing/containers/pull-test-images.ts || true
run: npx tsx packages/testing/containers/pull-test-images.ts ${{ matrix.images }} || true
- name: Run Tests
# Uses pre-distributed specs if orchestration enabled, otherwise falls back to Playwright sharding

View file

@@ -11,7 +11,7 @@ import os from 'os';
* @param maxWaitMs - Maximum time to wait before giving up (default: 10000ms)
*/
export async function waitForNetworkQuiet(
quietDurationMs = 2000,
quietDurationMs = 1000,
maxWaitMs = 10000,
): Promise<void> {
// Only run in CI on Linux

View file

@@ -1,44 +1,111 @@
#!/usr/bin/env tsx
/**
 * Pre-pull test container images in parallel.
*
* Usage:
* npx tsx pull-test-images.ts # Pull all images
* npx tsx pull-test-images.ts postgres redis mailpit # Pull specific images
*/
import { execSync } from 'child_process';
import { exec } from 'child_process';
import { promisify } from 'util';
import { TEST_CONTAINER_IMAGES } from './test-containers';
console.log('🐳 Pre-pulling test container images...');
const startTime = Date.now();
const timings = [];
const images = Object.values(TEST_CONTAINER_IMAGES);
const execAsync = promisify(exec);
for (const image of images) {
// Skip :local tagged images - these are locally built and won't exist in any registry
if (image.endsWith(':local')) {
console.log(`\n⏭ Skipping ${image} (local build)`);
continue;
}
/** A key of the TEST_CONTAINER_IMAGES map (e.g. 'postgres', 'redis'). */
type ImageKey = keyof typeof TEST_CONTAINER_IMAGES;

/** Outcome of one `docker pull`, collected for the end-of-run summary. */
interface PullResult {
// Full image reference that was pulled.
image: string;
// Wall-clock pull time in seconds, pre-formatted to one decimal place.
duration: string;
// False when `docker pull` exited non-zero; failures are reported, not thrown.
success: boolean;
// Layers docker reported as "Already exists" (served from local cache).
cached: number;
// Layers docker reported as "Pull complete" (freshly downloaded).
pulled: number;
}
/**
 * Pull a single container image via `docker pull` and report timing plus
 * layer-cache statistics.
 *
 * Never throws: a failed pull is reported through `success: false` in the
 * returned result so that main() can decide whether to abort (STRICT mode).
 * Using the async exec (no inherited stdio) lets many pulls run in parallel
 * and lets us inspect docker's combined output.
 *
 * @param image - Full image reference (e.g. "postgres:16-alpine")
 * @returns Per-image result with duration and cached/pulled layer counts
 */
async function pullImage(image: string): Promise<PullResult> {
  const imageStart = Date.now();
  try {
    const { stdout, stderr } = await execAsync(`docker pull ${image}`);
    const output = stdout + stderr;
    // Count cached vs freshly-downloaded layers from docker's progress lines.
    const cached = (output.match(/Already exists/g) ?? []).length;
    const pulled = (output.match(/Pull complete/g) ?? []).length;
    const duration = ((Date.now() - imageStart) / 1000).toFixed(1);
    console.log(`✅ Successfully pulled ${image} (${duration}s)`);
    return { image, duration, success: true, cached, pulled };
  } catch {
    const duration = ((Date.now() - imageStart) / 1000).toFixed(1);
    console.error(`❌ Failed to pull ${image} (${duration}s)`);
    return { image, duration, success: false, cached: 0, pulled: 0 };
  }
}
/**
 * Type guard: narrow an arbitrary CLI argument to a known key of the
 * TEST_CONTAINER_IMAGES map.
 */
function isValidImageKey(key: string): key is ImageKey {
  const known = key in TEST_CONTAINER_IMAGES;
  return known;
}
/**
 * Resolve CLI arguments to the image references that should be pulled.
 * No arguments means "pull every known image"; any unknown name aborts the
 * script with an error listing the valid keys.
 */
function getRequestedImages(args: string[]): string[] {
  if (args.length === 0) {
    return Object.values(TEST_CONTAINER_IMAGES);
  }
  // Single pass: partition arguments into resolved references and unknowns.
  const resolved: string[] = [];
  const invalid: string[] = [];
  for (const arg of args) {
    if (isValidImageKey(arg)) {
      resolved.push(TEST_CONTAINER_IMAGES[arg]);
    } else {
      invalid.push(arg);
    }
  }
  if (invalid.length > 0) {
    console.error(`❌ Unknown image(s): ${invalid.join(', ')}`);
    console.error(` Valid: ${Object.keys(TEST_CONTAINER_IMAGES).join(', ')}`);
    process.exit(1);
  }
  return resolved;
}
async function main() {
const args = process.argv.slice(2);
const images = getRequestedImages(args);
const mode = args.length > 0 ? `${args.length} specified` : 'all';
console.log(`🐳 Pre-pulling test container images (${mode})...`);
const startTime = Date.now();
// Filter out local images and start all pulls in parallel
const imagesToPull = images.filter((image) => {
if (image.endsWith(':local')) {
console.log(`⏭️ Skipping ${image} (local build)`);
return false;
}
return true;
});
for (const image of imagesToPull) {
console.log(`🔄 Starting pull: ${image}`);
}
const pullPromises = imagesToPull.map(pullImage);
const results = await Promise.all(pullPromises);
// Check for failures
const failures = results.filter((r) => !r.success);
if (failures.length > 0 && process.env.STRICT_IMAGE_PULL === 'true') {
console.error(`❌ Failed to pull ${failures.length} image(s)`);
process.exit(1);
}
const totalTime = ((Date.now() - startTime) / 1000).toFixed(1);
const totalCached = results.reduce((sum, r) => sum + r.cached, 0);
const totalPulled = results.reduce((sum, r) => sum + r.pulled, 0);
console.log('\n' + '='.repeat(60));
console.log('📊 Pull Summary:');
results.forEach(({ image, duration, success, cached, pulled }) => {
const layers = cached + pulled > 0 ? ` (${cached} cached, ${pulled} pulled)` : '';
console.log(` ${success ? '✅' : '❌'} ${image}: ${duration}s${layers}`);
});
console.log('='.repeat(60));
console.log(`📦 Layers: ${totalCached} cached, ${totalPulled} pulled`);
console.log(`✅ Total time: ${totalTime}s (parallel)`);
}
// Entry point. `void` marks the returned promise as intentionally
// un-awaited at top level; a rejection inside main() surfaces as an
// unhandled rejection.
void main();

View file

@@ -33,6 +33,19 @@ const E2E_PROJECT = 'multi-main:e2e';
const CONTAINER_STARTUP_TIME = 22500; // 22.5 seconds average (heavier stacks with extra services take longer)
const MAX_GROUP_DURATION = 5 * 60 * 1000; // 5 minutes - split groups larger than this
/** Maps capability tags to required container images */
/**
 * Maps capability tags to required container images.
 * Values appear to be keys into the test-container image map (they match the
 * camelCase keys used by pull-test-images.ts) — confirm against
 * packages/testing/containers/test-containers.
 */
const CAPABILITY_IMAGES = {
email: ['mailpit'],
kafka: ['kafka'],
observability: ['victoriaLogs', 'victoriaMetrics', 'vector', 'jaeger', 'n8nTracer'],
oidc: ['keycloak'],
proxy: ['mockserver'],
'source-control': ['gitea'],
};
/** Base images needed by all E2E tests */
const BASE_IMAGES = ['postgres', 'redis', 'caddy', 'n8n', 'taskRunner'];
// CLI flags: --matrix emits the shard matrix; --orchestrate additionally
// pre-distributes specs (and their required images) across shards.
const args = process.argv.slice(2);
const matrixMode = args.includes('--matrix');
const orchestrate = args.includes('--orchestrate');
@@ -285,11 +298,28 @@ function distribute(numShards) {
return distributeCapabilityAware(numShards);
}
/**
* Get required images for a bucket based on its capabilities
* @param {{capabilities: Set<string>}} bucket
* @returns {string[]}
*/
function getRequiredImages(bucket) {
const images = new Set(BASE_IMAGES);
for (const cap of bucket.capabilities) {
const capImages = CAPABILITY_IMAGES[cap];
if (capImages) {
for (const img of capImages) images.add(img);
}
}
return [...images].sort();
}
if (matrixMode) {
const buckets = orchestrate ? distribute(shards) : null;
const matrix = Array.from({ length: shards }, (_, i) => ({
shard: i + 1,
specs: orchestrate ? (buckets?.[i].specs.join(' ') ?? '') : '',
images: orchestrate && buckets ? getRequiredImages(buckets[i]).join(' ') : '',
}));
if (orchestrate && buckets) {