mirror of
https://github.com/n8n-io/n8n
synced 2026-04-21 15:47:20 +00:00
refactor(core): Drop tool-mode builder leftovers from instance-ai workflow-builder (no-changelog)
- Delete applyPatches (patch-code.ts) and its tests — only consumer was the retired build-workflow.tool.ts - Delete parseAndValidate plus its tests — no production caller remained after tool-mode retirement - Trim extract-code.ts to SDK_IMPORT_STATEMENT; the string-manipulation helpers were tool-mode only - Delete sdk-prompt-sections.ts; the builder prompt imports directly from @n8n/workflow-sdk - Trim the workflow-builder barrel and types to what is still consumed (SDK_IMPORT_STATEMENT, partitionWarnings, ValidationWarning)
This commit is contained in:
parent
60604620f3
commit
85d8ea6911
9 changed files with 7 additions and 1035 deletions
|
|
@ -1,164 +0,0 @@
|
|||
import { resolveLocalImports, stripImportStatements, stripSdkImports } from '../extract-code';
|
||||
|
||||
describe('stripImportStatements', () => {
|
||||
it('should strip all import statements', () => {
|
||||
const code = `import { workflow } from '@n8n/workflow-sdk';
|
||||
import { foo } from './local';
|
||||
|
||||
const x = 1;`;
|
||||
expect(stripImportStatements(code)).toBe('const x = 1;');
|
||||
});
|
||||
});
|
||||
|
||||
describe('stripSdkImports', () => {
|
||||
it('should strip only SDK imports and preserve local imports', () => {
|
||||
const code = `import { workflow, node } from '@n8n/workflow-sdk';
|
||||
import { weatherNode } from '../chunks/weather';
|
||||
|
||||
const x = workflow('test', 'Test');`;
|
||||
const result = stripSdkImports(code);
|
||||
expect(result).toContain("import { weatherNode } from '../chunks/weather'");
|
||||
expect(result).not.toContain('@n8n/workflow-sdk');
|
||||
expect(result).toContain("const x = workflow('test', 'Test');");
|
||||
});
|
||||
});
|
||||
|
||||
describe('resolveLocalImports', () => {
|
||||
function makeReadFile(files: Record<string, string>) {
|
||||
// eslint-disable-next-line @typescript-eslint/require-await
|
||||
return async (filePath: string): Promise<string | null> => {
|
||||
return files[filePath] ?? null;
|
||||
};
|
||||
}
|
||||
|
||||
it('should return code unchanged when there are no local imports', async () => {
|
||||
const code = `import { workflow } from '@n8n/workflow-sdk';
|
||||
const w = workflow('test', 'Test');`;
|
||||
const result = await resolveLocalImports(code, '/workspace/src', makeReadFile({}));
|
||||
expect(result).toContain("const w = workflow('test', 'Test');");
|
||||
});
|
||||
|
||||
it('should resolve a single local import', async () => {
|
||||
const mainCode = `import { workflow } from '@n8n/workflow-sdk';
|
||||
import { weatherNode } from '../chunks/weather';
|
||||
|
||||
export default workflow('test', 'Test').add(weatherNode);`;
|
||||
|
||||
const chunkCode = `import { node, newCredential } from '@n8n/workflow-sdk';
|
||||
|
||||
export const weatherNode = node({
|
||||
type: 'n8n-nodes-base.openWeatherMap',
|
||||
version: 1,
|
||||
config: { name: 'Weather' }
|
||||
});`;
|
||||
|
||||
const readFile = makeReadFile({
|
||||
'/workspace/chunks/weather.ts': chunkCode,
|
||||
});
|
||||
|
||||
const result = await resolveLocalImports(mainCode, '/workspace/src', readFile);
|
||||
|
||||
// Chunk content should be inlined (without SDK import or export keyword)
|
||||
expect(result).toContain('const weatherNode = node({');
|
||||
expect(result).not.toContain('export const weatherNode');
|
||||
// Local import should be removed from main code
|
||||
expect(result).not.toContain("from '../chunks/weather'");
|
||||
// Main code should still have workflow reference
|
||||
expect(result).toContain("workflow('test', 'Test').add(weatherNode)");
|
||||
});
|
||||
|
||||
it('should resolve multiple imports from different files', async () => {
|
||||
const mainCode = `import { workflow } from '@n8n/workflow-sdk';
|
||||
import { weatherNode } from '../chunks/weather';
|
||||
import { emailNode } from '../chunks/email';
|
||||
|
||||
export default workflow('test', 'Test').add(weatherNode).to(emailNode);`;
|
||||
|
||||
const readFile = makeReadFile({
|
||||
'/workspace/chunks/weather.ts': `import { node } from '@n8n/workflow-sdk';
|
||||
export const weatherNode = node({ type: 'weather', version: 1, config: {} });`,
|
||||
'/workspace/chunks/email.ts': `import { node } from '@n8n/workflow-sdk';
|
||||
export const emailNode = node({ type: 'email', version: 1, config: {} });`,
|
||||
});
|
||||
|
||||
const result = await resolveLocalImports(mainCode, '/workspace/src', readFile);
|
||||
|
||||
expect(result).toContain("const weatherNode = node({ type: 'weather'");
|
||||
expect(result).toContain("const emailNode = node({ type: 'email'");
|
||||
expect(result).not.toContain("from '../chunks/weather'");
|
||||
expect(result).not.toContain("from '../chunks/email'");
|
||||
});
|
||||
|
||||
it('should resolve nested imports (chunk importing another chunk)', async () => {
|
||||
const mainCode = `import { workflow } from '@n8n/workflow-sdk';
|
||||
import { compositeNode } from '../chunks/composite';
|
||||
|
||||
export default workflow('test', 'Test').add(compositeNode);`;
|
||||
|
||||
const readFile = makeReadFile({
|
||||
'/workspace/chunks/composite.ts': `import { node } from '@n8n/workflow-sdk';
|
||||
import { helperNode } from './helper';
|
||||
|
||||
export const compositeNode = node({ type: 'composite', version: 1, config: {} });`,
|
||||
'/workspace/chunks/helper.ts': `import { node } from '@n8n/workflow-sdk';
|
||||
|
||||
export const helperNode = node({ type: 'helper', version: 1, config: {} });`,
|
||||
});
|
||||
|
||||
const result = await resolveLocalImports(mainCode, '/workspace/src', readFile);
|
||||
|
||||
expect(result).toContain("const helperNode = node({ type: 'helper'");
|
||||
expect(result).toContain("const compositeNode = node({ type: 'composite'");
|
||||
});
|
||||
|
||||
it('should handle missing files gracefully', async () => {
|
||||
const mainCode = `import { workflow } from '@n8n/workflow-sdk';
|
||||
import { missing } from '../chunks/nonexistent';
|
||||
|
||||
export default workflow('test', 'Test');`;
|
||||
|
||||
const result = await resolveLocalImports(mainCode, '/workspace/src', makeReadFile({}));
|
||||
|
||||
// Should not throw, just skip the missing import
|
||||
expect(result).toContain("workflow('test', 'Test')");
|
||||
// Local import line should still be removed
|
||||
expect(result).not.toContain("from '../chunks/nonexistent'");
|
||||
});
|
||||
|
||||
it('should deduplicate imports referenced from multiple files', async () => {
|
||||
const mainCode = `import { workflow } from '@n8n/workflow-sdk';
|
||||
import { a } from '../chunks/a';
|
||||
import { b } from '../chunks/b';
|
||||
|
||||
export default workflow('test', 'Test');`;
|
||||
|
||||
const readFile = makeReadFile({
|
||||
'/workspace/chunks/a.ts': `import { node } from '@n8n/workflow-sdk';
|
||||
import { shared } from './shared';
|
||||
export const a = node({ type: 'a', version: 1, config: {} });`,
|
||||
'/workspace/chunks/b.ts': `import { node } from '@n8n/workflow-sdk';
|
||||
import { shared } from './shared';
|
||||
export const b = node({ type: 'b', version: 1, config: {} });`,
|
||||
'/workspace/chunks/shared.ts': `import { node } from '@n8n/workflow-sdk';
|
||||
export const shared = node({ type: 'shared', version: 1, config: {} });`,
|
||||
});
|
||||
|
||||
const result = await resolveLocalImports(mainCode, '/workspace/src', readFile);
|
||||
|
||||
// shared should appear exactly once
|
||||
const matches = result.match(/const shared = node/g);
|
||||
expect(matches).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should add .ts extension when resolving import paths', async () => {
|
||||
const mainCode = `import { foo } from '../chunks/foo';
|
||||
const x = foo;`;
|
||||
|
||||
const readFile = makeReadFile({
|
||||
'/workspace/chunks/foo.ts': 'export const foo = 42;',
|
||||
});
|
||||
|
||||
const result = await resolveLocalImports(mainCode, '/workspace/src', readFile);
|
||||
expect(result).toContain('const foo = 42;');
|
||||
});
|
||||
});
|
||||
|
|
@ -1,125 +1,4 @@
|
|||
jest.mock('@n8n/workflow-sdk', () => ({
|
||||
parseWorkflowCodeToBuilder: jest.fn(),
|
||||
validateWorkflow: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('../extract-code', () => ({
|
||||
stripImportStatements: jest.fn((code: string) => code),
|
||||
}));
|
||||
|
||||
import { parseWorkflowCodeToBuilder, validateWorkflow } from '@n8n/workflow-sdk';
|
||||
|
||||
import { stripImportStatements } from '../extract-code';
|
||||
import { parseAndValidate, partitionWarnings } from '../parse-validate';
|
||||
|
||||
const mockedParseWorkflowCodeToBuilder = jest.mocked(parseWorkflowCodeToBuilder);
|
||||
const mockedValidateWorkflow = jest.mocked(validateWorkflow);
|
||||
const mockedStripImportStatements = jest.mocked(stripImportStatements);
|
||||
|
||||
function makeBuilder(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
regenerateNodeIds: jest.fn(),
|
||||
validate: jest.fn().mockReturnValue({ errors: [], warnings: [] }),
|
||||
toJSON: jest.fn().mockReturnValue({ name: 'Test', nodes: [], connections: {} }),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe('parseAndValidate', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockedStripImportStatements.mockImplementation((code) => code);
|
||||
mockedValidateWorkflow.mockReturnValue({ errors: [], warnings: [] } as never);
|
||||
});
|
||||
|
||||
it('strips imports, parses code, regenerates IDs, and validates', () => {
|
||||
const builder = makeBuilder();
|
||||
mockedParseWorkflowCodeToBuilder.mockReturnValue(builder as never);
|
||||
|
||||
const result = parseAndValidate('const w = workflow("test");');
|
||||
|
||||
expect(mockedStripImportStatements).toHaveBeenCalledWith('const w = workflow("test");');
|
||||
expect(mockedParseWorkflowCodeToBuilder).toHaveBeenCalled();
|
||||
expect(builder.regenerateNodeIds).toHaveBeenCalled();
|
||||
expect(builder.validate).toHaveBeenCalled();
|
||||
expect(builder.toJSON).toHaveBeenCalled();
|
||||
expect(mockedValidateWorkflow).toHaveBeenCalled();
|
||||
expect(result.workflow).toEqual({ name: 'Test', nodes: [], connections: {} });
|
||||
expect(result.warnings).toEqual([]);
|
||||
});
|
||||
|
||||
it('collects graph validation errors and warnings', () => {
|
||||
const builder = makeBuilder({
|
||||
validate: jest.fn().mockReturnValue({
|
||||
errors: [{ code: 'GRAPH_ERROR', message: 'Cycle detected' }],
|
||||
warnings: [{ code: 'MISSING_TRIGGER', message: 'No trigger found' }],
|
||||
}),
|
||||
});
|
||||
mockedParseWorkflowCodeToBuilder.mockReturnValue(builder as never);
|
||||
|
||||
const result = parseAndValidate('code');
|
||||
|
||||
expect(result.warnings).toHaveLength(2);
|
||||
expect(result.warnings[0]).toEqual({ code: 'GRAPH_ERROR', message: 'Cycle detected' });
|
||||
expect(result.warnings[1]).toEqual({ code: 'MISSING_TRIGGER', message: 'No trigger found' });
|
||||
});
|
||||
|
||||
it('collects schema validation errors', () => {
|
||||
const builder = makeBuilder();
|
||||
mockedParseWorkflowCodeToBuilder.mockReturnValue(builder as never);
|
||||
mockedValidateWorkflow.mockReturnValue({
|
||||
errors: [{ code: 'INVALID_PARAM', message: 'Bad param', nodeName: 'HTTP' }],
|
||||
warnings: [],
|
||||
} as never);
|
||||
|
||||
const result = parseAndValidate('code');
|
||||
|
||||
expect(result.warnings).toContainEqual({
|
||||
code: 'INVALID_PARAM',
|
||||
message: 'Bad param',
|
||||
nodeName: 'HTTP',
|
||||
});
|
||||
});
|
||||
|
||||
it('combines graph and schema validation issues', () => {
|
||||
const builder = makeBuilder({
|
||||
validate: jest.fn().mockReturnValue({
|
||||
errors: [{ code: 'E1', message: 'graph error' }],
|
||||
warnings: [],
|
||||
}),
|
||||
});
|
||||
mockedParseWorkflowCodeToBuilder.mockReturnValue(builder as never);
|
||||
mockedValidateWorkflow.mockReturnValue({
|
||||
errors: [{ code: 'E2', message: 'schema error' }],
|
||||
warnings: [{ code: 'W1', message: 'schema warning' }],
|
||||
} as never);
|
||||
|
||||
const result = parseAndValidate('code');
|
||||
|
||||
expect(result.warnings).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('throws when parsing fails', () => {
|
||||
mockedParseWorkflowCodeToBuilder.mockImplementation(() => {
|
||||
throw new Error('Syntax error at line 5');
|
||||
});
|
||||
|
||||
expect(() => parseAndValidate('bad code')).toThrow(
|
||||
'Failed to parse workflow code: Syntax error at line 5',
|
||||
);
|
||||
});
|
||||
|
||||
it('wraps non-Error exceptions', () => {
|
||||
mockedParseWorkflowCodeToBuilder.mockImplementation(() => {
|
||||
// eslint-disable-next-line @typescript-eslint/only-throw-error
|
||||
throw 'string error';
|
||||
});
|
||||
|
||||
expect(() => parseAndValidate('bad code')).toThrow(
|
||||
'Failed to parse workflow code: Unknown error',
|
||||
);
|
||||
});
|
||||
});
|
||||
import { partitionWarnings } from '../parse-validate';
|
||||
|
||||
describe('partitionWarnings', () => {
|
||||
it('returns empty arrays for no warnings', () => {
|
||||
|
|
|
|||
|
|
@ -1,264 +0,0 @@
|
|||
import { applyPatches } from '../patch-code';
|
||||
|
||||
describe('applyPatches', () => {
|
||||
// ── Exact match ────────────────────────────────────────────────────────────
|
||||
|
||||
describe('exact match', () => {
|
||||
it('should replace a single exact match', () => {
|
||||
const code = 'const x = 1;';
|
||||
const result = applyPatches(code, [{ old_str: 'const x = 1;', new_str: 'const x = 2;' }]);
|
||||
expect(result).toEqual({ success: true, code: 'const x = 2;' });
|
||||
});
|
||||
|
||||
it('should apply multiple patches sequentially', () => {
|
||||
const code = 'const a = 1;\nconst b = 2;';
|
||||
const result = applyPatches(code, [
|
||||
{ old_str: 'const a = 1;', new_str: 'const a = 10;' },
|
||||
{ old_str: 'const b = 2;', new_str: 'const b = 20;' },
|
||||
]);
|
||||
expect(result).toEqual({ success: true, code: 'const a = 10;\nconst b = 20;' });
|
||||
});
|
||||
|
||||
it('should replace only the first occurrence when code has duplicates', () => {
|
||||
const code = 'foo\nfoo\nfoo';
|
||||
const result = applyPatches(code, [{ old_str: 'foo', new_str: 'bar' }]);
|
||||
expect(result).toEqual({ success: true, code: 'bar\nfoo\nfoo' });
|
||||
});
|
||||
});
|
||||
|
||||
// ── Whitespace-normalized match ────────────────────────────────────────────
|
||||
|
||||
describe('whitespace-normalized match', () => {
|
||||
it('should match when extra spaces exist in the code', () => {
|
||||
const code = 'const x = 1;';
|
||||
const result = applyPatches(code, [{ old_str: 'const x = 1;', new_str: 'const x = 2;' }]);
|
||||
expect(result).toEqual({ success: true, code: 'const x = 2;' });
|
||||
});
|
||||
|
||||
it('should match when tabs are used instead of spaces', () => {
|
||||
const code = 'const\tx\t=\t1;';
|
||||
const result = applyPatches(code, [{ old_str: 'const x = 1;', new_str: 'const x = 2;' }]);
|
||||
expect(result).toEqual({ success: true, code: 'const x = 2;' });
|
||||
});
|
||||
|
||||
it('should match when newlines collapse to single space', () => {
|
||||
const code = 'const\n x\n = 1;';
|
||||
const result = applyPatches(code, [{ old_str: 'const x = 1;', new_str: 'const x = 2;' }]);
|
||||
expect(result).toEqual({ success: true, code: 'const x = 2;' });
|
||||
});
|
||||
});
|
||||
|
||||
// ── Trimmed-lines match ────────────────────────────────────────────────────
|
||||
|
||||
describe('trimmed-lines match', () => {
|
||||
it('should match when code has different indentation levels', () => {
|
||||
const code = ' if (true) {\n return 1;\n }';
|
||||
const result = applyPatches(code, [
|
||||
{ old_str: 'if (true) {\nreturn 1;\n}', new_str: 'if (false) {\nreturn 0;\n}' },
|
||||
]);
|
||||
expect(result).toEqual({ success: true, code: 'if (false) {\nreturn 0;\n}' });
|
||||
});
|
||||
|
||||
it('should match when needle has extra indentation but code does not', () => {
|
||||
const code = 'if (true) {\nreturn 1;\n}';
|
||||
const result = applyPatches(code, [
|
||||
{
|
||||
old_str: ' if (true) {\n return 1;\n }',
|
||||
new_str: 'if (false) {\nreturn 0;\n}',
|
||||
},
|
||||
]);
|
||||
expect(result).toEqual({ success: true, code: 'if (false) {\nreturn 0;\n}' });
|
||||
});
|
||||
});
|
||||
|
||||
// ── No match ───────────────────────────────────────────────────────────────
|
||||
|
||||
describe('no match', () => {
|
||||
it('should return an error when old_str is not found', () => {
|
||||
const code = 'const x = 1;';
|
||||
const result = applyPatches(code, [{ old_str: 'const y = 999;', new_str: 'const z = 0;' }]);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error).toContain('Patch failed');
|
||||
expect(result.error).toContain('could not find old_str in code');
|
||||
}
|
||||
});
|
||||
|
||||
it('should include context about the nearest match in the error', () => {
|
||||
const code = 'function hello() {\n return "world";\n}';
|
||||
const result = applyPatches(code, [
|
||||
{ old_str: 'function hello() {\n return "universe";\n}', new_str: 'replaced' },
|
||||
]);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error).toContain('Nearest match');
|
||||
}
|
||||
});
|
||||
|
||||
it('should include the searched string (truncated) in the error', () => {
|
||||
const code = 'short code';
|
||||
const longOldStr = 'x'.repeat(200);
|
||||
const result = applyPatches(code, [{ old_str: longOldStr, new_str: 'replacement' }]);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error).toContain('...');
|
||||
expect(result.error).toContain('Searched for');
|
||||
}
|
||||
});
|
||||
|
||||
it('should mention all tried strategies in the error', () => {
|
||||
const code = 'const x = 1;';
|
||||
const result = applyPatches(code, [
|
||||
{ old_str: 'completely different code', new_str: 'replacement' },
|
||||
]);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error).toContain('exact match');
|
||||
expect(result.error).toContain('whitespace-normalized');
|
||||
expect(result.error).toContain('trimmed-lines');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ── Empty patches ──────────────────────────────────────────────────────────
|
||||
|
||||
describe('empty patches array', () => {
|
||||
it('should return original code unchanged', () => {
|
||||
const code = 'const x = 1;';
|
||||
const result = applyPatches(code, []);
|
||||
expect(result).toEqual({ success: true, code: 'const x = 1;' });
|
||||
});
|
||||
});
|
||||
|
||||
// ── old_str equals new_str ─────────────────────────────────────────────────
|
||||
|
||||
describe('old_str equals new_str', () => {
|
||||
it('should succeed and return the same code', () => {
|
||||
const code = 'const x = 1;';
|
||||
const result = applyPatches(code, [{ old_str: 'const x = 1;', new_str: 'const x = 1;' }]);
|
||||
expect(result).toEqual({ success: true, code: 'const x = 1;' });
|
||||
});
|
||||
});
|
||||
|
||||
// ── Sequential patches ─────────────────────────────────────────────────────
|
||||
|
||||
describe('sequential patches', () => {
|
||||
it('should apply second patch to the result of the first', () => {
|
||||
const code = 'const x = 1;';
|
||||
const result = applyPatches(code, [
|
||||
{ old_str: 'const x = 1;', new_str: 'const x = 2;' },
|
||||
{ old_str: 'const x = 2;', new_str: 'const x = 3;' },
|
||||
]);
|
||||
expect(result).toEqual({ success: true, code: 'const x = 3;' });
|
||||
});
|
||||
|
||||
it('should allow second patch to reference text introduced by first patch', () => {
|
||||
const code = 'hello world';
|
||||
const result = applyPatches(code, [
|
||||
{ old_str: 'hello', new_str: 'goodbye cruel' },
|
||||
{ old_str: 'cruel world', new_str: 'moon' },
|
||||
]);
|
||||
expect(result).toEqual({ success: true, code: 'goodbye moon' });
|
||||
});
|
||||
});
|
||||
|
||||
// ── Failure mid-sequence ───────────────────────────────────────────────────
|
||||
|
||||
describe('failure mid-sequence', () => {
|
||||
it('should return error when second patch fails after first succeeds', () => {
|
||||
const code = 'const a = 1;\nconst b = 2;';
|
||||
const result = applyPatches(code, [
|
||||
{ old_str: 'const a = 1;', new_str: 'const a = 10;' },
|
||||
{ old_str: 'const c = 3;', new_str: 'const c = 30;' },
|
||||
]);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error).toContain('const c = 3;');
|
||||
}
|
||||
});
|
||||
|
||||
it('should not apply any subsequent patches after a failure', () => {
|
||||
const code = 'alpha beta gamma';
|
||||
const result = applyPatches(code, [
|
||||
{ old_str: 'alpha', new_str: 'ALPHA' },
|
||||
{ old_str: 'nonexistent', new_str: 'NOPE' },
|
||||
{ old_str: 'gamma', new_str: 'GAMMA' },
|
||||
]);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// ── Real-world TypeScript patching ─────────────────────────────────────────
|
||||
|
||||
describe('real-world example', () => {
|
||||
it('should patch TypeScript code with indentation differences', () => {
|
||||
const interpolation = '$' + '{name}';
|
||||
const code = [
|
||||
'export function greet(name: string): string {',
|
||||
'\tconst greeting = `Hello, ' + interpolation + '!`;',
|
||||
'\tconsole.log(greeting);',
|
||||
'\treturn greeting;',
|
||||
'}',
|
||||
].join('\n');
|
||||
|
||||
// Patch comes in with different indentation (spaces instead of tabs)
|
||||
const result = applyPatches(code, [
|
||||
{
|
||||
old_str: [
|
||||
' const greeting = `Hello, ' + interpolation + '!`;',
|
||||
' console.log(greeting);',
|
||||
' return greeting;',
|
||||
].join('\n'),
|
||||
new_str: ['\tconst greeting = `Hi, ' + interpolation + '!`;', '\treturn greeting;'].join(
|
||||
'\n',
|
||||
),
|
||||
},
|
||||
]);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
if (result.success) {
|
||||
expect(result.code).toContain('Hi, ' + interpolation + '!');
|
||||
expect(result.code).not.toContain('console.log');
|
||||
}
|
||||
});
|
||||
|
||||
it('should patch a multiline function with whitespace differences', () => {
|
||||
const code = [
|
||||
'function add(a: number, b: number): number {',
|
||||
' return a + b;',
|
||||
'}',
|
||||
'',
|
||||
'function subtract(a: number, b: number): number {',
|
||||
' return a - b;',
|
||||
'}',
|
||||
].join('\n');
|
||||
|
||||
const result = applyPatches(code, [
|
||||
{
|
||||
old_str: 'function add(a: number, b: number): number {\n return a + b;\n}',
|
||||
new_str:
|
||||
'function add(a: number, b: number): number {\n return a + b + 0; // identity\n}',
|
||||
},
|
||||
]);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
if (result.success) {
|
||||
expect(result.code).toContain('return a + b + 0; // identity');
|
||||
expect(result.code).toContain('function subtract');
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle deletion (replacing with empty string)', () => {
|
||||
const code = 'line1\nline2\nline3';
|
||||
const result = applyPatches(code, [{ old_str: '\nline2', new_str: '' }]);
|
||||
expect(result).toEqual({ success: true, code: 'line1\nline3' });
|
||||
});
|
||||
|
||||
it('should handle insertion (empty old_str matches start of code)', () => {
|
||||
const code = 'existing code';
|
||||
// An empty old_str matches at index 0 via exact match (indexOf returns 0)
|
||||
const result = applyPatches(code, [{ old_str: '', new_str: '// header\n' }]);
|
||||
expect(result).toEqual({ success: true, code: '// header\nexisting code' });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,142 +1,6 @@
|
|||
/**
|
||||
* Code extraction utilities for workflow SDK code.
|
||||
*
|
||||
* Adapted from ai-workflow-builder.ee/code-builder/utils/extract-code.ts
|
||||
*/
|
||||
|
||||
import * as path from 'node:path';
|
||||
|
||||
/**
|
||||
* Comprehensive import statement with all available SDK functions.
|
||||
* This is prepended to workflow code so the LLM knows what's available.
|
||||
* Prepended to workflow code so the LLM knows what's available.
|
||||
*/
|
||||
export const SDK_IMPORT_STATEMENT =
|
||||
"import { workflow, node, trigger, sticky, placeholder, newCredential, ifElse, switchCase, merge, splitInBatches, nextBatch, languageModel, memory, tool, outputParser, embedding, embeddings, vectorStore, retriever, documentLoader, textSplitter, fromAi, expr } from '@n8n/workflow-sdk';";
|
||||
|
||||
/** Matches any import statement (single-line, multi-line, side-effect, default, namespace) */
|
||||
const IMPORT_REGEX = /^\s*import\s+(?:[\s\S]*?from\s+)?['"]([^'"]+)['"];?\s*$/gm;
|
||||
|
||||
/**
|
||||
* Strip import statements from workflow code.
|
||||
* The SDK functions are available as globals, so imports are not needed at runtime.
|
||||
*/
|
||||
export function stripImportStatements(code: string): string {
|
||||
return code
|
||||
.replace(IMPORT_REGEX, '')
|
||||
.replace(/^\s*\n/, '') // Remove leading blank line if present
|
||||
.trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Strip only SDK imports (@n8n/workflow-sdk), preserving local imports.
|
||||
*/
|
||||
export function stripSdkImports(code: string): string {
|
||||
const sdkImportRegex = /^\s*import\s+(?:[\s\S]*?from\s+)?['"]@n8n\/workflow-sdk['"];?\s*$/gm;
|
||||
return code.replace(sdkImportRegex, '').trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Matches local import statements and captures the specifier.
|
||||
* E.g. `import { weatherNode } from './chunks/weather'` → captures `./chunks/weather`
|
||||
*/
|
||||
const LOCAL_IMPORT_REGEX = /^\s*import\s+(?:[\s\S]*?from\s+)?['"](\.\.?\/[^'"]+)['"];?\s*$/gm;
|
||||
|
||||
/**
|
||||
* Resolve local imports from the sandbox filesystem.
|
||||
*
|
||||
* Finds local import statements (relative paths like `./foo` or `../chunks/bar`),
|
||||
* reads each imported file, strips SDK imports and `export` keywords, and inlines
|
||||
* the code before the main file's content. The combined result is ready for
|
||||
* `parseWorkflowCodeToBuilder()`.
|
||||
*
|
||||
* Supports one level of nested imports (chunk importing another chunk).
|
||||
*
|
||||
* @param code - The main workflow file content
|
||||
* @param basePath - Directory of the main file (for resolving relative imports)
|
||||
* @param readFile - Function to read a file from the sandbox, returns null if not found
|
||||
*/
|
||||
export async function resolveLocalImports(
|
||||
code: string,
|
||||
basePath: string,
|
||||
readFile: (filePath: string) => Promise<string | null>,
|
||||
): Promise<string> {
|
||||
const resolved = new Set<string>();
|
||||
const inlinedChunks: string[] = [];
|
||||
|
||||
async function resolveFile(fileCode: string, fileDir: string, depth: number): Promise<void> {
|
||||
if (depth > 5) return; // Guard against circular imports
|
||||
|
||||
// Find all local imports in this file
|
||||
const imports: Array<{ fullMatch: string; specifier: string }> = [];
|
||||
let match: RegExpExecArray | null;
|
||||
const regex = new RegExp(LOCAL_IMPORT_REGEX.source, 'gm');
|
||||
|
||||
while ((match = regex.exec(fileCode)) !== null) {
|
||||
imports.push({ fullMatch: match[0], specifier: match[1] });
|
||||
}
|
||||
|
||||
for (const imp of imports) {
|
||||
// Resolve the file path — try .ts extension if not present
|
||||
let resolvedPath = path.resolve(fileDir, imp.specifier);
|
||||
if (!resolvedPath.endsWith('.ts')) {
|
||||
resolvedPath += '.ts';
|
||||
}
|
||||
|
||||
// Skip if already resolved (dedup)
|
||||
if (resolved.has(resolvedPath)) continue;
|
||||
resolved.add(resolvedPath);
|
||||
|
||||
const content = await readFile(resolvedPath);
|
||||
if (content === null) continue; // Skip missing files silently
|
||||
|
||||
// Recursively resolve imports in the chunk
|
||||
await resolveFile(content, path.dirname(resolvedPath), depth + 1);
|
||||
|
||||
// Strip SDK imports and `export` keywords, then add to chunks
|
||||
let cleaned = stripSdkImports(content);
|
||||
// Remove local imports (already resolved recursively)
|
||||
cleaned = cleaned.replace(new RegExp(LOCAL_IMPORT_REGEX.source, 'gm'), '');
|
||||
// Remove `export` from declarations: `export const X` → `const X`, `export default` → removed
|
||||
cleaned = cleaned.replace(/^export\s+default\s+/gm, '');
|
||||
cleaned = cleaned.replace(/^export\s+/gm, '');
|
||||
cleaned = cleaned.trim();
|
||||
|
||||
if (cleaned) {
|
||||
inlinedChunks.push(cleaned);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await resolveFile(code, basePath, 0);
|
||||
|
||||
// Remove local imports from the main code
|
||||
const mainCode = code.replace(new RegExp(LOCAL_IMPORT_REGEX.source, 'gm'), '');
|
||||
|
||||
if (inlinedChunks.length === 0) {
|
||||
return mainCode;
|
||||
}
|
||||
|
||||
// Prepend inlined chunks before the main code
|
||||
return [...inlinedChunks, mainCode].join('\n\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract workflow code from an LLM response.
|
||||
*
|
||||
* Looks for TypeScript/JavaScript code blocks (```typescript, ```ts, or ```)
|
||||
* and extracts the content. If no code block is found, returns the trimmed response.
|
||||
* Also strips any import statements since SDK functions are available as globals.
|
||||
*/
|
||||
export function extractWorkflowCode(response: string): string {
|
||||
// Match ```typescript, ```ts, ```javascript, ```js, or ``` code blocks
|
||||
const codeBlockRegex = /```(?:typescript|ts|javascript|js)?\n([\s\S]*?)```/;
|
||||
const match = response.match(codeBlockRegex);
|
||||
|
||||
if (match) {
|
||||
const code = match[1].trim();
|
||||
return stripImportStatements(code);
|
||||
}
|
||||
|
||||
// Fallback: return trimmed response if no code block found
|
||||
return stripImportStatements(response.trim());
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,15 +1,3 @@
|
|||
export {
|
||||
extractWorkflowCode,
|
||||
stripImportStatements,
|
||||
resolveLocalImports,
|
||||
SDK_IMPORT_STATEMENT,
|
||||
} from './extract-code';
|
||||
export { applyPatches } from './patch-code';
|
||||
export { parseAndValidate, partitionWarnings } from './parse-validate';
|
||||
export {
|
||||
EXPRESSION_REFERENCE,
|
||||
ADDITIONAL_FUNCTIONS,
|
||||
WORKFLOW_RULES,
|
||||
WORKFLOW_SDK_PATTERNS,
|
||||
} from './sdk-prompt-sections';
|
||||
export type { ValidationWarning, ParseAndValidateResult } from './types';
|
||||
export { SDK_IMPORT_STATEMENT } from './extract-code';
|
||||
export { partitionWarnings } from './parse-validate';
|
||||
export type { ValidationWarning } from './types';
|
||||
|
|
|
|||
|
|
@ -1,83 +1,4 @@
|
|||
/**
|
||||
* Parse and Validate Handler
|
||||
*
|
||||
* Handles parsing TypeScript workflow code to WorkflowJSON and validation.
|
||||
* Adapted from ai-workflow-builder.ee/code-builder/handlers/parse-validate-handler.ts
|
||||
* without Logger or LangChain dependencies.
|
||||
*/
|
||||
|
||||
import { parseWorkflowCodeToBuilder, validateWorkflow } from '@n8n/workflow-sdk';
|
||||
|
||||
import { stripImportStatements } from './extract-code';
|
||||
import type { ParseAndValidateResult, ValidationWarning } from './types';
|
||||
|
||||
/** Validation issue from graph or JSON validation */
|
||||
interface ValidationIssue {
|
||||
code: string;
|
||||
message: string;
|
||||
nodeName?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect validation issues into the warnings array.
|
||||
*/
|
||||
function collectValidationIssues(
|
||||
issues: ValidationIssue[],
|
||||
allWarnings: ValidationWarning[],
|
||||
): void {
|
||||
for (const issue of issues) {
|
||||
allWarnings.push({
|
||||
code: issue.code,
|
||||
message: issue.message,
|
||||
nodeName: issue.nodeName,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse TypeScript workflow SDK code and validate it in two stages:
|
||||
*
|
||||
* 1. **Structural validation** (`builder.validate()`) — graph consistency,
|
||||
* disconnected nodes, missing triggers
|
||||
* 2. **Schema validation** (`validateWorkflow(json)`) — Zod schema checks
|
||||
* against node parameter definitions loaded via `setSchemaBaseDirs()`
|
||||
*
|
||||
* @param code - The TypeScript workflow code to parse
|
||||
* @returns ParseAndValidateResult with workflow JSON and any warnings/errors
|
||||
* @throws Error if parsing fails
|
||||
*/
|
||||
export function parseAndValidate(code: string): ParseAndValidateResult {
|
||||
// Strip import statements before parsing — SDK functions are available as globals
|
||||
const codeToParse = stripImportStatements(code);
|
||||
|
||||
try {
|
||||
// Parse the TypeScript code to WorkflowBuilder
|
||||
const builder = parseWorkflowCodeToBuilder(codeToParse);
|
||||
|
||||
// Regenerate node IDs deterministically to ensure stable IDs across re-parses
|
||||
builder.regenerateNodeIds();
|
||||
|
||||
const allWarnings: ValidationWarning[] = [];
|
||||
|
||||
// Stage 1: Structural validation via graph validators
|
||||
const graphValidation = builder.validate();
|
||||
collectValidationIssues(graphValidation.errors, allWarnings);
|
||||
collectValidationIssues(graphValidation.warnings, allWarnings);
|
||||
|
||||
const json = builder.toJSON();
|
||||
|
||||
// Stage 2: Schema validation via Zod schemas from schemaBaseDirs
|
||||
const schemaValidation = validateWorkflow(json);
|
||||
collectValidationIssues(schemaValidation.errors, allWarnings);
|
||||
collectValidationIssues(schemaValidation.warnings, allWarnings);
|
||||
|
||||
return { workflow: json, warnings: allWarnings };
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to parse workflow code: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
import type { ValidationWarning } from './types';
|
||||
|
||||
/**
|
||||
* Separate errors (blocking) from warnings (informational) in validation results.
|
||||
|
|
@ -89,7 +10,6 @@ export function partitionWarnings(warnings: ValidationWarning[]): {
|
|||
errors: ValidationWarning[];
|
||||
informational: ValidationWarning[];
|
||||
} {
|
||||
// Known informational-only codes (not blockers)
|
||||
const informationalCodes = new Set(['MISSING_TRIGGER', 'DISCONNECTED_NODE']);
|
||||
|
||||
const errors: ValidationWarning[] = [];
|
||||
|
|
|
|||
|
|
@ -1,220 +0,0 @@
|
|||
/**
|
||||
* Patch code utilities with layered fuzzy matching.
|
||||
*
|
||||
* Applies str_replace patches with progressive fallback:
|
||||
* 1. Exact match
|
||||
* 2. Whitespace-normalized match (collapse runs of whitespace)
|
||||
* 3. Trimmed-lines match (ignore leading/trailing whitespace per line)
|
||||
*
|
||||
* When all matching fails, returns actionable error with nearby code context
|
||||
* so the LLM can fix its old_str.
|
||||
*/
|
||||
|
||||
/** A single str_replace-style edit: replace `old_str` with `new_str` in the code. */
interface Patch {
	old_str: string;
	new_str: string;
}

/** Successful patch application; `code` is the fully patched source. */
interface PatchResult {
	success: true;
	code: string;
}

/** Failed patch application; `error` is actionable feedback for the caller. */
interface PatchError {
	success: false;
	error: string;
}
|
||||
|
||||
/**
|
||||
* Normalize whitespace: collapse consecutive whitespace into single space, trim.
|
||||
*/
|
||||
function normalizeWhitespace(s: string): string {
|
||||
return s.replace(/\s+/g, ' ').trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize each line: trim leading/trailing whitespace per line, join with \n.
|
||||
*/
|
||||
function normalizeTrimmedLines(s: string): string {
|
||||
return s
|
||||
.split('\n')
|
||||
.map((line) => line.trim())
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
/**
 * Find the position of `needle` in `haystack` using the normalized matcher.
 * Returns { start, end } character indices in the original haystack, or null.
 *
 * Strategy: normalize both strings, locate the needle in normalized space,
 * then map the hit back to original character positions by testing candidate
 * substrings of the original until one normalizes to the same value.
 */
function fuzzyFind(
	haystack: string,
	needle: string,
	normalizer: (s: string) => string,
): { start: number; end: number } | null {
	const normalizedNeedle = normalizer(needle);
	// An empty normalized needle would match everywhere; treat as no match.
	if (!normalizedNeedle) return null;

	// Locate the needle in normalized space first — if it's absent there,
	// no original substring can normalize to it either.
	const normalizedHaystack = normalizer(haystack);
	const idx = normalizedHaystack.indexOf(normalizedNeedle);
	if (idx === -1) return null;

	// Map the normalized hit back to an approximate original region using
	// the length ratio, padded by 50 chars on each side to absorb the
	// whitespace the normalizer removed.
	// NOTE(review): this is a heuristic — if whitespace is distributed very
	// unevenly, the true match can fall outside this window; the wide
	// fallback below covers that case.
	const ratio = haystack.length / Math.max(normalizedHaystack.length, 1);
	const approxStart = Math.max(0, Math.floor(idx * ratio) - 50);
	const approxEnd = Math.min(
		haystack.length,
		Math.ceil((idx + normalizedNeedle.length) * ratio) + 50,
	);

	// Search within the approximate region for exact boundaries: a candidate
	// substring matches when it normalizes to exactly the normalized needle.
	for (let start = approxStart; start <= approxEnd; start++) {
		for (
			// NOTE(review): for needles shorter than 20 chars this initial
			// `end` is below `start`, yielding empty candidates that can never
			// match (normalizedNeedle is non-empty) — wasted iterations only,
			// not a correctness issue.
			let end = start + needle.length - 20;
			end <= Math.min(haystack.length, start + needle.length + 50);
			end++
		) {
			const candidate = haystack.slice(start, end);
			if (normalizer(candidate) === normalizedNeedle) {
				return { start, end };
			}
		}
	}

	// Fallback: exhaustive search over the whole haystack with candidate
	// lengths up to twice the needle length. O(n * m) calls to the
	// normalizer — acceptable for patch-sized inputs, presumably; confirm
	// before using on large files.
	for (let start = 0; start < haystack.length; start++) {
		for (let end = start + 1; end <= Math.min(haystack.length, start + needle.length * 2); end++) {
			const candidate = haystack.slice(start, end);
			if (normalizer(candidate) === normalizedNeedle) {
				return { start, end };
			}
		}
	}

	return null;
}
|
||||
|
||||
/**
|
||||
* Find the best match for `needle` in `code` using layered matching.
|
||||
* Returns the matched region { start, end } or null.
|
||||
*/
|
||||
function findMatch(
|
||||
code: string,
|
||||
needle: string,
|
||||
): { start: number; end: number; strategy: string } | null {
|
||||
// Layer 1: Exact match
|
||||
const exactIdx = code.indexOf(needle);
|
||||
if (exactIdx !== -1) {
|
||||
return { start: exactIdx, end: exactIdx + needle.length, strategy: 'exact' };
|
||||
}
|
||||
|
||||
// Layer 2: Whitespace-normalized match
|
||||
const wsMatch = fuzzyFind(code, needle, normalizeWhitespace);
|
||||
if (wsMatch) {
|
||||
return { ...wsMatch, strategy: 'whitespace-normalized' };
|
||||
}
|
||||
|
||||
// Layer 3: Trimmed-lines match (handles indentation differences)
|
||||
const trimMatch = fuzzyFind(code, needle, normalizeTrimmedLines);
|
||||
if (trimMatch) {
|
||||
return { ...trimMatch, strategy: 'trimmed-lines' };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Get code context around a search string for error feedback.
 * Shows the LLM what the actual code looks like near where it expected the match.
 *
 * Returns an empty string when no sufficiently similar line exists.
 */
function getContextForError(code: string, needle: string): string {
	// Anchor on the needle's first line only — enough to locate the region.
	const firstLine = needle.split('\n')[0].trim();
	if (!firstLine) return '';

	const lines = code.split('\n');
	let bestLineIdx = -1;
	let bestScore = 0;

	for (let i = 0; i < lines.length; i++) {
		const line = lines[i].trim();
		if (!line) continue;

		// Substring containment in either direction is treated as a
		// definitive hit (score 100) and stops the scan.
		if (line.includes(firstLine) || firstLine.includes(line)) {
			bestLineIdx = i;
			bestScore = 100;
			break;
		}

		// Otherwise score by shared word count (case-insensitive).
		// NOTE(review): needleWords is loop-invariant and could be hoisted
		// out of the loop — recomputed per line here.
		const needleWords = new Set(firstLine.toLowerCase().split(/\W+/).filter(Boolean));
		const lineWords = line.toLowerCase().split(/\W+/).filter(Boolean);
		const overlap = lineWords.filter((w) => needleWords.has(w)).length;
		if (overlap > bestScore) {
			bestScore = overlap;
			bestLineIdx = i;
		}
	}

	// Require at least two overlapping words; weaker matches are noise.
	if (bestLineIdx === -1 || bestScore < 2) return '';

	// Show 3 lines before and after the best match, 1-based line numbers,
	// with a '>' marker on the matched line.
	const start = Math.max(0, bestLineIdx - 3);
	const end = Math.min(lines.length, bestLineIdx + 4);
	const context = lines
		.slice(start, end)
		.map((l, i) => {
			const lineNum = start + i + 1;
			const marker = start + i === bestLineIdx ? '> ' : ' ';
			return `${marker}${lineNum}: ${l}`;
		})
		.join('\n');

	return `\nNearest match in code around line ${bestLineIdx + 1}:\n${context}`;
}
|
||||
|
||||
/**
|
||||
* Apply an array of patches to code with layered fuzzy matching.
|
||||
*
|
||||
* Each patch is applied sequentially. If any patch fails all matching
|
||||
* strategies, returns an actionable error with code context.
|
||||
*/
|
||||
export function applyPatches(code: string, patches: Patch[]): PatchResult | PatchError {
|
||||
let result = code;
|
||||
|
||||
for (const patch of patches) {
|
||||
const match = findMatch(result, patch.old_str);
|
||||
|
||||
if (!match) {
|
||||
const context = getContextForError(result, patch.old_str);
|
||||
const truncated = patch.old_str.slice(0, 150) + (patch.old_str.length > 150 ? '...' : '');
|
||||
return {
|
||||
success: false,
|
||||
error:
|
||||
'Patch failed: could not find old_str in code.' +
|
||||
'\nSearched for: "' +
|
||||
truncated +
|
||||
'"' +
|
||||
'\nTried: exact match, whitespace-normalized, trimmed-lines.' +
|
||||
(context || '\nNo similar code found nearby.') +
|
||||
'\nTip: use get-workflow-as-code to see the exact current code, then match it precisely.',
|
||||
};
|
||||
}
|
||||
|
||||
// Apply the replacement using the matched region
|
||||
result = result.slice(0, match.start) + patch.new_str + result.slice(match.end);
|
||||
}
|
||||
|
||||
return { success: true, code: result };
|
||||
}
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
/**
|
||||
* SDK prompt sections for the workflow builder sub-agent.
|
||||
*
|
||||
* Re-exports from the canonical source in @n8n/workflow-sdk/prompts.
|
||||
*/
|
||||
|
||||
export {
|
||||
EXPRESSION_REFERENCE,
|
||||
ADDITIONAL_FUNCTIONS,
|
||||
WORKFLOW_RULES,
|
||||
WORKFLOW_SDK_PATTERNS,
|
||||
} from '@n8n/workflow-sdk/prompts/sdk-reference';
|
||||
|
|
@ -1,26 +1,7 @@
|
|||
/**
|
||||
* Types for the workflow builder utilities.
|
||||
*
|
||||
* Adapted from ai-workflow-builder.ee/code-builder/types.ts — only the types
|
||||
* relevant to parse/validate, without LangChain dependencies.
|
||||
*/
|
||||
|
||||
import type { WorkflowJSON } from '@n8n/workflow-sdk';
|
||||
|
||||
/** Validation warning with optional location info. */
export interface ValidationWarning {
	code: string;
	message: string;
	nodeName?: string;
	parameterPath?: string;
}

/** Result from parseAndValidate: the generated workflow JSON plus any warnings. */
export interface ParseAndValidateResult {
	workflow: WorkflowJSON;
	warnings: ValidationWarning[];
}
|
||||
|
|
|
|||
Loading…
Reference in a new issue